diff --git a/.eslintignore b/.eslintignore index 1967d51fb36e2..7b2dbc6fb1c0c 100644 --- a/.eslintignore +++ b/.eslintignore @@ -11,3 +11,5 @@ website-prototyping-tools/node_modules *.md src/flow/ packages/relay-flight-experimental/ +# The VSCode Extension gets linted with a separate setup +vscode-extension diff --git a/.eslintrc.js b/.eslintrc.js index b0f1dfdbb3b5c..199408e5ceff0 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -26,6 +26,9 @@ module.exports = { // Flow declares trip up the no-redeclare rule 'no-redeclare': 'off', + // Flow handles these rules + 'no-unreachable': 'off', + // Prettier and ESLint may disagree on the following rules indent: 'off', 'array-bracket-spacing': 'off', @@ -41,7 +44,15 @@ module.exports = { // TODO T31139228: remove or re-enable these once eslint-plugin-flowtype // is compatible with babel-eslint >= 8 'no-undef': 'off', - 'no-unused-vars': [1, {args: 'none'}], + 'no-unused-vars': [ + 1, + { + args: 'none', + varsIgnorePattern: '^_', + argsIgnorePattern: '^_', + ignoreRestSiblings: true, + }, + ], // This has a different name internally 'no-label-var': 'off', diff --git a/.flowconfig b/.flowconfig index 670998fef8244..fa0c1524d65a3 100644 --- a/.flowconfig +++ b/.flowconfig @@ -3,11 +3,8 @@ /compiler/.* /.*/node_modules/.* .*/node_modules/resolve/test/resolver/malformed_package_json/package.json -/packages/relay-flight-experimental/.* [options] -exact_by_default=true - module.system=haste module.system.haste.use_name_reducers=true # get basename @@ -20,6 +17,7 @@ module.system.haste.paths.excludes=.*/__mocks__/.* module.system.haste.paths.includes=/node_modules/fbjs/lib/.* munge_underscores=true +; https://fburl.com/code/h2jnts20 - to match internval value suppress_type=$FlowIssue suppress_type=$FlowFixMe @@ -29,9 +27,6 @@ suppress_type=$FlowExpectedError format.bracket_spacing=false -experimental.abstract_locations=true -inference_mode=lti - [lints] untyped-type-import=error @@ -45,4 +40,4 @@ untyped-import 
untyped-type-import [version] -^0.201.0 +^0.238.0 diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 71055b37fd9aa..44b3b87082896 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -33,10 +33,11 @@ For JavaScript changes: For Rust changes: -1. Ensure all rust code is formatted by running `cargo fmt` from within `./compiler`. -2. If you've added or removed any fixture tests, ensure all generated tests are up to date by running `./scripts/update-fixtures.sh` from the repository root. -3. Ensure all code typechecks by running `cargo check` from within `./compiler`. -4. Ensure all tests pass by running `cargo test` from within `./compiler`. +1. [Install Rust and Cargo](https://www.rust-lang.org/tools/install) +2. Ensure all rust code is formatted by running `cargo fmt` from within `./compiler`. +3. If you've added or removed any fixture tests, ensure all generated tests are up to date by running `./scripts/update-fixtures.sh` from the repository root. +4. Ensure all code typechecks by running `cargo check` from within `./compiler`. +5. Ensure all tests pass by running `cargo test` from within `./compiler`. 
### Contributor License Agreement (CLA) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f493a5814d92..8471b92b77b0b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,11 +23,11 @@ jobs: - name: Install dependencies run: yarn install --frozen-lockfile --ignore-scripts - name: ESLint - run: yarn run lint + run: yarn --ignore-engines run lint - name: Prettier - run: yarn run prettier-check + run: yarn --ignore-engines run prettier-check - name: Typecheck - run: yarn run typecheck + run: yarn --ignore-engines run typecheck js-tests: name: JS Tests (Node ${{ matrix.node-version }}) @@ -95,7 +95,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.64.0 + toolchain: 1.72.0 override: true - name: "Run tests" run: cargo test --manifest-path=compiler/Cargo.toml --locked ${{ matrix.target.features && '--features' }} ${{ matrix.target.features }} @@ -118,7 +118,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.64.0 + toolchain: 1.72.0 override: true - name: "Update fixture tests" run: ./scripts/update-fixtures.sh @@ -136,7 +136,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: nightly-2022-06-26 + toolchain: nightly-2023-11-11 override: true components: rustfmt - name: "rustfmt" @@ -177,7 +177,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.64.0 + toolchain: 1.72.0 override: true target: ${{ matrix.target.target }} - uses: actions/setup-node@v2 diff --git a/.github/workflows/docusaurus.yml b/.github/workflows/docusaurus.yml index d72c7b8c941ca..b16c33b82a5d0 100644 --- a/.github/workflows/docusaurus.yml +++ b/.github/workflows/docusaurus.yml @@ -10,25 +10,74 @@ on: branches: - main +permissions: + contents: write + jobs: + build-compiler-explorer: + name: Build Compiler Explorer + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: 
actions-rs/toolchain@v1 + with: + toolchain: 1.73.0 # We hit an LLVM error building Wasm on 1.72 + override: true + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: "Build Compiler Playground Wasm NPM package" + run: wasm-pack build --target web + working-directory: ./compiler/crates/relay-compiler-playground + - uses: actions/upload-artifact@v4 + with: + name: compiler-playground-package + path: compiler/crates/relay-compiler-playground/pkg/ + build-and-deploy: runs-on: ubuntu-latest + needs: [build-compiler-explorer] steps: - name: Checkout - uses: actions/checkout@v2.3.1 + uses: actions/checkout@v4 with: persist-credentials: false + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'yarn' + cache-dependency-path: website/yarn.lock + + - name: Set up Docusaurus Build Cache + id: cache-docusaurus + uses: actions/cache@v4 + with: + path: | + website/node_modules/.cache + website/.docusaurus + key: "docusaurus-build-${{ hashFiles('website/yarn.lock') }}" + + - name: Download Compiler Explorer + uses: actions/download-artifact@v4 + with: + name: compiler-playground-package + path: tmp/compiler-playground-package + + - name: Link Compiler Explorer + run: yarn link + working-directory: tmp/compiler-playground-package + - name: Install and Build run: | yarn + yarn link relay-compiler-playground yarn build working-directory: website/ - name: Deploy - uses: JamesIves/github-pages-deploy-action@3.7.1 + uses: JamesIves/github-pages-deploy-action@v4 with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - BRANCH: gh-pages - FOLDER: website/build - CLEAN: true + branch: gh-pages + folder: website/build + clean: true diff --git a/.github/workflows/update-cargo-lock.yml b/.github/workflows/update-cargo-lock.yml index 6dd9e960d4a8e..a489e41291b48 100644 --- a/.github/workflows/update-cargo-lock.yml +++ b/.github/workflows/update-cargo-lock.yml @@ -21,7 +21,7 @@ jobs: - uses: 
actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - toolchain: 1.64.0 + toolchain: 1.72.0 override: true - name: cargo check run: cargo check --features vendored --manifest-path=compiler/Cargo.toml diff --git a/.github/workflows/vscode.yml b/.github/workflows/vscode.yml index e9ffb33d0fd9b..4c30236b3380a 100644 --- a/.github/workflows/vscode.yml +++ b/.github/workflows/vscode.yml @@ -18,10 +18,10 @@ jobs: - name: Install dependencies run: yarn install --frozen-lockfile --ignore-scripts - name: ESLint - run: yarn run lint + run: yarn --ignore-engines run lint - name: Prettier - run: yarn run prettier-check + run: yarn --ignore-engines run prettier-check - name: Typecheck - run: yarn run typecheck + run: yarn --ignore-engines run typecheck - name: Publish - run: yarn vsce publish --pat ${{ secrets.VSCE_PAT }} + run: yarn --ignore-engines vsce publish --pat ${{ secrets.VSCE_PAT }} diff --git a/.gitignore b/.gitignore index b9963443fcd3f..ae5b69e76acce 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,4 @@ npm-debug.log !.vscode/launch.json !.vscode/extensions.json .DS_Store +fb diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a1ea267f24f37..0fb652f5ce522 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -8,7 +8,9 @@ "dbaeumer.vscode-eslint", "rust-lang.rust-analyzer", "meta.relay", - "flowtype.flow-for-vscode" + "flowtype.flow-for-vscode", + // https://github.com/flow/flow-for-vscode#known-issues + "mgmcdermott.vscode-language-babel" ], // List of extensions recommended by VS Code that should not be recommended for users of this workspace. "unwantedRecommendations": [ diff --git a/compiler/.cargo/config.toml b/compiler/.cargo/config.toml index d239c52984d49..3fc150a2c853a 100644 --- a/compiler/.cargo/config.toml +++ b/compiler/.cargo/config.toml @@ -4,3 +4,11 @@ # replace-with = "domorexp" # [unstable] # registry-auth = true +# This file should be kept in sync with the project's Buck flags. +# See parent PACKAGE. 
+ +[build] +rustflags = [ + "-Drust-2018-idioms", + "-Dwarnings", +] diff --git a/compiler/Cargo.lock b/compiler/Cargo.lock index fc8a6606dd0ad..2584d0a45f7e0 100644 --- a/compiler/Cargo.lock +++ b/compiler/Cargo.lock @@ -3,46 +3,79 @@ version = 3 [[package]] -name = "Inflector" -version = "0.11.4" +name = "addr2line" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ - "lazy_static", - "regex", + "gimli", ] +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + [[package]] name = "ahash" -version = "0.7.4" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98" +checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" dependencies = [ - "getrandom", + "cfg-if 1.0.0", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "0.7.18" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] +[[package]] +name = "aliasable" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anyhow" -version = "1.0.42" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595d3cfa7a60d4555cb5067b99f07142a08ea778de5cf993f7b75c7d8fabc486" +checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" [[package]] name = "arbitrary" -version = "1.2.3" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e90af4de65aa7b293ef2d09daff88501eb254f58edde2e1ac02c82d873eadad" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" [[package]] name = "assert_matches" @@ -52,13 +85,13 @@ checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" [[package]] name = "async-trait" -version = "0.1.58" +version = "0.1.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.48", ] [[package]] @@ -67,7 +100,7 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "hermit-abi", + "hermit-abi 0.1.19", "libc", "winapi", ] @@ -78,6 +111,21 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "backtrace" +version = "0.3.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + [[package]] name = "beef" version = "0.5.2" @@ -95,24 +143,36 @@ dependencies = [ [[package]] name = "bitflags" -version = "1.2.1" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + [[package]] name = "byteorder" -version = "1.3.4" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" @@ -126,22 +186,29 @@ dependencies = [ [[package]] name = "bytes" -version = "1.0.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b700ce4376041dcd0a327fd0097c41095743c4c8af8887265942faf1100bd040" +checksum = 
"a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] [[package]] name = "cc" -version = "1.0.67" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", + "libc", ] +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -150,28 +217,29 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" dependencies = [ - "libc", - "num-integer", + "android-tzdata", + "iana-time-zone", + "js-sys", "num-traits", - "time", - "winapi", + "wasm-bindgen", + "windows-targets 0.52.0", ] [[package]] name = "clap" -version = "3.2.23" +version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "atty", - "bitflags", + "bitflags 1.3.2", "clap_derive", "clap_lex", - "indexmap", + "indexmap 1.9.3", "once_cell", "regex", "strsim", @@ -183,35 +251,34 @@ dependencies = [ [[package]] name = "clap_derive" -version = "3.2.18" +version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" +checksum = 
"ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" dependencies = [ - "heck 0.4.0", + "heck", "proc-macro-error", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] name = "clap_lex" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5538cd660450ebeb4234cfecf8f2284b844ffc4c50531e66d584ad5b91293613" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" dependencies = [ "os_str_bytes", ] [[package]] name = "colored" -version = "1.9.3" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4ffc801dacf156c5854b9df4f425a626539c3a6ef7893cc0c5084a23f0b6c59" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ - "atty", "lazy_static", - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -219,7 +286,7 @@ name = "common" version = "0.0.0" dependencies = [ "colored", - "indexmap", + "indexmap 2.2.6", "intern", "log", "lsp-types", @@ -227,6 +294,7 @@ dependencies = [ "rayon", "serde", "serde_json", + "typetag", ] [[package]] @@ -235,11 +303,21 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen", +] + [[package]] name = "core-foundation" -version = "0.9.1" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -247,26 +325,25 @@ dependencies = [ [[package]] name = 
"core-foundation-sys" -version = "0.8.2" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.2" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crossbeam" -version = "0.8.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845" +checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" dependencies = [ - "cfg-if", "crossbeam-channel", "crossbeam-deque", "crossbeam-epoch", @@ -276,57 +353,46 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.0" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" +checksum = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" -version = "0.9.5" +version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" +checksum = 
"5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "cfg-if", "crossbeam-utils", - "lazy_static", - "memoffset", - "scopeguard", ] [[package]] name = "crossbeam-queue" -version = "0.3.2" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" -dependencies = [ - "cfg-if", - "lazy_static", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crypto-common" @@ -338,27 +404,17 @@ dependencies = [ "typenum", ] -[[package]] -name = "ctor" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" -dependencies = [ - "quote", - "syn", -] - [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", - "hashbrown", + "cfg-if 1.0.0", + "hashbrown 0.14.3", "lock_api", "once_cell", - "parking_lot_core 0.9.4", + "parking_lot_core", "rayon", "serde", ] @@ -376,19 +432,22 @@ dependencies = [ "relay-transforms", "rustc-hash", "schema", + "schema-diff", + "serde", + "tokio", ] [[package]] name = "diff" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" +checksum 
= "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", @@ -399,8 +458,11 @@ name = "docblock-shared" version = "0.0.0" dependencies = [ "common", + "hex", "intern", "lazy_static", + "md-5", + "serde", ] [[package]] @@ -408,44 +470,50 @@ name = "docblock-syntax" version = "0.0.0" dependencies = [ "common", + "docblock-shared", "fixture-tests", "graphql-test-helpers", "intern", "serde", "thiserror", + "tokio", ] [[package]] name = "dunce" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "either" -version = "1.6.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] -name = "errno" -version = "0.2.8" +name = "equivalent" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "erased-serde" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55d05712b2d8d88102bc9868020c9e5c7a1f5527c452b9b97450a1d006140ba7" dependencies = [ - "errno-dragonfly", - "libc", - "winapi", + "serde", ] [[package]] -name = "errno-dragonfly" -version = "0.1.2" +name = 
"errno" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -463,16 +531,20 @@ dependencies = [ "docblock-syntax", "fixture-tests", "graphql-syntax", + "tokio", ] [[package]] name = "fastrand" -version = "1.7.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" -dependencies = [ - "instant", -] +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "fixture-tests" @@ -482,17 +554,17 @@ dependencies = [ "colored", "diff", "lazy_static", - "parking_lot 0.11.2", "signedsource", + "tokio", ] [[package]] name = "flatbuffers" -version = "2.0.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef4c5738bcd7fad10315029c50026f83c9da5e4a21f8ed66826f43e0e2bde5f6" +checksum = "86b428b715fdbdd1c364b84573b5fdc0f84f8e423661b9f398735278bc7f2b6a" dependencies = [ - "bitflags", + "bitflags 1.3.2", "smallvec", "thiserror", ] @@ -520,11 +592,10 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ - "matches", "percent-encoding", ] @@ -536,9 +607,9 @@ checksum = 
"3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -551,9 +622,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -561,15 +632,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -578,38 +649,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.48", ] [[package]] name = "futures-sink" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.24" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures 0.1.31", "futures-channel", @@ -626,9 +697,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -636,20 +707,26 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.2" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", - "wasi 0.10.2+wasi-snapshot-preview1", + "wasi", ] +[[package]] +name = 
"gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + [[package]] name = "glob" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "graphql-cli" @@ -669,7 +746,7 @@ dependencies = [ "fnv", "graphql-cli", "graphql-syntax", - "indexmap", + "indexmap 2.2.6", "intern", "lazy_static", "once_cell", @@ -677,6 +754,7 @@ dependencies = [ "schema", "serde", "thiserror", + "tokio", ] [[package]] @@ -693,9 +771,12 @@ dependencies = [ "graphql-test-helpers", "graphql-text-printer", "intern", + "relay-config", "relay-test-schema", "schema", + "serde", "thiserror", + "tokio", ] [[package]] @@ -709,6 +790,7 @@ dependencies = [ "logos", "serde", "thiserror", + "tokio", ] [[package]] @@ -717,11 +799,13 @@ version = "0.0.0" dependencies = [ "common", "fixture-tests", + "fnv", "graphql-cli", "graphql-ir", "graphql-syntax", "graphql-text-printer", "relay-test-schema", + "walkdir", ] [[package]] @@ -737,6 +821,7 @@ dependencies = [ "relay-test-schema", "relay-transforms", "schema", + "tokio", ] [[package]] @@ -751,20 +836,20 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.3" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "825343c4eef0b63f541f8903f395dc5beb362a979b5799a84062527ef1e37726" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "fnv", "futures-core", "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 2.2.6", "slab", "tokio", - "tokio-util", + "tokio-util 0.7.10", "tracing", ] @@ -773,35 +858,39 @@ name = "hashbrown" version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash", - "serde", -] [[package]] -name = "heck" -version = "0.3.3" +name = "hashbrown" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "unicode-segmentation", + "ahash", + "allocator-api2", + "serde", ] [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0c62115964e08cb8039170eb33c1d0e2388a256930279edca206fff675f82c3" + [[package]] name = "hex" version = "0.4.3" @@ -810,44 +899,45 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "http" -version = "0.2.3" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "fnv", - "itoa 0.4.7", + "itoa", ] [[package]] name = "http-body" -version = "0.4.0" +version = "0.4.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2861bd27ee074e5ee891e8b539837a9430012e249d7f0ca2d795650f579c1994" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "http", + "pin-project-lite", ] [[package]] name = "httparse" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.12" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13f67199e765030fa08fe0bd581af683f0d5bc04ea09c2b1102012c5fb90e7fd" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "futures-channel", "futures-core", "futures-util", @@ -856,7 +946,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 0.4.7", + "itoa", "pin-project-lite", "socket2", "tokio", @@ -871,44 +961,67 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "hyper", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + 
"iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "idna" -version = "0.2.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ - "matches", "unicode-bidi", "unicode-normalization", ] [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ - "arbitrary", "autocfg", - "hashbrown", - "rayon", - "serde", + "hashbrown 0.12.3", ] [[package]] -name = "instant" -version = "0.1.9" +name = "indexmap" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ - "cfg-if", + "arbitrary", + "equivalent", + "hashbrown 0.14.3", + "rayon", + "serde", ] [[package]] @@ -917,10 +1030,10 @@ version = "0.1.0" dependencies = [ "bincode", "fnv", - "hashbrown", - "indexmap", + "hashbrown 0.14.3", + "indexmap 2.2.6", "once_cell", - "parking_lot 0.11.2", + "parking_lot", "rand", "serde", "serde_bytes", @@ -936,15 +1049,26 @@ dependencies = [ "fnv", "lazy_static", "once_cell", - "parking_lot 0.11.2", + "parking_lot", "serde", ] +[[package]] +name = "inventory" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f958d3d68f4167080a18141e10381e7634563984a537f2a49a30fd8e53ac5767" + [[package]] name = "io-lifetimes" -version = "0.7.3" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ea37f355c05dde75b84bba2d767906ad522e97cd9e2eef2be7a4ab7fb442c06" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.5", + "libc", + "windows-sys 0.48.0", +] [[package]] name = "iovec" @@ -957,30 +1081,33 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ "either", ] [[package]] -name = "itoa" -version = "0.4.7" +name = "itertools" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] [[package]] name = "itoa" -version = "1.0.1" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobserver" -version = "0.1.24" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" dependencies = [ "libc", ] @@ -995,6 +1122,15 @@ dependencies = [ "thiserror", ] +[[package]] +name = "js-sys" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee" +dependencies = [ + "wasm-bindgen", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -1003,21 +1139,27 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.134" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "329c933548736bc49fd575ee68c89e8be4d260064184389a5b77517cddd99ffb" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "linux-raw-sys" -version = "0.0.46" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -1025,11 +1167,10 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" dependencies = [ - "cfg-if", "value-bag", ] @@ -1052,15 +1193,15 @@ dependencies = [ "fnv", "proc-macro2", "quote", - "regex-syntax", - "syn", + "regex-syntax 0.6.29", + "syn 1.0.109", ] [[package]] name = "lsp-server" -version = "0.5.2" +version = "0.7.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c351c75989da23b355226dc188dc2b52538a7f4f218d70fd7393c6b62b110444" +checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095" dependencies = [ "crossbeam-channel", "log", @@ -1070,11 +1211,11 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.93.2" +version = "0.94.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51" +checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" dependencies = [ - "bitflags", + "bitflags 1.3.2", "serde", "serde_json", "serde_repr", @@ -1087,53 +1228,53 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" -[[package]] -name = "matches" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" - [[package]] name = "md-5" -version = "0.10.1" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658646b21e0b72f7866c7038ab086d3d5e1cd6271f060fd37defb241949d0582" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if 1.0.0", "digest", ] [[package]] name = "memchr" -version = "2.4.1" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "memory_units" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" [[package]] -name = "memoffset" -version = "0.6.1" +name = "miniz_oxide" +version = "0.7.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ - "autocfg", + "adler", ] [[package]] name = "mio" -version = "0.8.4" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.36.1", + "wasi", + "windows-sys 0.48.0", ] [[package]] name = "native-tls" -version = "0.2.7" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d96b2e1c8da3957d58100b09f102c6d9cfdfced01b7ec5a8974044bb09dbd4" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" dependencies = [ "lazy_static", "libc", @@ -1148,76 +1289,86 @@ dependencies = [ ] [[package]] -name = "num-integer" -version = "0.1.44" +name = "num-traits" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" dependencies = [ "autocfg", - "num-traits", ] [[package]] -name = "num-traits" -version = "0.2.14" +name = "num_cpus" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "autocfg", + "hermit-abi 0.3.5", + "libc", ] [[package]] -name = "num_cpus" -version = "1.13.1" +name = "object" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ - "hermit-abi", - "libc", + "memchr", ] [[package]] name = "once_cell" -version = "1.16.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "openssl" -version = "0.10.32" +version = "0.10.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038d43985d1ddca7a9900630d8cd031b56e4794eecc2e9ea39dd17aa04399a70" +checksum = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" dependencies = [ - "bitflags", - "cfg-if", + "bitflags 2.4.2", + "cfg-if 1.0.0", "foreign-types", - "lazy_static", "libc", + "once_cell", + "openssl-macros", "openssl-sys", ] +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "openssl-probe" -version = "0.1.2" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "111.25.0+1.1.1t" +version = "300.2.2+3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3173cd3626c43e3854b1b727422a276e568d9ec5fe8cec197822cf52cfb743d6" +checksum = "8bbfad0063610ac26ee79f7484739e2b07555a75c42453b89263830b5c8103bc" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.60" +version = "0.9.99" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "921fc71883267538946025deffb622905ecad223c28efbfdef9bb59a0175f3e6" +checksum = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" dependencies = [ - "autocfg", "cc", "libc", "openssl-src", @@ -1227,42 +1378,33 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.0.0" +version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] name = "ouroboros" -version = "0.8.2" +version = "0.18.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "217b1cea6c9a366538f0a7149612444ac4fc254bf48448785b83000df8542f90" +checksum = "944fa20996a25aded6b4795c6d63f10014a7a83f8be9828a11860b08c5fc4a67" dependencies = [ + "aliasable", "ouroboros_macro", - "stable_deref_trait", + "static_assertions", ] [[package]] name = "ouroboros_macro" -version = "0.8.2" +version = "0.18.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "415c757b9596114edb32bd5332565eeefba79a69eb3c374d9876801af5bebcd3" +checksum = "39b0deead1528fd0e5947a8546a9642a9777c25f6e1e26f34c97b204bbb465bd" dependencies = [ - "Inflector", - "proc-macro-error", + "heck", + "itertools 0.12.1", "proc-macro2", + "proc-macro2-diagnostics", "quote", - "syn", -] - -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.5", + "syn 2.0.48", ] [[package]] @@ -1272,34 +1414,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", + 
"parking_lot_core", ] [[package]] name = "parking_lot_core" -version = "0.8.5" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "cfg-if", - "instant", + "cfg-if 1.0.0", "libc", "redox_syscall", "smallvec", - "winapi", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-sys 0.42.0", + "windows-targets 0.48.5", ] [[package]] @@ -1316,9 +1444,9 @@ checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "persist-query" @@ -1332,11 +1460,23 @@ dependencies = [ "url", ] +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.2.6", + "serde", + "serde_derive", +] + [[package]] name = "pin-project-lite" -version = "0.2.6" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc0e1f259c92177c30a4c9d177246edd0a3568b25756a977d0632cf8fa37e905" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -1346,15 +1486,15 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = 
"pkg-config" -version = "0.3.19" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c" +checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" [[package]] name = "ppv-lite86" -version = "0.2.10" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro-error" @@ -1365,7 +1505,7 @@ dependencies = [ "proc-macro-error-attr", "proc-macro2", "quote", - "syn", + "syn 1.0.109", "version_check", ] @@ -1382,39 +1522,51 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.49" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", + "version_check", + "yansi", +] + [[package]] name = "quote" -version = "1.0.9" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] [[package]] name = "rand" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +checksum = 
"34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", "rand_core", - "rand_hc", ] [[package]] name = "rand_chacha" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", @@ -1422,27 +1574,18 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" -dependencies = [ - "rand_core", -] - [[package]] name = "rayon" -version = "1.6.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -1450,45 +1593,61 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.10.1" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "regex" -version = "1.6.0" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.2", ] [[package]] name = "regex-syntax" -version = "0.6.27" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "relay" -version = "15.0.0" +version = "17.0.0" dependencies = [ "clap", "common", @@ -1508,13 +1667,14 @@ name = "relay-codegen" version = "0.0.0" dependencies = [ "common", + "docblock-shared", "fixture-tests", "fnv", "graphql-ir", "graphql-syntax", "graphql-test-helpers", "hex", - "indexmap", + "indexmap 2.2.6", "intern", "lazy_static", "md-5", @@ -1523,6 +1683,7 @@ dependencies = [ "relay-test-schema", "relay-transforms", "schema", + "tokio", ] [[package]] @@ -1535,13 +1696,15 @@ dependencies = [ "common-path", "dashmap", "dependency-analyzer", + "docblock-shared", "docblock-syntax", "dunce", 
"errors", "extract-graphql", "fixture-tests", "fnv", - "futures 0.3.24", + "futures 0.3.30", + "futures-util", "glob", "graphql-cli", "graphql-ir", @@ -1550,13 +1713,14 @@ dependencies = [ "graphql-text-printer", "graphql-watchman", "hex", - "indexmap", + "indexmap 2.2.6", "intern", "js-config-loader", "lazy_static", "log", "md-5", "persist-query", + "petgraph", "rayon", "regex", "relay-codegen", @@ -1566,8 +1730,10 @@ dependencies = [ "relay-test-schema", "relay-transforms", "relay-typegen", + "rustc-hash", "schema", "schema-diff", + "schema-validate-lib", "serde", "serde_bser", "serde_json", @@ -1581,20 +1747,43 @@ dependencies = [ "zstd", ] +[[package]] +name = "relay-compiler-playground" +version = "0.0.3" +dependencies = [ + "common", + "console_error_panic_hook", + "fnv", + "graphql-ir", + "graphql-syntax", + "graphql-text-printer", + "intern", + "relay-codegen", + "relay-config", + "relay-schema", + "relay-transforms", + "relay-typegen", + "schema", + "serde", + "serde_json", + "wasm-bindgen", + "wasm-bindgen-test", + "wee_alloc", +] + [[package]] name = "relay-config" version = "0.0.0" dependencies = [ "common", "fnv", - "indexmap", + "indexmap 2.2.6", "intern", "pathdiff", "regex", "serde", "serde_json", "strum", - "strum_macros", ] [[package]] @@ -1604,6 +1793,7 @@ dependencies = [ "common", "docblock-shared", "docblock-syntax", + "errors", "extract-graphql", "fixture-tests", "graphql-cli", @@ -1616,7 +1806,9 @@ dependencies = [ "relay-schema", "relay-test-schema", "schema", + "serde", "thiserror", + "tokio", ] [[package]] @@ -1638,7 +1830,7 @@ dependencies = [ "graphql-text-printer", "graphql-watchman", "intern", - "itertools", + "itertools 0.11.0", "log", "lsp-server", "lsp-types", @@ -1651,6 +1843,7 @@ dependencies = [ "relay-transforms", "resolution-path", "schema", + "schema-diff", "schema-documentation", "schema-print", "serde", @@ -1663,6 +1856,8 @@ name = "relay-schema" version = "0.0.0" dependencies = [ "common", + "docblock-shared", + 
"graphql-syntax", "intern", "lazy_static", "schema", @@ -1694,18 +1889,20 @@ dependencies = [ "graphql-syntax", "graphql-test-helpers", "graphql-text-printer", - "indexmap", + "indexmap 2.2.6", "intern", - "itertools", + "itertools 0.11.0", "lazy_static", - "once_cell", - "parking_lot 0.11.2", + "parking_lot", "regex", "relay-config", + "relay-schema", "relay-test-schema", "rustc-hash", "schema", + "serde", "thiserror", + "tokio", ] [[package]] @@ -1719,9 +1916,9 @@ dependencies = [ "graphql-ir", "graphql-syntax", "graphql-test-helpers", - "indexmap", + "indexmap 2.2.6", "intern", - "itertools", + "itertools 0.11.0", "lazy_static", "relay-codegen", "relay-config", @@ -1729,15 +1926,7 @@ dependencies = [ "relay-test-schema", "relay-transforms", "schema", -] - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", + "tokio", ] [[package]] @@ -1752,6 +1941,12 @@ dependencies = [ "schema", ] +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + [[package]] name = "rustc-hash" version = "1.1.0" @@ -1760,23 +1955,42 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" -version = "0.35.11" +version = "0.37.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb2fda4666def1433b1b05431ab402e42a1084285477222b72d6c564c417cef" +checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", - "linux-raw-sys", - "windows-sys 0.36.1", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", ] +[[package]] +name = "rustix" +version = "0.38.31" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + "bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys 0.4.13", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + [[package]] name = "ryu" -version = "1.0.5" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "same-file" @@ -1789,12 +2003,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.19" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - "lazy_static", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -1811,9 +2024,12 @@ dependencies = [ "intern", "lazy_static", "ouroboros", + "rayon", "schema-flatbuffer", + "serde", "strsim", "thiserror", + "tokio", ] [[package]] @@ -1826,6 +2042,7 @@ dependencies = [ "intern", "lazy_static", "relay-config", + "rustc-hash", "schema", ] @@ -1851,8 +2068,9 @@ dependencies = [ "fixture-tests", "fnv", "intern", - "itertools", + "itertools 0.11.0", "schema", + "tokio", ] [[package]] @@ -1863,28 +2081,36 @@ dependencies = [ "common", "fixture-tests", "fnv", + "graphql-cli", "intern", "lazy_static", "rayon", "regex", "schema", - "schema-print", + "serde", "thiserror", + "tokio", ] +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "security-framework" -version = "2.1.2" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d493c5f39e02dfb062cd8f33301f90f9b13b650e8c1b1d0fd75c19dd64bff69d" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -1893,9 +2119,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.1.1" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee48cdde5ed250b0d3252818f646e174ab414036edb884dde62d80a3ac6082d" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -1903,9 +2129,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.151" +version = "1.0.196" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0" +checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" dependencies = [ "serde_derive", ] @@ -1925,73 +2151,82 @@ dependencies = [ [[package]] name = "serde_bytes" -version = "0.11.5" +version = "0.11.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" dependencies = [ "serde", ] [[package]] name = "serde_derive" -version = "1.0.151" +version = "1.0.196" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8" +checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.48", +] + +[[package]] +name = "serde_fmt" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d4ddca14104cd60529e8c7f7ba71a2c8acd8f7f5cfcdc2faf97eeb7c3010a4" +dependencies = [ + "serde", ] [[package]] name = "serde_json" -version = "1.0.79" +version = "1.0.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" dependencies = [ - "itoa 1.0.1", + "itoa", "ryu", "serde", ] [[package]] name = "serde_repr" -version = "0.1.6" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc6b7951b17b051f3210b063f12cc17320e2fe30ae05b0fe2a3abb068551c76" +checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.48", ] [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] [[package]] name = "signal-hook-registry" 
-version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1d0fef1604ba8f7a073c7e701f213e056707210e9020af4528e0101ce11a6" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] @@ -2019,34 +2254,37 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.2" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] [[package]] name = "smallvec" -version = "1.6.1" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" dependencies = [ "serde", ] [[package]] name = "socket2" -version = "0.4.7" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "winapi", + "windows-sys 0.48.0", ] [[package]] -name = "stable_deref_trait" -version = "1.2.0" +name = "static_assertions" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" @@ -2056,33 +2294,120 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.21.0" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2" 
+checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +dependencies = [ + "strum_macros", +] [[package]] name = "strum_macros" -version = "0.21.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec" +checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" dependencies = [ - "heck 0.3.3", + "heck", "proc-macro2", "quote", - "syn", + "rustversion", + "syn 2.0.48", ] [[package]] name = "sval" -version = "1.0.0-alpha.5" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82a2386bea23a121e4e72450306b1dd01078b6399af11b93897bf84640a28a59" + +[[package]] +name = "sval_buffer" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16c047898a0e19002005512243bc9ef1c1037aad7d03d6c594e234efec80795" +dependencies = [ + "sval", + "sval_ref", +] + +[[package]] +name = "sval_dynamic" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a74fb116e2ecdcb280b0108aa2ee4434df50606c3208c47ac95432730eaac20c" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_fmt" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10837b4f0feccef271b2b1c03784e08f6d0bb6d23272ec9e8c777bfadbb8f1b8" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_json" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891f5ecdf34ce61a8ab2d10f9cfdc303347b0afec4dad6702757419d2d8312a9" +dependencies = [ + "itoa", + "ryu", + "sval", +] + +[[package]] +name = "sval_nested" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63fcffb4b79c531f38e3090788b64f3f4d54a180aacf02d69c42fa4e4bf284c3" +dependencies = [ + "sval", + "sval_buffer", + "sval_ref", +] + 
+[[package]] +name = "sval_ref" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af725f9c2aa7cec4ca9c47da2cc90920c4c82d3fa537094c66c77a5459f5809d" +dependencies = [ + "sval", +] + +[[package]] +name = "sval_serde" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7589c649a03d21df40b9a926787d2c64937fa1dccec8d87c6cd82989a2e0a4" +dependencies = [ + "serde", + "sval", + "sval_nested", +] + +[[package]] +name = "syn" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45f6ee7c7b87caf59549e9fe45d6a69c75c8019e79e212a835c5da0e92f0ba08" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] [[package]] name = "syn" -version = "1.0.107" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -2091,35 +2416,33 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.3.0" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", + "rustix 0.38.31", + "windows-sys 0.52.0", ] [[package]] name = "termcolor" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" dependencies = [ "winapi-util", ] [[package]] 
name = "terminal_size" -version = "0.2.1" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8440c860cf79def6164e4a0a983bcc2305d82419177a0e0c71930d049e3ac5a1" +checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" dependencies = [ - "rustix", - "windows-sys 0.36.1", + "rustix 0.37.27", + "windows-sys 0.48.0", ] [[package]] @@ -2134,86 +2457,75 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.37" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.37" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn", -] - -[[package]] -name = "time" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" -dependencies = [ - "libc", - "winapi", + "syn 2.0.48", ] [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 
[[package]] name = "tokio" -version = "1.25.0" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ - "autocfg", - "bytes 1.0.1", + "backtrace", + "bytes 1.5.0", "libc", - "memchr", "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", "tracing", - "windows-sys 0.42.0", + "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "1.7.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.48", ] [[package]] name = "tokio-native-tls" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", "tokio", @@ -2221,11 +2533,11 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.4" +version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec31e5cc6b46e653cf57762f36f71d5e6386391d88a72fd6db4508f8f676fb29" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" dependencies = [ - "bytes 1.0.1", + "bytes 1.5.0", "futures-core", "futures-io", "futures-sink", @@ -2235,157 +2547,292 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes 
1.5.0", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.25" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "pin-project-lite", "tracing-core", ] [[package]] name = "tracing-core" -version = "0.1.17" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ - "lazy_static", + "once_cell", ] [[package]] name = "try-lock" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.15.0" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "typetag" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" +checksum = "c43148481c7b66502c48f35b8eef38b6ccdc7a9f04bd4cc294226d901ccc9bc7" +dependencies = [ + "erased-serde", + "inventory", + "once_cell", + "serde", + "typetag-impl", +] + +[[package]] +name = 
"typetag-impl" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291db8a81af4840c10d636e047cac67664e343be44e24dfdbd1492df9a5d3390" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -dependencies = [ - "matches", -] +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.0" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.17" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = 
"e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "url" -version = "2.2.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", - "matches", "percent-encoding", "serde", ] [[package]] name = "value-bag" -version = "1.0.0-alpha.9" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" +dependencies = [ + "value-bag-serde1", + "value-bag-sval2", +] + +[[package]] +name = "value-bag-serde1" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede32f342edc46e84bd41fd394ce2192b553de11725dd83b6223150610c21b44" +dependencies = [ + "erased-serde", + "serde", + "serde_fmt", +] + +[[package]] +name = "value-bag-sval2" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0024e44b25144c2f4d0ed35d39688e0090d57753e20fef38d08e0c1a40bdf23d" dependencies = [ - "ctor", "sval", - "version_check", + "sval_buffer", + "sval_dynamic", + "sval_fmt", + "sval_json", + "sval_ref", + "sval_serde", ] [[package]] name = "vcpkg" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = 
"49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" -version = "2.3.2" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", - "winapi", "winapi-util", ] [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] [[package]] name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +name = "wasm-bindgen" +version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.48", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"877b9c3f61ceea0e56331985743b13f3d25c406a7098d45180fb5f09bc19ed97" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838" + +[[package]] +name = "wasm-bindgen-test" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "143ddeb4f833e2ed0d252e618986e18bfc7b0e52f2d28d77d05b2f045dd8eb61" +dependencies = [ + "console_error_panic_hook", + "js-sys", + "scoped-tls", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5211b7550606857312bba1d978a8ec75692eae187becc5e680444fffc5e6f89" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] [[package]] name = "watchman_client" @@ -2394,14 +2841,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "839fea2d85719bb69089290d7970bba2131f544448db8f990ea75813c30775ca" dependencies = [ "anyhow", - "bytes 1.0.1", - "futures 0.3.24", + "bytes 1.5.0", + "futures 0.3.30", "maplit", "serde", "serde_bser", "thiserror", "tokio", - "tokio-util", + "tokio-util 
0.6.10", + "winapi", +] + +[[package]] +name = "web-sys" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96565907687f7aceb35bc5fc03770a8a0471d82e479f25832f54a0e3f4b28446" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "wee_alloc" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbb3b5a6b2bb17cb6ad44a2e68a43e8d2722c997da10e928665c72ec6c0a0b8e" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "memory_units", "winapi", ] @@ -2423,9 +2892,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -2436,131 +2905,197 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-sys" -version = "0.36.1" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows_aarch64_msvc 0.36.1", - "windows_i686_gnu 0.36.1", - "windows_i686_msvc 0.36.1", - "windows_x86_64_gnu 0.36.1", - "windows_x86_64_msvc 0.36.1", + "windows-targets 0.48.5", ] [[package]] name = "windows-sys" -version = "0.42.0" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc 0.42.0", - "windows_i686_gnu 0.42.0", - "windows_i686_msvc 0.42.0", - "windows_x86_64_gnu 0.42.0", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc 0.42.0", + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.0" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" -version = "0.36.1" +version = "0.48.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.42.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.42.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.42.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +checksum = 
"53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.42.0" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" -version = "0.36.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "zerocopy" +version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] [[package]] name = "zstd" -version = "0.11.2+zstd.1.5.2" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" +checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "5.0.1+zstd.1.5.2" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c12659121420dd6365c5c3de4901f97145b79651fb1d25814020ed2ed0585ae" +checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" dependencies = [ - "libc", "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.1+zstd.1.5.2" +version = "2.0.9+zstd.1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fd07cbbc53846d9145dbffdf6dd09a7a0aa52be46741825f5c97bdd4f73f12b" +checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" dependencies = [ "cc", - "libc", + "pkg-config", ] diff --git a/compiler/Cargo.toml b/compiler/Cargo.toml index d9a1fdda6e819..e2efa8bd846c4 100644 --- a/compiler/Cargo.toml +++ b/compiler/Cargo.toml @@ -17,6 +17,7 @@ members = [ "crates/relay-bin", "crates/relay-codegen", "crates/relay-compiler", + "crates/relay-compiler-playground", "crates/relay-lsp", "crates/relay-schema", "crates/relay-test-schema", diff --git a/compiler/clippy.toml b/compiler/clippy.toml new file mode 100644 index 0000000000000..3c34d529adbb7 --- /dev/null +++ b/compiler/clippy.toml @@ -0,0 +1,5 @@ +too-many-lines-threshold = 200 +await-holding-invalid-types = [ + { path = "tracing::span::Entered", reason = "`Entered` is not aware when a function is suspended: 
https://docs.rs/tracing/latest/tracing/struct.Span.html#in-asynchronous-code" }, + { path = "tracing::span::EnteredSpan", reason = "`EnteredSpan` is not aware when a function is suspended: https://docs.rs/tracing/latest/tracing/struct.Span.html#in-asynchronous-code" }, +] diff --git a/compiler/crates/common/Cargo.toml b/compiler/crates/common/Cargo.toml index 696222259783c..5c1a89a1192dd 100644 --- a/compiler/crates/common/Cargo.toml +++ b/compiler/crates/common/Cargo.toml @@ -1,18 +1,21 @@ # @generated by autocargo from //relay/oss/crates/common:common + [package] name = "common" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] -colored = "1.9" -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +colored = "2.1.0" +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] } -lsp-types = "0.93.2" +lsp-types = "0.94.1" md-5 = "0.10" -rayon = "1.2" -serde = { version = "1.0.136", features = ["derive", "rc"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } +rayon = "1.9.0" +serde = { version = "1.0.185", features = ["derive", "rc"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } +typetag = "0.2.15" diff --git a/compiler/crates/common/src/console_logger.rs b/compiler/crates/common/src/console_logger.rs index c1b73b896c8a4..cb41383e24862 100644 --- a/compiler/crates/common/src/console_logger.rs +++ b/compiler/crates/common/src/console_logger.rs @@ -22,6 +22,9 @@ impl PerfLogEvent for ConsoleLogEvent { fn number(&self, name: &'static str, number: usize) { debug!("{}: {}", name, number); } + fn bool(&self, name: &'static str, value: bool) { + debug!("{}: {}", name, value); + } fn string(&self, name: &'static str, value: String) { debug!("{}: {}", 
name, value); } diff --git a/compiler/crates/common/src/diagnostic.rs b/compiler/crates/common/src/diagnostic.rs index 872c222655413..187d766f2b04c 100644 --- a/compiler/crates/common/src/diagnostic.rs +++ b/compiler/crates/common/src/diagnostic.rs @@ -12,6 +12,7 @@ use std::fmt::Write; use lsp_types::DiagnosticSeverity; use lsp_types::DiagnosticTag; +use serde::ser::SerializeMap; use serde_json::Value; use crate::Location; @@ -46,7 +47,7 @@ pub fn diagnostics_result(result: T, diagnostics: Diagnostics) -> Diagnostics /// A diagnostic message as a result of validating some code. This struct is /// modeled after the LSP Diagnostic type: -/// https://microsoft.github.io/language-server-protocol/specification#diagnostic +/// /// /// Changes from LSP: /// - `location` is different from LSP in that it's a file + span instead of @@ -127,6 +128,23 @@ impl Diagnostic { Diagnostic::with_severity(DiagnosticSeverity::HINT, message, location, tags) } + pub fn hint_with_data( + message: T, + location: Location, + tags: Vec, + ) -> Self { + let data = message.get_data(); + Self(Box::new(DiagnosticData { + message: Box::new(message), + location, + tags, + severity: DiagnosticSeverity::HINT, + related_information: Vec::new(), + data, + machine_readable: BTreeMap::new(), + })) + } + /// Annotates this error with an additional location and associated message. 
pub fn annotate( mut self, @@ -225,19 +243,19 @@ impl Diagnostic { let mut result = String::new(); writeln!( result, - "{message}:{location:?}", + "{message}: {location}", message = &self.0.message, - location = self.0.location + location = self.0.location.source_location().path() ) .unwrap(); if !self.0.related_information.is_empty() { for (ix, related) in self.0.related_information.iter().enumerate() { writeln!( result, - "[related {ix}] {message}:{location:?}", + "[related {ix}] {message}:{location}", ix = ix + 1, message = related.message, - location = related.location + location = related.location.source_location().path() ) .unwrap(); } @@ -252,6 +270,18 @@ impl fmt::Display for Diagnostic { } } +impl serde::Serialize for Diagnostic { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut diagnostic = serializer.serialize_map(Some(2))?; + diagnostic.serialize_entry("message", &self.0.message)?; + diagnostic.serialize_entry("location", &self.0.location)?; + diagnostic.end() + } +} + impl Error for Diagnostic {} // statically verify that the Diagnostic type is thread safe @@ -302,11 +332,14 @@ pub trait WithDiagnosticData { /// Trait for diagnostic messages to allow structs that capture /// some data and can lazily convert it to a message. +#[typetag::serialize(tag = "type")] pub trait DiagnosticDisplay: fmt::Debug + fmt::Display + Send + Sync {} /// Automatically implement the trait if constraints are met, so that /// implementors don't need to. 
-impl DiagnosticDisplay for T where T: fmt::Debug + fmt::Display + Send + Sync {} +#[typetag::serialize] +impl DiagnosticDisplay for T where T: fmt::Debug + fmt::Display + Send + Sync + typetag::Serialize +{} impl From for Diagnostics { fn from(diagnostic: Diagnostic) -> Self { diff --git a/compiler/crates/common/src/diagnostic_check.rs b/compiler/crates/common/src/diagnostic_check.rs index db01ee425dc88..288202376300f 100644 --- a/compiler/crates/common/src/diagnostic_check.rs +++ b/compiler/crates/common/src/diagnostic_check.rs @@ -160,7 +160,7 @@ mod escalate_tests { escalate(DiagnosticSeverity::WARNING, &mut diagnostics); - let expected_severities = vec![ + let expected_severities = [ DiagnosticSeverity::ERROR, DiagnosticSeverity::ERROR, DiagnosticSeverity::HINT, diff --git a/compiler/crates/common/src/feature_flags.rs b/compiler/crates/common/src/feature_flags.rs index da0fa493fea5a..765db44c5811e 100644 --- a/compiler/crates/common/src/feature_flags.rs +++ b/compiler/crates/common/src/feature_flags.rs @@ -20,20 +20,15 @@ use crate::Rollout; #[derive(Default, Debug, Serialize, Deserialize, Clone)] #[serde(deny_unknown_fields)] pub struct FeatureFlags { - #[serde(default)] - pub enable_flight_transform: bool, - #[serde(default)] pub enable_relay_resolver_transform: bool, - /// Enable deprecated `@outputType` on Relay Resolvers. #[serde(default)] - pub relay_resolver_enable_output_type: FeatureFlag, + pub enable_catch_directive_transform: FeatureFlag, - /// Enable hashing of the `supported` argument of 3D fields. Partial - /// enabling of the feature flag checks the name based on the field type. 
#[serde(default)] - pub hash_supported_argument: FeatureFlag, + // Enable returning interfaces from Relay Resolvers without @outputType + pub relay_resolver_enable_interface_output_type: FeatureFlag, /// For now, this also disallows fragments with variable definitions /// This also makes @module to opt in using @no_inline internally @@ -54,9 +49,6 @@ pub struct FeatureFlags { #[serde(default)] pub text_artifacts: FeatureFlag, - #[serde(default)] - pub enable_client_edges: FeatureFlag, - #[serde(default)] pub skip_printing_nulls: FeatureFlag, @@ -64,19 +56,88 @@ pub struct FeatureFlags { #[serde(default)] pub enable_fragment_aliases: FeatureFlag, + /// Enforce that you must add `@alias` to a fragment if it may not match, + /// due to type mismatch or `@skip`/`@include` + #[serde(default)] + pub enforce_fragment_alias_where_ambiguous: FeatureFlag, + /// Print queries in compact form #[serde(default)] pub compact_query_text: FeatureFlag, /// Create normalization nodes for client edges to client objects - #[serde(default)] + #[serde(default = "default_as_true")] pub emit_normalization_nodes_for_client_edges: bool, + + /// Fully build the normalization AST for Resolvers + #[serde(default)] + pub enable_resolver_normalization_ast: bool, + + /// Allow relay resolvers to extend the Mutation type + #[serde(default)] + pub enable_relay_resolver_mutations: bool, + + /// Perform strict validations when custom scalar types are used + #[serde(default)] + pub enable_strict_custom_scalars: bool, + + /// Relay Resolvers are a read-time feature that are not actually handled in + /// our mutation APIs. We are in the process of removing any existing + /// examples, but this flag is part of a process of removing any existing + /// examples. + #[serde(default)] + pub allow_resolvers_in_mutation_response: FeatureFlag, + + /// @required with an action of THROW is read-time feature that is not + /// compatible with our mutation APIs. 
We are in the process of removing + /// any existing examples, but this flag is part of a process of removing + /// any existing examples. + #[serde(default)] + pub allow_required_in_mutation_response: FeatureFlag, + + /// Mirror of `enable_resolver_normalization_ast` + /// excludes resolver metadata from reader ast + #[serde(default)] + pub disable_resolver_reader_ast: bool, + + /// Add support for parsing and transforming variable definitions on fragment + /// definitions and arguments on fragment spreads. + #[serde(default)] + pub enable_fragment_argument_transform: bool, + + /// Allow non-nullable return types from resolvers. + #[serde(default)] + pub allow_resolver_non_nullable_return_type: FeatureFlag, + + /// Disable validating the composite schema (server, client schema + /// extensions, Relay Resolvers) after its built. + #[serde(default)] + pub disable_schema_validation: bool, + + /// Disallow the `@required` directive on fields that are already non-null + /// in the schema. + #[serde(default)] + pub disallow_required_on_non_null_fields: bool, + + /// Feature flag to prefer `fetch_MyType()` generatior over `node()` query generator + /// in @refetchable transform + #[serde(default)] + pub prefer_fetchable_in_refetch_queries: bool, + + /// Disable validation of the `edgeTypeName` argument on `@prependNode` and `@appendNode`. 
+ #[serde(default)] + pub disable_edge_type_name_validation_on_declerative_connection_directives: FeatureFlag, } -#[derive(Debug, Deserialize, Clone, Serialize)] +fn default_as_true() -> bool { + true +} + +#[derive(Debug, Deserialize, Clone, Serialize, Default)] #[serde(tag = "kind", rename_all = "lowercase")] pub enum FeatureFlag { /// Fully disabled: developers may not use this feature + #[default] Disabled, /// Fully enabled: developers may use this feature @@ -89,12 +150,6 @@ pub enum FeatureFlag { Rollout { rollout: Rollout }, } -impl Default for FeatureFlag { - fn default() -> Self { - FeatureFlag::Disabled - } -} - impl FeatureFlag { pub fn is_enabled_for(&self, name: StringKey) -> bool { match self { diff --git a/compiler/crates/common/src/lib.rs b/compiler/crates/common/src/lib.rs index a4b03957cedca..c393e095904cb 100644 --- a/compiler/crates/common/src/lib.rs +++ b/compiler/crates/common/src/lib.rs @@ -30,7 +30,7 @@ pub use diagnostic::get_diagnostics_data; pub use diagnostic::Diagnostic; pub use diagnostic::DiagnosticDisplay; pub use diagnostic::DiagnosticRelatedInformation; -pub(crate) use diagnostic::Diagnostics; +pub use diagnostic::Diagnostics; pub use diagnostic::DiagnosticsResult; pub use diagnostic::WithDiagnosticData; pub use diagnostic::WithDiagnostics; @@ -54,6 +54,7 @@ pub use named_item::Named; pub use named_item::NamedItem; pub use named_item::ObjectName; pub use named_item::ScalarName; +pub use named_item::UnionName; pub use perf_logger::NoopPerfLogger; pub use perf_logger::NoopPerfLoggerEvent; pub use perf_logger::PerfLogEvent; diff --git a/compiler/crates/common/src/location.rs b/compiler/crates/common/src/location.rs index 68e30a70f0665..60d03a43aecd4 100644 --- a/compiler/crates/common/src/location.rs +++ b/compiler/crates/common/src/location.rs @@ -18,7 +18,19 @@ use crate::span::Span; /// The location of a source. Could be a standalone file (e.g. 
test.graphql), /// an embedded source (GraphQL tag in a JS file) or generated code without a /// location. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Copy, + Clone, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize, + serde::Deserialize +)] +#[serde(tag = "type")] pub enum SourceLocationKey { /// A source embedded within a file. The 0-based index is an index into the /// embedded sources. E.g. the second graphql tag has index 1. @@ -72,7 +84,7 @@ impl SourceLocationKey { /// An absolute source location describing both the file and position (span) /// with that file. -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize)] pub struct Location { /// The source containing this location (e.g. embedded or standalone file). source_location: SourceLocationKey, diff --git a/compiler/crates/common/src/named_item.rs b/compiler/crates/common/src/named_item.rs index 516c092c60544..b5bd57c11b804 100644 --- a/compiler/crates/common/src/named_item.rs +++ b/compiler/crates/common/src/named_item.rs @@ -63,7 +63,18 @@ impl FromStr for DirectiveName { } impl_lookup!(DirectiveName); -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Deserialize, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Serialize +)] pub struct ArgumentName(pub StringKey); impl fmt::Display for ArgumentName { @@ -94,7 +105,17 @@ impl fmt::Display for ScalarName { } } impl_lookup!(ArgumentName); -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] pub struct ObjectName(pub StringKey); impl fmt::Display for ObjectName { @@ -126,7 +147,17 @@ impl fmt::Display for EnumName { } impl_lookup!(EnumName); -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + 
Copy, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] pub struct InterfaceName(pub StringKey); impl fmt::Display for InterfaceName { @@ -136,3 +167,25 @@ impl fmt::Display for InterfaceName { } impl_lookup!(InterfaceName); + +#[derive( + Clone, + Copy, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Serialize, + Deserialize +)] +pub struct UnionName(pub StringKey); + +impl fmt::Display for UnionName { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(fmt, "{}", self.0) + } +} + +impl_lookup!(UnionName); diff --git a/compiler/crates/common/src/perf_logger.rs b/compiler/crates/common/src/perf_logger.rs index fc090189556fc..e640470a6e05c 100644 --- a/compiler/crates/common/src/perf_logger.rs +++ b/compiler/crates/common/src/perf_logger.rs @@ -17,6 +17,9 @@ pub trait PerfLogEvent: Send + Sync { /// Log number fn number(&self, name: &'static str, number: usize); + // Log boolean + fn bool(&self, name: &'static str, value: bool); + /// Provides a possibility to log additional fields describing current run (like, project name) fn string(&self, name: &'static str, value: String); @@ -53,6 +56,7 @@ pub struct NoopPerfLoggerEvent; impl PerfLogEvent for NoopPerfLoggerEvent { type Timer = (); fn number(&self, _name: &'static str, _number: usize) {} + fn bool(&self, _name: &'static str, _value: bool) {} fn string(&self, _name: &'static str, _value: String) {} fn start(&self, _name: &'static str) -> Self::Timer {} fn stop(&self, _timer: Self::Timer) {} diff --git a/compiler/crates/common/src/span.rs b/compiler/crates/common/src/span.rs index 6a3fe1706a356..6b19ee8a752be 100644 --- a/compiler/crates/common/src/span.rs +++ b/compiler/crates/common/src/span.rs @@ -7,7 +7,7 @@ use std::fmt; -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize)] pub struct Span { pub start: u32, pub end: u32, diff --git 
a/compiler/crates/common/src/text_source.rs b/compiler/crates/common/src/text_source.rs index e4b2063e76f1c..42f8410ac95b6 100644 --- a/compiler/crates/common/src/text_source.rs +++ b/compiler/crates/common/src/text_source.rs @@ -71,6 +71,10 @@ impl TextSource { Range::new(start_position, end_position) } + /** + * Converts span, which is the relative indices of characters within this text source, + * into the equivalent line and character number range. + */ pub fn to_span_range(&self, span: Span) -> lsp_types::Range { let start = span.start as usize; let end = span.end as usize; diff --git a/compiler/crates/dependency-analyzer/Cargo.toml b/compiler/crates/dependency-analyzer/Cargo.toml index f3b13318514a5..70a1ef61d5a38 100644 --- a/compiler/crates/dependency-analyzer/Cargo.toml +++ b/compiler/crates/dependency-analyzer/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/dependency-analyzer:[dependency-analyzer,dependency-analyzer-ast,dependency-analyzer-ir] + [package] name = "dependency-analyzer" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -15,14 +17,17 @@ name = "dependency_analyzer_ir" path = "tests/ir_test.rs" [dependencies] +common = { path = "../common" } graphql-ir = { path = "../graphql-ir" } graphql-syntax = { path = "../graphql-syntax" } relay-transforms = { path = "../relay-transforms" } rustc-hash = "1.1.0" schema = { path = "../schema" } +schema-diff = { path = "../schema-diff" } +serde = { version = "1.0.185", features = ["derive", "rc"] } [dev-dependencies] -common = { path = "../common" } fixture-tests = { path = "../fixture-tests" } intern = { path = "../intern" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/dependency-analyzer/src/ir.rs b/compiler/crates/dependency-analyzer/src/ir.rs index b1bcd5a197c1f..70d8780b4ab71 
100644 --- a/compiler/crates/dependency-analyzer/src/ir.rs +++ b/compiler/crates/dependency-analyzer/src/ir.rs @@ -9,12 +9,16 @@ use std::collections::hash_map::Entry; use std::collections::HashMap; use std::fmt; +use common::PerfLogEvent; use graphql_ir::*; use relay_transforms::get_resolver_fragment_dependency_name; use rustc_hash::FxHashMap; use rustc_hash::FxHashSet; use schema::SDLSchema; use schema::Schema; +use schema_diff::check; + +use crate::schema_change_analyzer; pub type ExecutableDefinitionNameSet = FxHashSet; pub type ExecutableDefinitionNameMap = FxHashMap; @@ -48,38 +52,48 @@ pub fn get_reachable_ir( base_definition_names: ExecutableDefinitionNameSet, changed_names: ExecutableDefinitionNameSet, schema: &SDLSchema, + schema_changes: FxHashSet, + log_event: &impl PerfLogEvent, ) -> Vec { - if changed_names.is_empty() { - return vec![]; - } + let timer = log_event.start("get_reachable_ir_time"); + let result = if changed_names.is_empty() && schema_changes.is_empty() { + vec![] + } else { + let mut all_changed_names: ExecutableDefinitionNameSet = + schema_change_analyzer::get_affected_definitions(schema, &definitions, schema_changes); + all_changed_names.extend(changed_names); - // For each executable definition, define a `Node` indicating its parents and children - // Note: There are situations where a name in `changed_names` may not appear - // in `definitions`, and thus would be missing from `dependency_graph`. This can arise - // if you change a file which contains a fragment which is present in the - // base project, but is not reachable from any of the project's own - // queries/mutations. - let dependency_graph = build_dependency_graph(schema, definitions); + // For each executable definition, define a `Node` indicating its parents and children + // Note: There are situations where a name in `changed_names` may not appear + // in `definitions`, and thus would be missing from `dependency_graph`. 
This can arise + // if you change a file which contains a fragment which is present in the + // base project, but is not reachable from any of the project's own + // queries/mutations. + let dependency_graph = build_dependency_graph(schema, definitions); - let mut visited = Default::default(); - let mut filtered_definitions = Default::default(); + let mut visited = Default::default(); + let mut filtered_definitions = Default::default(); - for key in changed_names.into_iter() { - if dependency_graph.contains_key(&key) { - add_related_nodes( - &mut visited, - &mut filtered_definitions, - &dependency_graph, - &base_definition_names, - key, - ); + for key in all_changed_names.into_iter() { + if dependency_graph.contains_key(&key) { + add_related_nodes( + &mut visited, + &mut filtered_definitions, + &dependency_graph, + &base_definition_names, + key, + ); + } } - } - filtered_definitions - .drain() - .map(|(_, definition)| definition) - .collect() + filtered_definitions + .drain() + .map(|(_, definition)| definition) + .collect() + }; + + log_event.stop(timer); + result } // Build a dependency graph of that nodes are "doubly linked" @@ -189,7 +203,6 @@ fn visit_selections( Selection::LinkedField(linked_field) => { if let Some(fragment_name) = get_resolver_fragment_dependency_name( schema.field(linked_field.definition.item), - schema, ) { update_dependency_graph( fragment_name.into(), @@ -209,7 +222,6 @@ fn visit_selections( Selection::ScalarField(scalar_field) => { if let Some(fragment_name) = get_resolver_fragment_dependency_name( schema.field(scalar_field.definition.item), - schema, ) { update_dependency_graph( fragment_name.into(), @@ -293,3 +305,55 @@ fn add_descendants( } } } + +/// Get fragment references of each definition +pub fn get_ir_definition_references<'a>( + schema: &SDLSchema, + definitions: impl IntoIterator, +) -> ExecutableDefinitionNameMap { + let mut result: ExecutableDefinitionNameMap = Default::default(); + for definition in definitions { + let 
name = definition.name_with_location().item; + let name = match definition { + ExecutableDefinition::Operation(_) => OperationDefinitionName(name).into(), + ExecutableDefinition::Fragment(_) => FragmentDefinitionName(name).into(), + }; + let mut selections: Vec<_> = match definition { + ExecutableDefinition::Operation(definition) => &definition.selections, + ExecutableDefinition::Fragment(definition) => &definition.selections, + } + .iter() + .collect(); + let mut references: ExecutableDefinitionNameSet = Default::default(); + while let Some(selection) = selections.pop() { + match selection { + Selection::FragmentSpread(selection) => { + references.insert(selection.fragment.item.into()); + } + Selection::LinkedField(selection) => { + if let Some(fragment_name) = get_resolver_fragment_dependency_name( + schema.field(selection.definition.item), + ) { + references.insert(fragment_name.into()); + } + selections.extend(&selection.selections); + } + Selection::InlineFragment(selection) => { + selections.extend(&selection.selections); + } + Selection::Condition(selection) => { + selections.extend(&selection.selections); + } + Selection::ScalarField(selection) => { + if let Some(fragment_name) = get_resolver_fragment_dependency_name( + schema.field(selection.definition.item), + ) { + references.insert(fragment_name.into()); + } + } + } + } + result.insert(name, references); + } + result +} diff --git a/compiler/crates/dependency-analyzer/src/lib.rs b/compiler/crates/dependency-analyzer/src/lib.rs index 2a116a959a0f3..f34d5b28d220a 100644 --- a/compiler/crates/dependency-analyzer/src/lib.rs +++ b/compiler/crates/dependency-analyzer/src/lib.rs @@ -12,11 +12,15 @@ mod ast; mod ir; +mod minimized_executable; +mod schema_change_analyzer; pub use ast::get_definition_references; pub use ast::get_reachable_ast; pub use ast::ReachableAst; +pub use ir::get_ir_definition_references; pub use ir::get_reachable_ir; pub use ir::ExecutableDefinitionNameMap; pub use 
ir::ExecutableDefinitionNameSet; pub use ir::ExecutableDefinitionNameVec; +pub use minimized_executable::MinProgram; diff --git a/compiler/crates/dependency-analyzer/src/minimized_executable.rs b/compiler/crates/dependency-analyzer/src/minimized_executable.rs new file mode 100644 index 0000000000000..b50793e989fa7 --- /dev/null +++ b/compiler/crates/dependency-analyzer/src/minimized_executable.rs @@ -0,0 +1,231 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::Location; +use graphql_ir::*; +use relay_transforms::Programs; +use schema::SDLSchema; +use schema::Schema; +use serde::Serialize; + +#[derive(Serialize)] +pub struct MinProgram { + pub definitions: Vec, +} + +impl MinProgram { + pub fn from_programs(programs: &Programs) -> Self { + MinProgram { + definitions: programs + .source + .operations + .iter() + .map(|o| { + MinExecutableDefinition::Operation( + MinOperationDefinition::from_operation_definition( + o, + &programs.source.schema, + ), + ) + }) + .chain(programs.source.fragments.values().map(|f| { + MinExecutableDefinition::Fragment( + MinFragmentDefinition::from_fragment_definition(f, &programs.source.schema), + ) + })) + .collect(), + } + } +} +#[derive(Serialize)] +pub enum MinExecutableDefinition { + Operation(MinOperationDefinition), + Fragment(MinFragmentDefinition), +} + +#[derive(Serialize)] +pub struct MinOperationDefinition { + pub operation: String, + pub name: String, + pub location: Location, + pub type_: String, + pub selections: Vec, +} + +impl MinOperationDefinition { + fn from_operation_definition(op: &Arc, schema: &Arc) -> Self { + MinOperationDefinition { + operation: op.kind.to_string(), + name: op.name.item.to_string(), + location: op.name.location, + type_: schema.get_type_name(op.type_).to_string(), + selections: op + .selections + .iter() + 
.map(|s| MinSelection::from_selection(s, schema)) + .collect::>(), + } + } +} + +#[derive(Serialize)] +pub struct MinFragmentDefinition { + pub name: String, + pub location: Location, + pub type_: String, + pub selections: Vec, +} + +impl MinFragmentDefinition { + fn from_fragment_definition( + fragment: &Arc, + schema: &Arc, + ) -> Self { + MinFragmentDefinition { + name: fragment.name.item.0.to_string(), + location: fragment.name.location, + type_: schema.get_type_name(fragment.type_condition).to_string(), + selections: fragment + .selections + .iter() + .map(|s| MinSelection::from_selection(s, schema)) + .collect::>(), + } + } +} + +#[derive(Serialize)] +pub struct MinInlineFragment { + pub type_: Option, + pub selections: Vec, +} + +impl MinInlineFragment { + fn from_inline_fragment(fragment: &InlineFragment, schema: &Arc) -> Self { + MinInlineFragment { + type_: fragment + .type_condition + .map(|t| schema.get_type_name(t).to_string()), + selections: fragment + .selections + .iter() + .map(|s| MinSelection::from_selection(s, schema)) + .collect::>(), + } + } +} + +#[derive(Serialize)] +pub struct MinLinkedField { + pub name: String, + pub type_: String, + pub selections: Vec, +} + +impl MinLinkedField { + fn from_linked_field(field: &LinkedField, schema: &Arc) -> Self { + MinLinkedField { + name: schema.field(field.definition.item).name.item.to_string(), + type_: schema + .get_type_name(schema.field(field.definition.item).type_.inner()) + .to_string(), + selections: field + .selections + .iter() + .map(|s| MinSelection::from_selection(s, schema)) + .collect::>(), + } + } +} + +#[derive(Serialize)] +pub enum MinSelection { + FragmentSpread(MinFragmentSpread), + InlineFragment(MinInlineFragment), + LinkedField(MinLinkedField), + ScalarField(MinScalarField), + Condition(MinCondition), +} + +impl MinSelection { + fn from_selection(selection: &Selection, schema: &Arc) -> Self { + match selection { + Selection::FragmentSpread(fragment_spread) => 
MinSelection::FragmentSpread( + MinFragmentSpread::from_fragment_spread(fragment_spread), + ), + Selection::InlineFragment(inline_fragment) => MinSelection::InlineFragment( + MinInlineFragment::from_inline_fragment(inline_fragment, schema), + ), + Selection::LinkedField(linked_field) => { + MinSelection::LinkedField(MinLinkedField::from_linked_field(linked_field, schema)) + } + Selection::ScalarField(scalar_field) => { + MinSelection::ScalarField(MinScalarField::from_scalar_field(scalar_field, schema)) + } + Selection::Condition(condition) => { + MinSelection::Condition(MinCondition::from_condition(condition, schema)) + } + } + } +} + +#[derive(Serialize)] +pub struct MinScalarField { + pub name: String, +} + +impl MinScalarField { + fn from_scalar_field(field: &ScalarField, schema: &SDLSchema) -> Self { + MinScalarField { + name: schema.field(field.definition.item).name.item.to_string(), + } + } +} + +#[derive(Serialize)] +pub struct MinFragmentSpread { + pub name: String, +} + +impl MinFragmentSpread { + fn from_fragment_spread(fragment_spread: &FragmentSpread) -> Self { + MinFragmentSpread { + name: fragment_spread.fragment.item.0.to_string(), + } + } +} + +#[derive(Serialize)] +pub struct MinCondition { + pub name: Option, + pub type_: Option, + pub selections: Vec, +} + +impl MinCondition { + fn from_condition(condition: &Condition, schema: &Arc) -> Self { + MinCondition { + name: match &condition.value { + ConditionValue::Constant(_) => None, + ConditionValue::Variable(v) => Some(v.name.item.0.to_string()), + }, + type_: match &condition.value { + ConditionValue::Constant(_) => None, + ConditionValue::Variable(v) => { + Some(schema.get_type_name(v.type_.inner()).to_string()) + } + }, + selections: condition + .selections + .iter() + .map(|s| MinSelection::from_selection(s, schema)) + .collect::>(), + } + } +} diff --git a/compiler/crates/dependency-analyzer/src/schema_change_analyzer.rs b/compiler/crates/dependency-analyzer/src/schema_change_analyzer.rs new 
file mode 100644 index 0000000000000..822a22b8d5d2b --- /dev/null +++ b/compiler/crates/dependency-analyzer/src/schema_change_analyzer.rs @@ -0,0 +1,150 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::collections::HashSet; + +use graphql_ir::*; +use rustc_hash::FxHashSet; +use schema::definitions::Type; +use schema::SDLSchema; +use schema::Schema; +use schema_diff::check::IncrementalBuildSchemaChange; + +use crate::ExecutableDefinitionNameSet; + +pub fn get_affected_definitions( + schema: &SDLSchema, + definitions: &[ExecutableDefinition], + schema_changes: FxHashSet, +) -> ExecutableDefinitionNameSet { + SchemaChangeDefinitionFinder::get_definitions(schema, definitions, schema_changes) +} + +struct SchemaChangeDefinitionFinder<'a, 'b> { + changed_definitions: ExecutableDefinitionNameSet, + current_executable: &'a ExecutableDefinition, + schema: &'b SDLSchema, + schema_changes: FxHashSet, +} + +impl SchemaChangeDefinitionFinder<'_, '_> { + fn get_definitions( + schema: &SDLSchema, + definitions: &[ExecutableDefinition], + schema_changes: FxHashSet, + ) -> ExecutableDefinitionNameSet { + if definitions.is_empty() || schema_changes.is_empty() { + return HashSet::default(); + } + + let mut finder = SchemaChangeDefinitionFinder { + changed_definitions: HashSet::default(), + current_executable: &definitions[0], + schema, + schema_changes, + }; + for def in definitions.iter() { + finder.current_executable = def; + match def { + ExecutableDefinition::Operation(operation) => finder.visit_operation(operation), + ExecutableDefinition::Fragment(fragment) => finder.visit_fragment(fragment), + }; + } + finder.changed_definitions + } + + fn get_name_from_executable(&self) -> ExecutableDefinitionName { + match self.current_executable { + ExecutableDefinition::Operation(node) => { + 
ExecutableDefinitionName::OperationDefinitionName(node.name.item) + } + ExecutableDefinition::Fragment(node) => { + ExecutableDefinitionName::FragmentDefinitionName(node.name.item) + } + } + } + + fn add_type_changes(&mut self, type_: Type) { + match type_ { + Type::Object(id) => { + let object_type = self.schema.object(id); + let key = object_type.name.item.0; + if self + .schema_changes + .contains(&IncrementalBuildSchemaChange::Object(key)) + { + self.changed_definitions + .insert(self.get_name_from_executable()); + } + } + Type::Union(id) => { + let union_name = self.schema.union(id).name.item.0; + if self + .schema_changes + .contains(&IncrementalBuildSchemaChange::Union(union_name)) + { + self.changed_definitions + .insert(self.get_name_from_executable()); + } + } + Type::Interface(id) => { + let interface_name = self.schema.interface(id).name.item.0; + if self + .schema_changes + .contains(&IncrementalBuildSchemaChange::Interface(interface_name)) + { + self.changed_definitions + .insert(self.get_name_from_executable()); + } + } + Type::Enum(id) => { + let enum_type = self.schema.enum_(id); + let key = enum_type.name.item.0; + if self + .schema_changes + .contains(&IncrementalBuildSchemaChange::Enum(key)) + { + self.changed_definitions + .insert(self.get_name_from_executable()); + } + } + Type::InputObject(_) | Type::Scalar(_) => (), + } + } +} + +impl Visitor for SchemaChangeDefinitionFinder<'_, '_> { + const NAME: &'static str = "DependencyAnalyzerSchemaChangeDefinitionFinder"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn visit_linked_field(&mut self, field: &LinkedField) { + let id = field.definition.item; + let type_ = self.schema.field(id).type_.inner(); + self.add_type_changes(type_); + self.default_visit_linked_field(field); + } + + fn visit_fragment(&mut self, fragment: &FragmentDefinition) { + self.add_type_changes(fragment.type_condition); + self.default_visit_fragment(fragment); + } + + fn 
visit_inline_fragment(&mut self, fragment: &InlineFragment) { + if let Some(type_) = fragment.type_condition { + self.add_type_changes(type_); + } + self.default_visit_inline_fragment(fragment); + } + + fn visit_scalar_field(&mut self, field: &ScalarField) { + let id = field.definition.item; + let type_ = self.schema.field(id).type_.inner(); + self.add_type_changes(type_); + self.default_visit_scalar_field(field); + } +} diff --git a/compiler/crates/dependency-analyzer/tests/ast.rs b/compiler/crates/dependency-analyzer/tests/ast.rs new file mode 100644 index 0000000000000..484fe7347ac94 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ast.rs @@ -0,0 +1,43 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use dependency_analyzer::get_reachable_ast; +use dependency_analyzer::ReachableAst; +use fixture_tests::Fixture; +use graphql_syntax::*; +use intern::Lookup; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<&str> = fixture.content.split("%definitions%").collect(); + + let source_location = SourceLocationKey::standalone(fixture.file_name); + let definitions = parse_executable(parts[0], source_location).unwrap(); + let base_definitions = parts + .iter() + .skip(1) + .flat_map(|part| parse_executable(part, source_location).unwrap().definitions) + .collect(); + let ReachableAst { + definitions: result, + base_fragment_names, + } = get_reachable_ast(definitions.definitions, base_definitions); + + let mut texts = result + .into_iter() + .map(|def| def.name().unwrap().to_string()) + .collect::>(); + texts.sort_unstable(); + texts.push("========== Base definitions ==========".to_string()); + let mut defs = base_fragment_names + .iter() + .map(|key| key.0.lookup()) + .collect::>(); + defs.sort_unstable(); + texts.push(defs.join(", ")); + 
Ok(texts.join("\n")) +} diff --git a/compiler/crates/dependency-analyzer/tests/ast/mod.rs b/compiler/crates/dependency-analyzer/tests/ast/mod.rs deleted file mode 100644 index 5c038f305d45d..0000000000000 --- a/compiler/crates/dependency-analyzer/tests/ast/mod.rs +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use dependency_analyzer::get_reachable_ast; -use dependency_analyzer::ReachableAst; -use fixture_tests::Fixture; -use graphql_syntax::*; -use intern::Lookup; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<&str> = fixture.content.split("%definitions%").collect(); - - let source_location = SourceLocationKey::standalone(fixture.file_name); - let definitions = parse_executable(parts[0], source_location).unwrap(); - let base_definitions = parts - .iter() - .skip(1) - .flat_map(|part| parse_executable(part, source_location).unwrap().definitions) - .collect(); - let ReachableAst { - definitions: result, - base_fragment_names, - } = get_reachable_ast(definitions.definitions, base_definitions); - - let mut texts = result - .into_iter() - .map(|def| def.name().unwrap().to_string()) - .collect::>(); - texts.sort_unstable(); - texts.push("========== Base definitions ==========".to_string()); - let mut defs = base_fragment_names - .iter() - .map(|key| key.0.lookup()) - .collect::>(); - defs.sort_unstable(); - texts.push(defs.join(", ")); - Ok(texts.join("\n")) -} diff --git a/compiler/crates/dependency-analyzer/tests/ast_test.rs b/compiler/crates/dependency-analyzer/tests/ast_test.rs index 9902f3bf84213..d31a468eaaeaa 100644 --- a/compiler/crates/dependency-analyzer/tests/ast_test.rs +++ b/compiler/crates/dependency-analyzer/tests/ast_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * 
LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<3ecc95204640f547290f1ab25b144c25>> + * @generated SignedSource<> */ mod ast; @@ -12,30 +12,30 @@ mod ast; use ast::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn base_definitions() { +#[tokio::test] +async fn base_definitions() { let input = include_str!("ast/fixtures/base-definitions.graphql"); let expected = include_str!("ast/fixtures/base-definitions.expected"); - test_fixture(transform_fixture, "base-definitions.graphql", "ast/fixtures/base-definitions.expected", input, expected); + test_fixture(transform_fixture, file!(), "base-definitions.graphql", "ast/fixtures/base-definitions.expected", input, expected).await; } -#[test] -fn definitions_only() { +#[tokio::test] +async fn definitions_only() { let input = include_str!("ast/fixtures/definitions-only.graphql"); let expected = include_str!("ast/fixtures/definitions-only.expected"); - test_fixture(transform_fixture, "definitions-only.graphql", "ast/fixtures/definitions-only.expected", input, expected); + test_fixture(transform_fixture, file!(), "definitions-only.graphql", "ast/fixtures/definitions-only.expected", input, expected).await; } -#[test] -fn missing_fragments() { +#[tokio::test] +async fn missing_fragments() { let input = include_str!("ast/fixtures/missing-fragments.graphql"); let expected = include_str!("ast/fixtures/missing-fragments.expected"); - test_fixture(transform_fixture, "missing-fragments.graphql", "ast/fixtures/missing-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-fragments.graphql", "ast/fixtures/missing-fragments.expected", input, expected).await; } -#[test] -fn multiple_base_definitions() { +#[tokio::test] +async fn multiple_base_definitions() { let input = include_str!("ast/fixtures/multiple-base-definitions.graphql"); let expected = include_str!("ast/fixtures/multiple-base-definitions.expected"); - test_fixture(transform_fixture, 
"multiple-base-definitions.graphql", "ast/fixtures/multiple-base-definitions.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-base-definitions.graphql", "ast/fixtures/multiple-base-definitions.expected", input, expected).await; } diff --git a/compiler/crates/dependency-analyzer/tests/ir.rs b/compiler/crates/dependency-analyzer/tests/ir.rs new file mode 100644 index 0000000000000..b76ee30954807 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir.rs @@ -0,0 +1,156 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::collections::HashSet; +use std::sync::Arc; + +use common::NoopPerfLoggerEvent; +use common::SourceLocationKey; +use dependency_analyzer::*; +use fixture_tests::Fixture; +use graphql_ir::*; +use graphql_syntax::parse_executable; +use intern::string_key::Intern; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use rustc_hash::FxHashSet; +use schema::SDLSchema; +use schema_diff::check::IncrementalBuildSchemaChange; + +fn format_definition(def: ExecutableDefinition) -> String { + match def { + ExecutableDefinition::Operation(operation) => { + format!("Operation: {}", operation.name.item) + } + ExecutableDefinition::Fragment(fragment) => format!("Fragment: {}", fragment.name.item), + } +} + +// Parses the schema extension changes. 
+// The schema extension section should begin with: +// # : changed1, changed2 +// # : changed3 +fn parse_schema_changes(extension_content: &str) -> FxHashSet { + let mut changes: FxHashSet = HashSet::default(); + let lines: Vec<&str> = extension_content.lines().collect(); + for line in lines.iter() { + if line.trim() == "" { + continue; + } + if !line.trim().starts_with('#') { + break; + } + let line_parts: Vec<&str> = line.split(':').collect(); + let type_ = line_parts[0]; + let changed_names = line_parts[1].split(',').map(|name| name.trim().intern()); + // slice to skip the "#" + match type_[1..].trim() { + "Enum" => { + for key in changed_names { + changes.insert(IncrementalBuildSchemaChange::Enum(key)); + } + } + "Object" => { + for key in changed_names { + changes.insert(IncrementalBuildSchemaChange::Object(key)); + } + } + "Union" => { + for key in changed_names { + changes.insert(IncrementalBuildSchemaChange::Union(key)); + } + } + "Interface" => { + for key in changed_names { + changes.insert(IncrementalBuildSchemaChange::Interface(key)); + } + } + _ => panic!( + "Included an incremental change for a schema type that does not have incremental builds" + ), + } + } + changes +} + +// TODO: Test without using snapshot tests +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts = fixture.content.split("%extensions%").collect::>(); + + let (content, schema, schema_changes): ( + &str, + Arc, + FxHashSet, + ) = match parts.as_slice() { + [content] => (content, get_test_schema(), HashSet::default()), + [content, extension_content] => { + let schema_changes = parse_schema_changes(extension_content); + ( + content, + get_test_schema_with_extensions(extension_content), + schema_changes, + ) + } + _ => panic!("Expected one optional \"%extensions%\" section in the fixture."), + }; + + let parts: Vec<&str> = content.split("%definitions%").collect(); + let first_line: &str = content.lines().next().unwrap(); + + let changed_names = first_line[1..] 
+ .trim() + .split(',') + .map(|name| name.trim()) + .filter(|name| !name.is_empty()) + .flat_map(|name| { + // Note: this is a bit of a hack! Here, we don't know whether the stringkey represents + // a fragment or operation name, so we mark both "a fragment named X" and "a query named + // X" as having changed. + vec![ + FragmentDefinitionName(name.intern()).into(), + OperationDefinitionName(name.intern()).into(), + ] + }) + .collect(); + + let source_location = SourceLocationKey::standalone(fixture.file_name); + let mut asts = parse_executable(parts[0], source_location) + .unwrap() + .definitions; + let mut base_names: ExecutableDefinitionNameSet = Default::default(); + for part in parts.iter().skip(1) { + let defs = parse_executable(part, source_location).unwrap().definitions; + for def in defs { + base_names.insert(match &def { + graphql_syntax::ExecutableDefinition::Operation(node) => { + OperationDefinitionName(node.name.clone().unwrap().value).into() + } + graphql_syntax::ExecutableDefinition::Fragment(node) => { + FragmentDefinitionName(node.name.value).into() + } + }); + asts.push(def); + } + } + + let definitions = build(&schema, &asts).unwrap(); + let result = get_reachable_ir( + definitions, + base_names, + changed_names, + &schema, + schema_changes, + &NoopPerfLoggerEvent, + ); + + let mut texts = result + .into_iter() + .map(format_definition) + .collect::>(); + texts.sort(); + Ok(texts.join("\n\n")) +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.expected new file mode 100644 index 0000000000000..8ee8021814d7f --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.expected @@ -0,0 +1,38 @@ +==================================== INPUT ==================================== +# F1 +query Q1 { + client_user { + ...F1 
+ } +} +fragment F1 on ClientUser { + pop_star_name +} + +fragment ClientUserPopStarName on ClientUser { + id +} + +# %definitions% + +%extensions% + +extend type Query { + client_user: ClientUser +} + +type ClientUser @__RelayResolverModel { + id: ID! +} + +# This resolver's fragment is _not_ generated even though it's on a @__RelayResolverModel type. +# Dependency tracker _should_ look for this fragment. +extend type ClientUser { + pop_star_name: String @relay_resolver(fragment_name: "ClientUserPopStarName", import_path: "PopStarNameResolver") +} +==================================== OUTPUT =================================== +Fragment: ClientUserPopStarName + +Fragment: F1 + +Operation: Q1 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.graphql new file mode 100644 index 0000000000000..33a2749f3bedc --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field-with-custom-fragment.graphql @@ -0,0 +1,31 @@ +# F1 +query Q1 { + client_user { + ...F1 + } +} +fragment F1 on ClientUser { + pop_star_name +} + +fragment ClientUserPopStarName on ClientUser { + id +} + +# %definitions% + +%extensions% + +extend type Query { + client_user: ClientUser +} + +type ClientUser @__RelayResolverModel { + id: ID! +} + +# This resolver's fragment is _not_ generated even though it's on a @__RelayResolverModel type. +# Dependency tracker _should_ look for this fragment. 
+extend type ClientUser { + pop_star_name: String @relay_resolver(fragment_name: "ClientUserPopStarName", import_path: "PopStarNameResolver") +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.expected index 633c5202de5ca..93ed16142a188 100644 --- a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.expected +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.expected @@ -21,10 +21,12 @@ type ClientUser @__RelayResolverModel { id: ID! } -# This resolver's fragment is generated because it's on a @__RelayResolverModel type. +# This resolver's fragment is generated because it's on a @__RelayResolverModel +# type and does not define its own fragment using @rootFragment. This is indicated with +# the `generated_fragment` directive argument. # Dependency tracker should not look for this fragment. extend type ClientUser { - pop_star_name: String @relay_resolver(fragment_name: "this___is___generated", import_path: "PopStarNameResolver") + pop_star_name: String @relay_resolver(fragment_name: "this___is___generated", import_path: "PopStarNameResolver", generated_fragment: true) } ==================================== OUTPUT =================================== Fragment: F1 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.graphql index 343c1a8186749..6901dbd793b5d 100644 --- a/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.graphql +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/new-resolver-model-field.graphql @@ -20,8 +20,10 @@ type ClientUser @__RelayResolverModel { id: ID! } -# This resolver's fragment is generated because it's on a @__RelayResolverModel type. 
+# This resolver's fragment is generated because it's on a @__RelayResolverModel +# type and does not define its own fragment using @rootFragment. This is indicated with +# the `generated_fragment` directive argument. # Dependency tracker should not look for this fragment. extend type ClientUser { - pop_star_name: String @relay_resolver(fragment_name: "this___is___generated", import_path: "PopStarNameResolver") + pop_star_name: String @relay_resolver(fragment_name: "this___is___generated", import_path: "PopStarNameResolver", generated_fragment: true) } diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.expected new file mode 100644 index 0000000000000..489d7d47a85f4 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.expected @@ -0,0 +1,55 @@ +==================================== INPUT ==================================== +# F1, Q2 +query Q1 { + me { + ...F1 + ...F2 + } +} + +fragment F1 on User { + userEnum +} + +fragment F2 on User { + name +} + +query Q2 { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +query QUnrelated { + me { + ...F3 + } +} + +%extensions% +# Enum: TestEnum + +enum TestEnum { + A + B +} + +extend type User { + userEnum: TestEnum +} +==================================== OUTPUT =================================== +Fragment: F1 + +Fragment: F2 + +Fragment: F3 + +Operation: Q1 + +Operation: Q2 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.graphql new file mode 100644 index 0000000000000..158f70d414f21 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-and-definitions-change.graphql @@ -0,0 +1,44 @@ +# F1, Q2 +query Q1 { + me { + ...F1 + ...F2 + } +} + 
+fragment F1 on User { + userEnum +} + +fragment F2 on User { + name +} + +query Q2 { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +query QUnrelated { + me { + ...F3 + } +} + +%extensions% +# Enum: TestEnum + +enum TestEnum { + A + B +} + +extend type User { + userEnum: TestEnum +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.expected new file mode 100644 index 0000000000000..f508728127928 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.expected @@ -0,0 +1,42 @@ +==================================== INPUT ==================================== +# +query Q1 { + me { + userTestObj { + field1 + } + } +} + +fragment F1 on User { + userTestEnum +} + +query QUnrelated { + me { + alternate_name + } +} + +%extensions% +# Enum: TestEnum +# Object: TestObj + +enum TestEnum { + A + B +} + +type TestObj { + field1: String + field2: Int +} + +extend type User { + userTestEnum: TestEnum + userTestObj: TestObj +} +==================================== OUTPUT =================================== +Fragment: F1 + +Operation: Q1 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.graphql new file mode 100644 index 0000000000000..fe735c51ddcd4 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-and-object-change.graphql @@ -0,0 +1,37 @@ +# +query Q1 { + me { + userTestObj { + field1 + } + } +} + +fragment F1 on User { + userTestEnum +} + +query QUnrelated { + me { + alternate_name + } +} + +%extensions% +# Enum: TestEnum +# Object: TestObj + +enum TestEnum { + A + B +} + +type TestObj { + field1: String + field2: Int +} + +extend type User { + userTestEnum: TestEnum + userTestObj: TestObj 
+} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.expected new file mode 100644 index 0000000000000..e397b4626fd54 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.expected @@ -0,0 +1,45 @@ +==================================== INPUT ==================================== +# +query Q1 { + me { + ...F1 + ...F2 + } +} + +fragment F1 on User { + userEnum +} + +fragment F2 on User { + name +} + +query QUnrelated { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +%extensions% +# Enum: TestEnum + +enum TestEnum { + A + B +} + +extend type User { + userEnum: TestEnum +} +==================================== OUTPUT =================================== +Fragment: F1 + +Fragment: F2 + +Operation: Q1 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.graphql new file mode 100644 index 0000000000000..1b99e9b45fdb6 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-enum-change.graphql @@ -0,0 +1,38 @@ +# +query Q1 { + me { + ...F1 + ...F2 + } +} + +fragment F1 on User { + userEnum +} + +fragment F2 on User { + name +} + +query QUnrelated { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +%extensions% +# Enum: TestEnum + +enum TestEnum { + A + B +} + +extend type User { + userEnum: TestEnum +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.expected new file mode 100644 index 0000000000000..387d0567f7668 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.expected @@ -0,0 +1,48 @@ +==================================== INPUT 
==================================== +# +query Q1 { + me { + ...F1 + ...F2 + } +} + +fragment F1 on User { + userTestObj { + field1 + field2 + } +} + +fragment F2 on User { + name +} + +query QUnrelated { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +%extensions% +# Object: TestObj + +type TestObj { + field1: String + field2: Int +} + +extend type User { + userTestObj: TestObj +} +==================================== OUTPUT =================================== +Fragment: F1 + +Fragment: F2 + +Operation: Q1 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.graphql new file mode 100644 index 0000000000000..27636cb53a8b8 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-change.graphql @@ -0,0 +1,41 @@ +# +query Q1 { + me { + ...F1 + ...F2 + } +} + +fragment F1 on User { + userTestObj { + field1 + field2 + } +} + +fragment F2 on User { + name +} + +query QUnrelated { + me { + ...F2 + ...F3 + } +} + +fragment F3 on User { + alternate_name +} + +%extensions% +# Object: TestObj + +type TestObj { + field1: String + field2: Int +} + +extend type User { + userTestObj: TestObj +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.expected new file mode 100644 index 0000000000000..899bafebb9e54 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.expected @@ -0,0 +1,57 @@ +==================================== INPUT ==================================== +# +query Q1 { + me { + userTestUnion { + ... on B { + B1 + } + } + } +} + +query QueryA { + me { + fieldA { + A1 + } + } +} + +query QueryB { + me { + fieldB { + B1 + } + } +} + +fragment F1 on U { + ... 
on B { + B1 + } +} + +%extensions% +# Object: A +# Union: U + +union U = A | B +type A { + A1: String +} +type B { + B1: String +} + +extend type User { + userTestUnion: U + fieldA: A + fieldB: B +} +==================================== OUTPUT =================================== +Fragment: F1 + +Operation: Q1 + +Operation: QueryA diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.graphql new file mode 100644 index 0000000000000..f99f260e52c6c --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-in-union-change.graphql @@ -0,0 +1,50 @@ +# +query Q1 { + me { + userTestUnion { + ... on B { + B1 + } + } + } +} + +query QueryA { + me { + fieldA { + A1 + } + } +} + +query QueryB { + me { + fieldB { + B1 + } + } +} + +fragment F1 on U { + ... on B { + B1 + } +} + +%extensions% +# Object: A +# Union: U + +union U = A | B +type A { + A1: String +} +type B { + B1: String +} + +extend type User { + userTestUnion: U + fieldA: A + fieldB: B +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.expected b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.expected new file mode 100644 index 0000000000000..09b85cbd57046 --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.expected @@ -0,0 +1,57 @@ +==================================== INPUT ==================================== +# +query Q1 { + me { + fieldA { + A1 + } + } +} + +query Q2 { + me { + fieldI { + field1 + } + } +} + +query Q3 { + me { + fieldI { + field1 + ... 
on A { + A1 + } + } + } +} + +fragment F1 on I { + field1 +} + +%extensions% +# Object: A +# Interface: I + +interface I { + field1: String +} +type A implements I { + A1: String + field1: String +} + +extend type User { + fieldA: A + fieldI: I +} +==================================== OUTPUT =================================== +Fragment: F1 + +Operation: Q1 + +Operation: Q2 + +Operation: Q3 diff --git a/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.graphql b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.graphql new file mode 100644 index 0000000000000..26b2ce6cf7b2c --- /dev/null +++ b/compiler/crates/dependency-analyzer/tests/ir/fixtures/schema-object-with-interface-change.graphql @@ -0,0 +1,48 @@ +# +query Q1 { + me { + fieldA { + A1 + } + } +} + +query Q2 { + me { + fieldI { + field1 + } + } +} + +query Q3 { + me { + fieldI { + field1 + ... on A { + A1 + } + } + } +} + +fragment F1 on I { + field1 +} + +%extensions% +# Object: A +# Interface: I + +interface I { + field1: String +} +type A implements I { + A1: String + field1: String +} + +extend type User { + fieldA: A + fieldI: I +} diff --git a/compiler/crates/dependency-analyzer/tests/ir/mod.rs b/compiler/crates/dependency-analyzer/tests/ir/mod.rs deleted file mode 100644 index 23330eaa26c44..0000000000000 --- a/compiler/crates/dependency-analyzer/tests/ir/mod.rs +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use dependency_analyzer::*; -use fixture_tests::Fixture; -use graphql_ir::*; -use graphql_syntax::parse_executable; -use intern::string_key::Intern; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use schema::SDLSchema; - -fn format_definition(def: ExecutableDefinition) -> String { - match def { - ExecutableDefinition::Operation(operation) => { - format!("Operation: {}", operation.name.item) - } - ExecutableDefinition::Fragment(fragment) => format!("Fragment: {}", fragment.name.item), - } -} - -// TODO: Test without using snapshot tests -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts = fixture.content.split("%extensions%").collect::>(); - - let (content, schema): (&str, Arc) = match parts.as_slice() { - [content] => (content, get_test_schema()), - [content, extension_content] => { - (content, get_test_schema_with_extensions(extension_content)) - } - _ => panic!("Expected one optional \"%extensions%\" section in the fxiture."), - }; - - let parts: Vec<&str> = content.split("%definitions%").collect(); - let first_line: &str = content.lines().next().unwrap(); - - let changed_names = first_line[1..] - .trim() - .split(',') - .map(|name| name.trim()) - .filter(|name| !name.is_empty()) - .flat_map(|name| { - // Note: this is a bit of a hack! Here, we don't know whether the stringkey represents - // a fragment or operation name, so we mark both "a fragment named X" and "a query named - // X" as having changed. 
- vec![ - FragmentDefinitionName(name.intern()).into(), - OperationDefinitionName(name.intern()).into(), - ] - }) - .collect(); - - let source_location = SourceLocationKey::standalone(fixture.file_name); - let mut asts = parse_executable(parts[0], source_location) - .unwrap() - .definitions; - let mut base_names: ExecutableDefinitionNameSet = Default::default(); - for part in parts.iter().skip(1) { - let defs = parse_executable(part, source_location).unwrap().definitions; - for def in defs { - base_names.insert(match &def { - graphql_syntax::ExecutableDefinition::Operation(node) => { - OperationDefinitionName(node.name.clone().unwrap().value).into() - } - graphql_syntax::ExecutableDefinition::Fragment(node) => { - FragmentDefinitionName(node.name.value).into() - } - }); - asts.push(def); - } - } - - let definitions = build(&schema, &asts).unwrap(); - let result = get_reachable_ir(definitions, base_names, changed_names, &schema); - - let mut texts = result - .into_iter() - .map(format_definition) - .collect::>(); - texts.sort(); - Ok(texts.join("\n\n")) -} diff --git a/compiler/crates/dependency-analyzer/tests/ir_test.rs b/compiler/crates/dependency-analyzer/tests/ir_test.rs index 8f9ddb7e6eb2a..573c1d1fc4c03 100644 --- a/compiler/crates/dependency-analyzer/tests/ir_test.rs +++ b/compiler/crates/dependency-analyzer/tests/ir_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<080b76ec4b2c6ea6cde4bc6b7c0fea2b>> + * @generated SignedSource<> */ mod ir; @@ -12,86 +12,135 @@ mod ir; use ir::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn base_definitions_change_fragment() { +#[tokio::test] +async fn base_definitions_change_fragment() { let input = include_str!("ir/fixtures/base-definitions-change-fragment.graphql"); let expected = include_str!("ir/fixtures/base-definitions-change-fragment.expected"); - test_fixture(transform_fixture, "base-definitions-change-fragment.graphql", "ir/fixtures/base-definitions-change-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "base-definitions-change-fragment.graphql", "ir/fixtures/base-definitions-change-fragment.expected", input, expected).await; } -#[test] -fn base_definitions_change_fragment2() { +#[tokio::test] +async fn base_definitions_change_fragment2() { let input = include_str!("ir/fixtures/base-definitions-change-fragment2.graphql"); let expected = include_str!("ir/fixtures/base-definitions-change-fragment2.expected"); - test_fixture(transform_fixture, "base-definitions-change-fragment2.graphql", "ir/fixtures/base-definitions-change-fragment2.expected", input, expected); + test_fixture(transform_fixture, file!(), "base-definitions-change-fragment2.graphql", "ir/fixtures/base-definitions-change-fragment2.expected", input, expected).await; } -#[test] -fn base_definitions_change_query() { +#[tokio::test] +async fn base_definitions_change_query() { let input = include_str!("ir/fixtures/base-definitions-change-query.graphql"); let expected = include_str!("ir/fixtures/base-definitions-change-query.expected"); - test_fixture(transform_fixture, "base-definitions-change-query.graphql", "ir/fixtures/base-definitions-change-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "base-definitions-change-query.graphql", "ir/fixtures/base-definitions-change-query.expected", input, expected).await; } 
-#[test] -fn definitions_only_change_fragment() { +#[tokio::test] +async fn definitions_only_change_fragment() { let input = include_str!("ir/fixtures/definitions-only-change-fragment.graphql"); let expected = include_str!("ir/fixtures/definitions-only-change-fragment.expected"); - test_fixture(transform_fixture, "definitions-only-change-fragment.graphql", "ir/fixtures/definitions-only-change-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "definitions-only-change-fragment.graphql", "ir/fixtures/definitions-only-change-fragment.expected", input, expected).await; } -#[test] -fn definitions_only_change_query() { +#[tokio::test] +async fn definitions_only_change_query() { let input = include_str!("ir/fixtures/definitions-only-change-query.graphql"); let expected = include_str!("ir/fixtures/definitions-only-change-query.expected"); - test_fixture(transform_fixture, "definitions-only-change-query.graphql", "ir/fixtures/definitions-only-change-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "definitions-only-change-query.graphql", "ir/fixtures/definitions-only-change-query.expected", input, expected).await; } -#[test] -fn definitions_only_no_change() { +#[tokio::test] +async fn definitions_only_no_change() { let input = include_str!("ir/fixtures/definitions-only-no-change.graphql"); let expected = include_str!("ir/fixtures/definitions-only-no-change.expected"); - test_fixture(transform_fixture, "definitions-only-no-change.graphql", "ir/fixtures/definitions-only-no-change.expected", input, expected); + test_fixture(transform_fixture, file!(), "definitions-only-no-change.graphql", "ir/fixtures/definitions-only-no-change.expected", input, expected).await; } -#[test] -fn implicit_dependencies_parent_child() { +#[tokio::test] +async fn implicit_dependencies_parent_child() { let input = include_str!("ir/fixtures/implicit-dependencies-parent-child.graphql"); let expected = 
include_str!("ir/fixtures/implicit-dependencies-parent-child.expected"); - test_fixture(transform_fixture, "implicit-dependencies-parent-child.graphql", "ir/fixtures/implicit-dependencies-parent-child.expected", input, expected); + test_fixture(transform_fixture, file!(), "implicit-dependencies-parent-child.graphql", "ir/fixtures/implicit-dependencies-parent-child.expected", input, expected).await; } -#[test] -fn new_resolver_field() { +#[tokio::test] +async fn new_resolver_field() { let input = include_str!("ir/fixtures/new-resolver-field.graphql"); let expected = include_str!("ir/fixtures/new-resolver-field.expected"); - test_fixture(transform_fixture, "new-resolver-field.graphql", "ir/fixtures/new-resolver-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "new-resolver-field.graphql", "ir/fixtures/new-resolver-field.expected", input, expected).await; } -#[test] -fn new_resolver_model_field() { +#[tokio::test] +async fn new_resolver_model_field() { let input = include_str!("ir/fixtures/new-resolver-model-field.graphql"); let expected = include_str!("ir/fixtures/new-resolver-model-field.expected"); - test_fixture(transform_fixture, "new-resolver-model-field.graphql", "ir/fixtures/new-resolver-model-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "new-resolver-model-field.graphql", "ir/fixtures/new-resolver-model-field.expected", input, expected).await; } -#[test] -fn query_then_fragment() { +#[tokio::test] +async fn new_resolver_model_field_with_custom_fragment() { + let input = include_str!("ir/fixtures/new-resolver-model-field-with-custom-fragment.graphql"); + let expected = include_str!("ir/fixtures/new-resolver-model-field-with-custom-fragment.expected"); + test_fixture(transform_fixture, file!(), "new-resolver-model-field-with-custom-fragment.graphql", "ir/fixtures/new-resolver-model-field-with-custom-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn query_then_fragment() { let 
input = include_str!("ir/fixtures/query-then-fragment.graphql"); let expected = include_str!("ir/fixtures/query-then-fragment.expected"); - test_fixture(transform_fixture, "query-then-fragment.graphql", "ir/fixtures/query-then-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-then-fragment.graphql", "ir/fixtures/query-then-fragment.expected", input, expected).await; } -#[test] -fn recursive_fragments() { +#[tokio::test] +async fn recursive_fragments() { let input = include_str!("ir/fixtures/recursive-fragments.graphql"); let expected = include_str!("ir/fixtures/recursive-fragments.expected"); - test_fixture(transform_fixture, "recursive-fragments.graphql", "ir/fixtures/recursive-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "recursive-fragments.graphql", "ir/fixtures/recursive-fragments.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_and_definitions_change() { + let input = include_str!("ir/fixtures/schema-and-definitions-change.graphql"); + let expected = include_str!("ir/fixtures/schema-and-definitions-change.expected"); + test_fixture(transform_fixture, file!(), "schema-and-definitions-change.graphql", "ir/fixtures/schema-and-definitions-change.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_enum_and_object_change() { + let input = include_str!("ir/fixtures/schema-enum-and-object-change.graphql"); + let expected = include_str!("ir/fixtures/schema-enum-and-object-change.expected"); + test_fixture(transform_fixture, file!(), "schema-enum-and-object-change.graphql", "ir/fixtures/schema-enum-and-object-change.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_enum_change() { + let input = include_str!("ir/fixtures/schema-enum-change.graphql"); + let expected = include_str!("ir/fixtures/schema-enum-change.expected"); + test_fixture(transform_fixture, file!(), "schema-enum-change.graphql", 
"ir/fixtures/schema-enum-change.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_object_change() { + let input = include_str!("ir/fixtures/schema-object-change.graphql"); + let expected = include_str!("ir/fixtures/schema-object-change.expected"); + test_fixture(transform_fixture, file!(), "schema-object-change.graphql", "ir/fixtures/schema-object-change.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_object_in_union_change() { + let input = include_str!("ir/fixtures/schema-object-in-union-change.graphql"); + let expected = include_str!("ir/fixtures/schema-object-in-union-change.expected"); + test_fixture(transform_fixture, file!(), "schema-object-in-union-change.graphql", "ir/fixtures/schema-object-in-union-change.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_object_with_interface_change() { + let input = include_str!("ir/fixtures/schema-object-with-interface-change.graphql"); + let expected = include_str!("ir/fixtures/schema-object-with-interface-change.expected"); + test_fixture(transform_fixture, file!(), "schema-object-with-interface-change.graphql", "ir/fixtures/schema-object-with-interface-change.expected", input, expected).await; } -#[test] -fn transitive_implicit_dependency() { +#[tokio::test] +async fn transitive_implicit_dependency() { let input = include_str!("ir/fixtures/transitive-implicit-dependency.graphql"); let expected = include_str!("ir/fixtures/transitive-implicit-dependency.expected"); - test_fixture(transform_fixture, "transitive-implicit-dependency.graphql", "ir/fixtures/transitive-implicit-dependency.expected", input, expected); + test_fixture(transform_fixture, file!(), "transitive-implicit-dependency.graphql", "ir/fixtures/transitive-implicit-dependency.expected", input, expected).await; } diff --git a/compiler/crates/docblock-shared/Cargo.toml b/compiler/crates/docblock-shared/Cargo.toml index a61574c2a36a5..6b28e5837e2d3 100644 --- 
a/compiler/crates/docblock-shared/Cargo.toml +++ b/compiler/crates/docblock-shared/Cargo.toml @@ -1,12 +1,17 @@ # @generated by autocargo from //relay/oss/crates/docblock-shared:docblock-shared + [package] name = "docblock-shared" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] common = { path = "../common" } +hex = "0.4.3" intern = { path = "../intern" } lazy_static = "1.4" +md-5 = "0.10" +serde = { version = "1.0.185", features = ["derive", "rc"] } diff --git a/compiler/crates/docblock-shared/src/lib.rs b/compiler/crates/docblock-shared/src/lib.rs index 996d65d0f2962..00aea843115a1 100644 --- a/compiler/crates/docblock-shared/src/lib.rs +++ b/compiler/crates/docblock-shared/src/lib.rs @@ -5,12 +5,15 @@ * LICENSE file in the root directory of this source tree. */ +mod resolver_source_hash; + use common::ArgumentName; use common::DirectiveName; use common::ScalarName; use intern::string_key::Intern; use intern::string_key::StringKey; use lazy_static::lazy_static; +pub use resolver_source_hash::ResolverSourceHash; lazy_static! { pub static ref RELAY_RESOLVER_DIRECTIVE_NAME: DirectiveName = @@ -31,12 +34,18 @@ lazy_static! 
{ pub static ref IMPORT_PATH_ARGUMENT_NAME: ArgumentName = ArgumentName("import_path".intern()); pub static ref INJECT_FRAGMENT_DATA_ARGUMENT_NAME: ArgumentName = ArgumentName("inject_fragment_data".intern()); + pub static ref GENERATED_FRAGMENT_ARGUMENT_NAME: ArgumentName = + ArgumentName("generated_fragment".intern()); pub static ref FIELD_NAME_FIELD: StringKey = "fieldName".intern(); pub static ref ON_TYPE_FIELD: StringKey = "onType".intern(); pub static ref ON_INTERFACE_FIELD: StringKey = "onInterface".intern(); pub static ref EDGE_TO_FIELD: StringKey = "edgeTo".intern(); pub static ref DEPRECATED_FIELD: StringKey = "deprecated".intern(); pub static ref LIVE_FIELD: StringKey = "live".intern(); + pub static ref SEMANTIC_NON_NULL_FIELD: StringKey = "semanticNonNull".intern(); + // Using a longer name version for this "special" field + // help us avoid potential collision with product code (__self, __instance can be used for something else) + pub static ref RELAY_RESOLVER_MODEL_INSTANCE_FIELD: StringKey = "__relay_model_instance".intern(); pub static ref ROOT_FRAGMENT_FIELD: StringKey = "rootFragment".intern(); pub static ref OUTPUT_TYPE_FIELD: StringKey = "outputType".intern(); pub static ref WEAK_FIELD: StringKey = "weak".intern(); @@ -49,4 +58,8 @@ lazy_static! { // Note: this should **only** be used for resolvers! The id field for server // types is configurable in the config, and thus cannot be hard-coded. 
pub static ref KEY_RESOLVER_ID_FIELD: StringKey = "id".intern(); + + pub static ref RELAY_RESOLVER_SOURCE_HASH: DirectiveName = DirectiveName("resolver_source_hash".intern()); + pub static ref RELAY_RESOLVER_SOURCE_HASH_VALUE: ArgumentName = ArgumentName("value".intern()); + } diff --git a/compiler/crates/docblock-shared/src/resolver_source_hash.rs b/compiler/crates/docblock-shared/src/resolver_source_hash.rs new file mode 100644 index 0000000000000..af8ea812cd3c1 --- /dev/null +++ b/compiler/crates/docblock-shared/src/resolver_source_hash.rs @@ -0,0 +1,37 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use intern::string_key::Intern; +use intern::string_key::StringKey; +use md5::Digest; +use md5::Md5; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] + +pub struct ResolverSourceHash(StringKey); + +impl ResolverSourceHash { + pub fn new(source: &str) -> Self { + Self(md5(source).intern()) + } + + pub fn from_raw(source: StringKey) -> Self { + Self(source) + } + + pub fn value(&self) -> StringKey { + self.0 + } +} + +fn md5(data: &str) -> String { + let mut md5 = Md5::new(); + md5.update(data); + hex::encode(md5.finalize()) +} diff --git a/compiler/crates/docblock-syntax/Cargo.toml b/compiler/crates/docblock-syntax/Cargo.toml index f6501de5bcff5..151e145c11384 100644 --- a/compiler/crates/docblock-syntax/Cargo.toml +++ b/compiler/crates/docblock-syntax/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/docblock-syntax:[docblock-syntax,docblock-syntax_parse_test] + [package] name = "docblock-syntax" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -12,10 +14,12 @@ path = "tests/parse_test.rs" [dependencies] common = { path = 
"../common" } +docblock-shared = { path = "../docblock-shared" } intern = { path = "../intern" } -serde = { version = "1.0.136", features = ["derive", "rc"] } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } graphql-test-helpers = { path = "../graphql-test-helpers" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/docblock-syntax/src/ast.rs b/compiler/crates/docblock-syntax/src/ast.rs index a58437c6fee14..f54066c10f9eb 100644 --- a/compiler/crates/docblock-syntax/src/ast.rs +++ b/compiler/crates/docblock-syntax/src/ast.rs @@ -8,6 +8,7 @@ use common::Location; use common::Named; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use intern::string_key::StringKey; #[derive(Debug, PartialEq)] pub struct DocblockField { @@ -32,6 +33,7 @@ pub enum DocblockSection { pub struct DocblockAST { pub location: Location, pub sections: Vec, + pub source_hash: ResolverSourceHash, } impl DocblockAST { diff --git a/compiler/crates/docblock-syntax/src/errors.rs b/compiler/crates/docblock-syntax/src/errors.rs index 9d81892317b28..1129cb7ea3eae 100644 --- a/compiler/crates/docblock-syntax/src/errors.rs +++ b/compiler/crates/docblock-syntax/src/errors.rs @@ -7,7 +7,19 @@ use thiserror::Error; -#[derive(Clone, Copy, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum SyntaxError { #[error("Expected \"{expected}\".")] ExpectedString { expected: &'static str }, diff --git a/compiler/crates/docblock-syntax/src/lib.rs b/compiler/crates/docblock-syntax/src/lib.rs index bdb96313a9988..b64e6da1641f1 100644 --- a/compiler/crates/docblock-syntax/src/lib.rs +++ b/compiler/crates/docblock-syntax/src/lib.rs @@ -21,6 +21,7 @@ use common::SourceLocationKey; 
use common::Span; use common::TextSource; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use errors::SyntaxError; use intern::string_key::Intern; use intern::string_key::StringKey; @@ -94,6 +95,7 @@ struct DocblockParser<'a> { errors: Vec, in_progress_text: Option, sections: Vec, + source_hash: ResolverSourceHash, } impl<'a> DocblockParser<'a> { @@ -106,6 +108,7 @@ impl<'a> DocblockParser<'a> { chars, in_progress_text: None, sections: Vec::new(), + source_hash: ResolverSourceHash::new(source), } } @@ -146,6 +149,7 @@ impl<'a> DocblockParser<'a> { */ Span::new(start, end - 1), ), + source_hash: self.source_hash, }) } else { Err(self.errors) @@ -327,7 +331,7 @@ impl SpanString { Self { span, string } } fn append_line(&mut self, other: Self) { - self.string.push_str("\n"); + self.string.push('\n'); self.string.push_str(&other.string); self.span.end = other.span.end; } diff --git a/compiler/crates/docblock-syntax/tests/parse.rs b/compiler/crates/docblock-syntax/tests/parse.rs new file mode 100644 index 0000000000000..e0ede5581a153 --- /dev/null +++ b/compiler/crates/docblock-syntax/tests/parse.rs @@ -0,0 +1,25 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use docblock_syntax::parse_docblock; +use fixture_tests::Fixture; +use graphql_test_helpers::diagnostics_to_sorted_string; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let mut content = fixture.content; + if !content.starts_with("/*") { + panic!("Expected fixture to start with \"/*\".") + }; + if !content.ends_with("*/\n") { + panic!("Expected fixture to end with \"*/\" followed by a newline.") + } + content = &content[2..content.len() - 3]; + parse_docblock(content, SourceLocationKey::standalone(fixture.file_name)) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(content, &diagnostics)) +} diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/docblock-ends-mid-line.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/docblock-ends-mid-line.expected index e73216a3e7002..50bb99db38661 100644 --- a/compiler/crates/docblock-syntax/tests/parse/fixtures/docblock-ends-mid-line.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/docblock-ends-mid-line.expected @@ -12,4 +12,7 @@ DocblockAST { }, ), ], + source_hash: ResolverSourceHash( + "4595a2a06568991a6d7594afbbe370ab", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/empty-block.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/empty-block.expected index 219e0bd3ef845..3c58b519fd26c 100644 --- a/compiler/crates/docblock-syntax/tests/parse/fixtures/empty-block.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/empty-block.expected @@ -5,4 +5,7 @@ DocblockAST { location: empty-block.ecmascript:0:2, sections: [], + source_hash: ResolverSourceHash( + "75966d011d530b35f16483422368e364", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/field-followed-by-free-text.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/field-followed-by-free-text.expected index 9299c97617b26..b398ee16afbb1 100644 --- 
a/compiler/crates/docblock-syntax/tests/parse/fixtures/field-followed-by-free-text.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/field-followed-by-free-text.expected @@ -23,4 +23,7 @@ DocblockAST { }, ), ], + source_hash: ResolverSourceHash( + "b408df07614b47f8b522bf4e528448d3", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/free-text-starting-with-star.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/free-text-starting-with-star.expected index 8a8f02fa565ab..3e0f5e6a1000a 100644 --- a/compiler/crates/docblock-syntax/tests/parse/fixtures/free-text-starting-with-star.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/free-text-starting-with-star.expected @@ -15,4 +15,7 @@ DocblockAST { }, ), ], + source_hash: ResolverSourceHash( + "a49d092bbb5fbad6582962e62fac5d2c", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/multiple-fields.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/multiple-fields.expected index 540aaeb18453d..594d5520be89f 100644 --- a/compiler/crates/docblock-syntax/tests/parse/fixtures/multiple-fields.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/multiple-fields.expected @@ -36,4 +36,7 @@ DocblockAST { }, ), ], + source_hash: ResolverSourceHash( + "c8f9872da22c679b3a9385d453c2d7d8", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/fixtures/simple_docblock.expected b/compiler/crates/docblock-syntax/tests/parse/fixtures/simple_docblock.expected index 4e2abcb1b6aae..c4355642b3b24 100644 --- a/compiler/crates/docblock-syntax/tests/parse/fixtures/simple_docblock.expected +++ b/compiler/crates/docblock-syntax/tests/parse/fixtures/simple_docblock.expected @@ -13,4 +13,7 @@ DocblockAST { }, ), ], + source_hash: ResolverSourceHash( + "39bf0ba989cb8ca55688669aa89f1f43", + ), } diff --git a/compiler/crates/docblock-syntax/tests/parse/mod.rs b/compiler/crates/docblock-syntax/tests/parse/mod.rs deleted file 
mode 100644 index c809cd811b9d1..0000000000000 --- a/compiler/crates/docblock-syntax/tests/parse/mod.rs +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use docblock_syntax::parse_docblock; -use fixture_tests::Fixture; -use graphql_test_helpers::diagnostics_to_sorted_string; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let mut content = fixture.content; - if !content.starts_with("/*") { - panic!("Expected fixture to start with \"/*\".") - }; - if !content.ends_with("*/\n") { - panic!("Expected fixture to end with \"*/\" followed by a newline.") - } - content = &content[2..content.len() - 3]; - parse_docblock(content, SourceLocationKey::standalone(fixture.file_name)) - .map(|x| format!("{:#?}", x)) - .map_err(|diagnostics| diagnostics_to_sorted_string(content, &diagnostics)) -} diff --git a/compiler/crates/docblock-syntax/tests/parse_test.rs b/compiler/crates/docblock-syntax/tests/parse_test.rs index 3860b0783e9ad..ef4b26204d25a 100644 --- a/compiler/crates/docblock-syntax/tests/parse_test.rs +++ b/compiler/crates/docblock-syntax/tests/parse_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<51c96537c8af1ebfd9e5d3e44443df65>> */ mod parse; @@ -12,65 +12,65 @@ mod parse; use parse::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn docblock_ends_mid_line() { +#[tokio::test] +async fn docblock_ends_mid_line() { let input = include_str!("parse/fixtures/docblock-ends-mid-line.ecmascript"); let expected = include_str!("parse/fixtures/docblock-ends-mid-line.expected"); - test_fixture(transform_fixture, "docblock-ends-mid-line.ecmascript", "parse/fixtures/docblock-ends-mid-line.expected", input, expected); + test_fixture(transform_fixture, file!(), "docblock-ends-mid-line.ecmascript", "parse/fixtures/docblock-ends-mid-line.expected", input, expected).await; } -#[test] -fn empty_block() { +#[tokio::test] +async fn empty_block() { let input = include_str!("parse/fixtures/empty-block.ecmascript"); let expected = include_str!("parse/fixtures/empty-block.expected"); - test_fixture(transform_fixture, "empty-block.ecmascript", "parse/fixtures/empty-block.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty-block.ecmascript", "parse/fixtures/empty-block.expected", input, expected).await; } -#[test] -fn field_followed_by_free_text() { +#[tokio::test] +async fn field_followed_by_free_text() { let input = include_str!("parse/fixtures/field-followed-by-free-text.ecmascript"); let expected = include_str!("parse/fixtures/field-followed-by-free-text.expected"); - test_fixture(transform_fixture, "field-followed-by-free-text.ecmascript", "parse/fixtures/field-followed-by-free-text.expected", input, expected); + test_fixture(transform_fixture, file!(), "field-followed-by-free-text.ecmascript", "parse/fixtures/field-followed-by-free-text.expected", input, expected).await; } -#[test] -fn free_text_starting_with_star() { +#[tokio::test] +async fn free_text_starting_with_star() { let input = include_str!("parse/fixtures/free-text-starting-with-star.ecmascript"); let expected = 
include_str!("parse/fixtures/free-text-starting-with-star.expected"); - test_fixture(transform_fixture, "free-text-starting-with-star.ecmascript", "parse/fixtures/free-text-starting-with-star.expected", input, expected); + test_fixture(transform_fixture, file!(), "free-text-starting-with-star.ecmascript", "parse/fixtures/free-text-starting-with-star.expected", input, expected).await; } -#[test] -fn invalid_docblock_invalid() { +#[tokio::test] +async fn invalid_docblock_invalid() { let input = include_str!("parse/fixtures/invalid_docblock.invalid.ecmascript"); let expected = include_str!("parse/fixtures/invalid_docblock.invalid.expected"); - test_fixture(transform_fixture, "invalid_docblock.invalid.ecmascript", "parse/fixtures/invalid_docblock.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid_docblock.invalid.ecmascript", "parse/fixtures/invalid_docblock.invalid.expected", input, expected).await; } -#[test] -fn invalid_field_name_invalid() { +#[tokio::test] +async fn invalid_field_name_invalid() { let input = include_str!("parse/fixtures/invalid_field_name.invalid.ecmascript"); let expected = include_str!("parse/fixtures/invalid_field_name.invalid.expected"); - test_fixture(transform_fixture, "invalid_field_name.invalid.ecmascript", "parse/fixtures/invalid_field_name.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid_field_name.invalid.ecmascript", "parse/fixtures/invalid_field_name.invalid.expected", input, expected).await; } -#[test] -fn missing_star_invalid() { +#[tokio::test] +async fn missing_star_invalid() { let input = include_str!("parse/fixtures/missing_star.invalid.ecmascript"); let expected = include_str!("parse/fixtures/missing_star.invalid.expected"); - test_fixture(transform_fixture, "missing_star.invalid.ecmascript", "parse/fixtures/missing_star.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing_star.invalid.ecmascript", 
"parse/fixtures/missing_star.invalid.expected", input, expected).await; } -#[test] -fn multiple_fields() { +#[tokio::test] +async fn multiple_fields() { let input = include_str!("parse/fixtures/multiple-fields.ecmascript"); let expected = include_str!("parse/fixtures/multiple-fields.expected"); - test_fixture(transform_fixture, "multiple-fields.ecmascript", "parse/fixtures/multiple-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-fields.ecmascript", "parse/fixtures/multiple-fields.expected", input, expected).await; } -#[test] -fn simple_docblock() { +#[tokio::test] +async fn simple_docblock() { let input = include_str!("parse/fixtures/simple_docblock.ecmascript"); let expected = include_str!("parse/fixtures/simple_docblock.expected"); - test_fixture(transform_fixture, "simple_docblock.ecmascript", "parse/fixtures/simple_docblock.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple_docblock.ecmascript", "parse/fixtures/simple_docblock.expected", input, expected).await; } diff --git a/compiler/crates/errors/Cargo.toml b/compiler/crates/errors/Cargo.toml index 7d8f6282ea4f3..7a450ec16ea0c 100644 --- a/compiler/crates/errors/Cargo.toml +++ b/compiler/crates/errors/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/errors:errors + [package] name = "errors" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] diff --git a/compiler/crates/extract-graphql/Cargo.toml b/compiler/crates/extract-graphql/Cargo.toml index 860677a74e702..172c5b5a40959 100644 --- a/compiler/crates/extract-graphql/Cargo.toml +++ b/compiler/crates/extract-graphql/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/extract-graphql:[extract-graphql,extract-graphql-test] + [package] name = "extract-graphql" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = 
"https://github.com/facebook/relay" license = "MIT" [[test]] @@ -17,3 +19,4 @@ graphql-syntax = { path = "../graphql-syntax" } [dev-dependencies] fixture-tests = { path = "../fixture-tests" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/extract-graphql/src/lib.rs b/compiler/crates/extract-graphql/src/lib.rs index 0009946c7722a..64880e2e4cf29 100644 --- a/compiler/crates/extract-graphql/src/lib.rs +++ b/compiler/crates/extract-graphql/src/lib.rs @@ -174,12 +174,15 @@ pub fn extract(input: &str) -> Vec { // } '"' => consume_string(&mut it, '"'), '\'' => consume_string(&mut it, '\''), + '\\' => consume_escaped_char(&mut it), '/' => { - match it.next() { + match it.chars.peek() { Some((_, '/')) => { + it.next(); consume_line_comment(&mut it); } Some((_, '*')) => { + it.next(); let start = i; let line_index = it.line_index; let column_index = it.column_index; @@ -213,13 +216,17 @@ pub fn extract(input: &str) -> Vec { res } +fn consume_escaped_char(it: &mut CharReader<'_>) { + it.next(); +} + fn consume_identifier(it: &mut CharReader<'_>) { - for (_, c) in it { - match c { - 'a'..='z' | 'A'..='Z' | '_' | '0'..='9' => {} - _ => { - return; + while it.chars.peek().is_some() { + match it.chars.peek() { + Some((_, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9')) => { + it.next(); } + _ => break, } } } diff --git a/compiler/crates/extract-graphql/tests/extract.rs b/compiler/crates/extract-graphql/tests/extract.rs new file mode 100644 index 0000000000000..b7abbc2ceba29 --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use extract_graphql::extract; +use extract_graphql::JavaScriptSourceFeature; +use fixture_tests::Fixture; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let features = extract(fixture.content); + Ok(features + .into_iter() + .map(|feature| match feature { + JavaScriptSourceFeature::Docblock(docblock_source) => { + let s = docblock_source.text_source(); + format!( + "docblock - line: {}, column: {}, text: <{}>", + s.line_index, s.column_index, s.text + ) + } + JavaScriptSourceFeature::GraphQL(graphql_source) => { + let s = graphql_source.text_source(); + format!( + "graphql - line: {}, column: {}, text: <{}>", + s.line_index, s.column_index, s.text + ) + } + }) + .collect::>() + .join("\n")) +} diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.expected b/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.expected new file mode 100644 index 0000000000000..4cdab377ea272 --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const complexRegex = /(\/*)\\\//; +const textRegex = /text\/*/; +const specialRegex = /\d+\s/; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } +==================================== OUTPUT =================================== +graphql - line: 12, column: 24, text: < + fragment Test on User { + __typename + } + > diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.js b/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.js new file mode 100644 index 0000000000000..37f726632af23 --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/complex_regex.js @@ -0,0 +1,19 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const complexRegex = /(\/*)\\\//; +const textRegex = /text\/*/; +const specialRegex = /\d+\s/; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/division.expected b/compiler/crates/extract-graphql/tests/extract/fixtures/division.expected new file mode 100644 index 0000000000000..d012ed09f3231 --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/division.expected @@ -0,0 +1,24 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const math = 5 / 2; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } +==================================== OUTPUT =================================== +graphql - line: 10, column: 24, text: < + fragment Test on User { + __typename + } + > diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/division.js b/compiler/crates/extract-graphql/tests/extract/fixtures/division.js new file mode 100644 index 0000000000000..d8149af51419b --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/division.js @@ -0,0 +1,17 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const math = 5 / 2; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/regex.expected b/compiler/crates/extract-graphql/tests/extract/fixtures/regex.expected new file mode 100644 index 0000000000000..90eb0de70f95a --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/regex.expected @@ -0,0 +1,24 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const regex = /\/*/; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } +==================================== OUTPUT =================================== +graphql - line: 10, column: 24, text: < + fragment Test on User { + __typename + } + > diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/regex.js b/compiler/crates/extract-graphql/tests/extract/fixtures/regex.js new file mode 100644 index 0000000000000..387d012ae289d --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/regex.js @@ -0,0 +1,17 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +const regex = /\/*/; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.expected b/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.expected new file mode 100644 index 0000000000000..669eabc81c63b --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * escape in docblock + * \\n + */ +const newline = "\\/escape in string \n newline in string "; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } +==================================== OUTPUT =================================== +graphql - line: 14, column: 24, text: < + fragment Test on User { + __typename + } + > diff --git a/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.js b/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.js new file mode 100644 index 0000000000000..8ad1ef38441d1 --- /dev/null +++ b/compiler/crates/extract-graphql/tests/extract/fixtures/slashes.js @@ -0,0 +1,21 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * escape in docblock + * \\n + */ +const newline = "\\/escape in string \n newline in string "; + +function MyComponent() { + useFragment(graphql` + fragment Test on User { + __typename + } + `, user) + return
Test
; + } diff --git a/compiler/crates/extract-graphql/tests/extract/mod.rs b/compiler/crates/extract-graphql/tests/extract/mod.rs deleted file mode 100644 index 969570a396c70..0000000000000 --- a/compiler/crates/extract-graphql/tests/extract/mod.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use extract_graphql::extract; -use extract_graphql::JavaScriptSourceFeature; -use fixture_tests::Fixture; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let features = extract(fixture.content); - Ok(features - .into_iter() - .map(|feature| match feature { - JavaScriptSourceFeature::Docblock(docblock_source) => { - let s = docblock_source.text_source(); - format!( - "docblock - line: {}, column: {}, text: <{}>", - s.line_index, s.column_index, s.text - ) - } - JavaScriptSourceFeature::GraphQL(graphql_source) => { - let s = graphql_source.text_source(); - format!( - "graphql - line: {}, column: {}, text: <{}>", - s.line_index, s.column_index, s.text - ) - } - }) - .collect::>() - .join("\n")) -} diff --git a/compiler/crates/extract-graphql/tests/extract_test.rs b/compiler/crates/extract-graphql/tests/extract_test.rs index f1637a142c2eb..6fb6aef7502b3 100644 --- a/compiler/crates/extract-graphql/tests/extract_test.rs +++ b/compiler/crates/extract-graphql/tests/extract_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<7414a7d21db69b967b49c36af99ace12>> + * @generated SignedSource<> */ mod extract; @@ -12,79 +12,107 @@ mod extract; use extract::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn comments() { +#[tokio::test] +async fn comments() { let input = include_str!("extract/fixtures/comments.js"); let expected = include_str!("extract/fixtures/comments.expected"); - test_fixture(transform_fixture, "comments.js", "extract/fixtures/comments.expected", input, expected); + test_fixture(transform_fixture, file!(), "comments.js", "extract/fixtures/comments.expected", input, expected).await; } -#[test] -fn inline() { +#[tokio::test] +async fn complex_regex() { + let input = include_str!("extract/fixtures/complex_regex.js"); + let expected = include_str!("extract/fixtures/complex_regex.expected"); + test_fixture(transform_fixture, file!(), "complex_regex.js", "extract/fixtures/complex_regex.expected", input, expected).await; +} + +#[tokio::test] +async fn division() { + let input = include_str!("extract/fixtures/division.js"); + let expected = include_str!("extract/fixtures/division.expected"); + test_fixture(transform_fixture, file!(), "division.js", "extract/fixtures/division.expected", input, expected).await; +} + +#[tokio::test] +async fn inline() { let input = include_str!("extract/fixtures/inline.js"); let expected = include_str!("extract/fixtures/inline.expected"); - test_fixture(transform_fixture, "inline.js", "extract/fixtures/inline.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline.js", "extract/fixtures/inline.expected", input, expected).await; } -#[test] -fn nested_template_literals() { +#[tokio::test] +async fn nested_template_literals() { let input = include_str!("extract/fixtures/nested_template_literals.js"); let expected = include_str!("extract/fixtures/nested_template_literals.expected"); - test_fixture(transform_fixture, "nested_template_literals.js", 
"extract/fixtures/nested_template_literals.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested_template_literals.js", "extract/fixtures/nested_template_literals.expected", input, expected).await; } -#[test] -fn no_graphql() { +#[tokio::test] +async fn no_graphql() { let input = include_str!("extract/fixtures/no_graphql.js"); let expected = include_str!("extract/fixtures/no_graphql.expected"); - test_fixture(transform_fixture, "no_graphql.js", "extract/fixtures/no_graphql.expected", input, expected); + test_fixture(transform_fixture, file!(), "no_graphql.js", "extract/fixtures/no_graphql.expected", input, expected).await; } -#[test] -fn quote_in_jsx() { +#[tokio::test] +async fn quote_in_jsx() { let input = include_str!("extract/fixtures/quote_in_jsx.js"); let expected = include_str!("extract/fixtures/quote_in_jsx.expected"); - test_fixture(transform_fixture, "quote_in_jsx.js", "extract/fixtures/quote_in_jsx.expected", input, expected); + test_fixture(transform_fixture, file!(), "quote_in_jsx.js", "extract/fixtures/quote_in_jsx.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn regex() { + let input = include_str!("extract/fixtures/regex.js"); + let expected = include_str!("extract/fixtures/regex.expected"); + test_fixture(transform_fixture, file!(), "regex.js", "extract/fixtures/regex.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver() { let input = include_str!("extract/fixtures/relay_resolver.js"); let expected = include_str!("extract/fixtures/relay_resolver.expected"); - test_fixture(transform_fixture, "relay_resolver.js", "extract/fixtures/relay_resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay_resolver.js", "extract/fixtures/relay_resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_and_graphql() { +#[tokio::test] +async fn relay_resolver_and_graphql() { let input = 
include_str!("extract/fixtures/relay_resolver_and_graphql.js"); let expected = include_str!("extract/fixtures/relay_resolver_and_graphql.expected"); - test_fixture(transform_fixture, "relay_resolver_and_graphql.js", "extract/fixtures/relay_resolver_and_graphql.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay_resolver_and_graphql.js", "extract/fixtures/relay_resolver_and_graphql.expected", input, expected).await; } -#[test] -fn simple() { +#[tokio::test] +async fn simple() { let input = include_str!("extract/fixtures/simple.flow"); let expected = include_str!("extract/fixtures/simple.expected"); - test_fixture(transform_fixture, "simple.flow", "extract/fixtures/simple.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple.flow", "extract/fixtures/simple.expected", input, expected).await; +} + +#[tokio::test] +async fn slashes() { + let input = include_str!("extract/fixtures/slashes.js"); + let expected = include_str!("extract/fixtures/slashes.expected"); + test_fixture(transform_fixture, file!(), "slashes.js", "extract/fixtures/slashes.expected", input, expected).await; } -#[test] -fn tabbed() { +#[tokio::test] +async fn tabbed() { let input = include_str!("extract/fixtures/tabbed.js"); let expected = include_str!("extract/fixtures/tabbed.expected"); - test_fixture(transform_fixture, "tabbed.js", "extract/fixtures/tabbed.expected", input, expected); + test_fixture(transform_fixture, file!(), "tabbed.js", "extract/fixtures/tabbed.expected", input, expected).await; } -#[test] -fn template_literal() { +#[tokio::test] +async fn template_literal() { let input = include_str!("extract/fixtures/template_literal.js"); let expected = include_str!("extract/fixtures/template_literal.expected"); - test_fixture(transform_fixture, "template_literal.js", "extract/fixtures/template_literal.expected", input, expected); + test_fixture(transform_fixture, file!(), "template_literal.js", "extract/fixtures/template_literal.expected", 
input, expected).await; } -#[test] -fn with_space() { +#[tokio::test] +async fn with_space() { let input = include_str!("extract/fixtures/with_space.js"); let expected = include_str!("extract/fixtures/with_space.expected"); - test_fixture(transform_fixture, "with_space.js", "extract/fixtures/with_space.expected", input, expected); + test_fixture(transform_fixture, file!(), "with_space.js", "extract/fixtures/with_space.expected", input, expected).await; } diff --git a/compiler/crates/fixture-tests/Cargo.toml b/compiler/crates/fixture-tests/Cargo.toml index 4357b4a1af3a5..639f942430ad3 100644 --- a/compiler/crates/fixture-tests/Cargo.toml +++ b/compiler/crates/fixture-tests/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/fixture-tests:[fixture-tests,fixture-tests-bin,fixture-tests-tests] + [package] name = "fixture-tests" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[bin]] @@ -15,9 +17,9 @@ name = "fixture_tests_tests" path = "tests/uppercase_test.rs" [dependencies] -clap = { version = "3.2.23", features = ["derive", "env", "regex", "unicode", "wrap_help"] } -colored = "1.9" +clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] } +colored = "2.1.0" diff = "0.1" lazy_static = "1.4" -parking_lot = { version = "0.11.2", features = ["send_guard"] } signedsource = { path = "../signedsource" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/fixture-tests/src/lib.rs b/compiler/crates/fixture-tests/src/lib.rs index aca576b6cd4de..02b3544a12a71 100644 --- a/compiler/crates/fixture-tests/src/lib.rs +++ b/compiler/crates/fixture-tests/src/lib.rs @@ -32,7 +32,7 @@ //! `tests/first_transform/mod.rs` exports the transform to test, for example: //! //! ```ignore -//! pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { +//! 
pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { //! Ok(fixture.to_uppercase()) //! } //! ``` @@ -46,19 +46,20 @@ //! //! *FB-internal: see `scripts/generate_fixture_tests.sh` to generate all.* // -//! *FB-internal: use buck run //relay/oss/crates/fixture-tests:fixture-tests-bin -- -//! *FB-internal: if you don't want to use cargo run. This is useful for development on a dev-server or -//! *FB-internal: or machines w/o cargo installed. +//! *FB-internal: use buck run //relay/oss/crates/fixture-tests:fixture-tests-bin -- \ mod print_diff; use std::env; use std::fs::File; +use std::future::Future; use std::io::prelude::*; +use std::path::PathBuf; +use std::process::Command; use std::sync::Arc; use lazy_static::lazy_static; -use parking_lot::Mutex; +use tokio::sync::Mutex; lazy_static! { static ref LOCK: Arc> = Arc::new(Mutex::new(0)); @@ -72,16 +73,31 @@ pub struct Fixture<'a> { pub content: &'a str, } +// https://stackoverflow.com/a/70511636 +pub trait AsyncFn: Fn(T) -> >::Fut { + type Fut: Future>::Output>; + type Output; +} +impl AsyncFn for F +where + F: Fn(T) -> Fut, + Fut: Future, +{ + type Fut = Fut; + type Output = Fut::Output; +} + /// This is an internal function and is typically called from generated code /// containing one test per fixture. 
-pub fn test_fixture( +pub async fn test_fixture( transform: T, + source_file_path: &str, input_file_name: &str, expected_file_name: &str, input: &str, expected: &str, ) where - T: FnOnce(&Fixture<'_>) -> Result, + T: for<'b> AsyncFn<&'b Fixture<'b>, Output = Result>, U: std::fmt::Display, V: std::fmt::Display, { @@ -92,9 +108,9 @@ pub fn test_fixture( let expect_ok = !input.contains("expected-to-throw"); let actual_result: Result; { - let _guard = LOCK.lock(); + let _guard = LOCK.lock().await; colored::control::set_override(false); - actual_result = transform(&fixture); + actual_result = transform(&fixture).await; colored::control::unset_override(); } @@ -146,19 +162,38 @@ pub fn test_fixture( } if env::var_os("UPDATE_SNAPSHOTS").is_some() { - let file_name = format!("tests/{}", expected_file_name); - File::create(&file_name) - .unwrap_or_else(|_| { + let expected_file_path = workspace_root() + .join(source_file_path) + .with_file_name(expected_file_name); + File::create(&expected_file_path) + .unwrap_or_else(|e| { panic!( - "Unable to create {}/{}", - env::current_dir().unwrap().to_str().unwrap(), - file_name + "Unable to create {} due to error: {:?}", + expected_file_path.display(), + e ) }) .write_all(actual.as_bytes()) .unwrap(); } else { - panic!("Snapshot did not match. Run with UPDATE_SNAPSHOTS=1 to update."); + panic!( + "Snapshot did not match. 
Run with UPDATE_SNAPSHOTS=1 to update.\nIf using Buck you can use `buck test -- --env UPDATE_SNAPSHOTS=1" + ); } } } + +fn workspace_root() -> PathBuf { + if let Ok(cargo) = std::env::var("CARGO") { + let stdout = Command::new(cargo) + .args(["locate-project", "--workspace", "--message-format=plain"]) + .output() + .unwrap() + .stdout; + let workspace_cargo_toml = PathBuf::from(&std::str::from_utf8(&stdout).unwrap().trim()); + workspace_cargo_toml.parent().unwrap().to_path_buf() + } else { + // Assuming we're building via Meta-internal BUCK setup, which executes tests from workspace root + std::env::current_dir().unwrap() + } +} diff --git a/compiler/crates/fixture-tests/src/main.rs b/compiler/crates/fixture-tests/src/main.rs index c630f79b1b3a4..fca883018bd3f 100644 --- a/compiler/crates/fixture-tests/src/main.rs +++ b/compiler/crates/fixture-tests/src/main.rs @@ -26,8 +26,13 @@ struct Options { /// from which a test file will be generated #[clap(name = "DIR")] dirs: Vec, -} + #[clap(long)] + customized_header: Option, + + #[clap(long)] + customized_snapshot_fixer: Option, +} #[derive(Debug)] struct TestCase { name: String, @@ -52,7 +57,7 @@ fn main() { continue; } let name = sanitize_identifier(path.file_stem().unwrap().to_str().unwrap()); - let mut test_case = test_cases.entry(name.clone()).or_insert_with(|| TestCase { + let test_case = test_cases.entry(name.clone()).or_insert_with(|| TestCase { name, input: None, expected: None, @@ -69,18 +74,18 @@ fn main() { test_case.input = Some(path); } } - for mut test_case in test_cases.values_mut() { + for test_case in test_cases.values_mut() { if test_case.expected.is_none() { if let Some(ref input) = test_case.input { let mut expected = input.clone(); expected.set_extension(EXPECTED_EXTENSION); - File::create(&expected) - .unwrap() - .write_all( - "\x40nocommit\nRun snapshot tests with UPDATE_SNAPSHOTS=1 to update this new file.\n" - .as_bytes(), - ) - .unwrap(); + let fixer = match &opt.customized_snapshot_fixer { + 
Some(customized) => customized.as_str().as_bytes(), + None => { + "\x40nocommit\nRun snapshot tests with UPDATE_SNAPSHOTS=1 to update this new file.\n".as_bytes() + } + }; + File::create(&expected).unwrap().write_all(fixer).unwrap(); test_case.expected = Some(expected); } } @@ -93,11 +98,11 @@ fn main() { .map(|(_, test_case)| { let test_case_name = &test_case.name; format!( - r#"#[test] -fn {0}() {{ + r#"#[tokio::test] +async fn {0}() {{ let input = include_str!("{1}/fixtures/{2}"); let expected = include_str!("{1}/fixtures/{3}"); - test_fixture(transform_fixture, "{2}", "{1}/fixtures/{3}", input, expected); + test_fixture(transform_fixture, file!(), "{2}", "{1}/fixtures/{3}", input, expected).await; }}"#, test_case.name, &test_name, @@ -150,6 +155,12 @@ fn {0}() {{ ", signing_token = SIGNING_TOKEN, ) + } else if let Some(customized_header) = &opt.customized_header { + format!( + "// {signing_token}\n// {customized_header}\n", + signing_token = SIGNING_TOKEN, + customized_header = customized_header + ) } else { format!( "// {signing_token} diff --git a/compiler/crates/fixture-tests/tests/uppercase.rs b/compiler/crates/fixture-tests/tests/uppercase.rs new file mode 100644 index 0000000000000..02520acf29604 --- /dev/null +++ b/compiler/crates/fixture-tests/tests/uppercase.rs @@ -0,0 +1,12 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + Ok(fixture.content.to_uppercase()) +} diff --git a/compiler/crates/fixture-tests/tests/uppercase/mod.rs b/compiler/crates/fixture-tests/tests/uppercase/mod.rs deleted file mode 100644 index 19ebfbeed9fe0..0000000000000 --- a/compiler/crates/fixture-tests/tests/uppercase/mod.rs +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - Ok(fixture.content.to_uppercase()) -} diff --git a/compiler/crates/fixture-tests/tests/uppercase_test.rs b/compiler/crates/fixture-tests/tests/uppercase_test.rs index b38b563119fda..f6ff9f495f20e 100644 --- a/compiler/crates/fixture-tests/tests/uppercase_test.rs +++ b/compiler/crates/fixture-tests/tests/uppercase_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<759fa81dcfd359632faaf5a2bd8f9555>> + * @generated SignedSource<<145e5ac1c3724879a13da87ee69f26b5>> */ mod uppercase; @@ -12,16 +12,16 @@ mod uppercase; use uppercase::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn hello() { +#[tokio::test] +async fn hello() { let input = include_str!("uppercase/fixtures/hello.txt"); let expected = include_str!("uppercase/fixtures/hello.expected"); - test_fixture(transform_fixture, "hello.txt", "uppercase/fixtures/hello.expected", input, expected); + test_fixture(transform_fixture, file!(), "hello.txt", "uppercase/fixtures/hello.expected", input, expected).await; } -#[test] -fn world() { +#[tokio::test] +async fn world() { let input = include_str!("uppercase/fixtures/world.txt"); let expected = include_str!("uppercase/fixtures/world.expected"); - test_fixture(transform_fixture, "world.txt", "uppercase/fixtures/world.expected", input, expected); + test_fixture(transform_fixture, file!(), "world.txt", "uppercase/fixtures/world.expected", input, expected).await; } diff --git a/compiler/crates/graphql-cli/Cargo.toml b/compiler/crates/graphql-cli/Cargo.toml index c636c4c320044..ae040e4dd2393 100644 --- a/compiler/crates/graphql-cli/Cargo.toml +++ b/compiler/crates/graphql-cli/Cargo.toml @@ -1,11 +1,13 @@ # 
@generated by autocargo from //relay/oss/crates/graphql-cli:graphql-cli + [package] name = "graphql-cli" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] -colored = "1.9" +colored = "2.1.0" common = { path = "../common" } diff --git a/compiler/crates/graphql-cli/src/diagnostic_printer.rs b/compiler/crates/graphql-cli/src/diagnostic_printer.rs index 4e263a4a6ad01..4a73c332bca96 100644 --- a/compiler/crates/graphql-cli/src/diagnostic_printer.rs +++ b/compiler/crates/graphql-cli/src/diagnostic_printer.rs @@ -79,13 +79,13 @@ impl DiagnosticPrinter { location: Location, highlight_color: Style, ) -> std::fmt::Result { - let source_printer = SourcePrinter::default(); + let source_printer = SourcePrinter; if let Some(source) = self.sources.get(location.source_location()) { let range = source.to_span_range(location.span()); writeln!( writer, " {}{}", - location.source_location().path().underline(), + normalize_path(location.source_location().path()).underline(), format!(":{}:{}", range.start.line + 1, range.start.character + 1).dimmed() )?; source_printer.write_span_with_highlight_style( @@ -99,7 +99,7 @@ impl DiagnosticPrinter { writeln!( writer, "{}: ", - location.source_location().path() + normalize_path(location.source_location().path()) )?; } Ok(()) @@ -118,3 +118,9 @@ where self(source_location) } } + +/// Normalize Windows paths to Unix style. This is important for stable test +/// output across Mac/Windows/Linux. 
+fn normalize_path(path: &str) -> String { + path.replace("\\", "/") +} diff --git a/compiler/crates/graphql-cli/src/source_printer.rs b/compiler/crates/graphql-cli/src/source_printer.rs index 2474841495976..3694361e830c6 100644 --- a/compiler/crates/graphql-cli/src/source_printer.rs +++ b/compiler/crates/graphql-cli/src/source_printer.rs @@ -74,14 +74,14 @@ impl SourcePrinter { let end = line_end_byte_indices .get(line_index) .cloned() - .unwrap_or_else(|| source.len()); + .unwrap_or(source.len()); let start = if line_index == 0 { 0 } else { line_end_byte_indices .get(line_index - 1) .cloned() - .unwrap_or_else(|| source.len()) + .unwrap_or(source.len()) }; start..end }; diff --git a/compiler/crates/graphql-ir-validations/Cargo.toml b/compiler/crates/graphql-ir-validations/Cargo.toml index ec733e979b449..cc0429c73f923 100644 --- a/compiler/crates/graphql-ir-validations/Cargo.toml +++ b/compiler/crates/graphql-ir-validations/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/graphql-ir-validations:[graphql-ir-validations,graphql-validate_selection_conflict-test] + [package] name = "graphql-ir-validations" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -12,13 +14,15 @@ path = "tests/validate_selection_conflict_test.rs" [dependencies] common = { path = "../common" } -dashmap = { version = "5.4", features = ["raw-api", "rayon", "serde"] } +dashmap = { version = "5.5.3", features = ["rayon", "serde"] } errors = { path = "../errors" } graphql-ir = { path = "../graphql-ir" } graphql-text-printer = { path = "../graphql-text-printer" } intern = { path = "../intern" } +relay-config = { path = "../relay-config" } schema = { path = "../schema" } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } @@ -26,3 +30,4 @@ graphql-cli = { path = 
"../graphql-cli" } graphql-syntax = { path = "../graphql-syntax" } graphql-test-helpers = { path = "../graphql-test-helpers" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/graphql-ir-validations/src/validate_selection_conflict.rs b/compiler/crates/graphql-ir-validations/src/validate_selection_conflict.rs index d305010104daa..d9eeab02abf4f 100644 --- a/compiler/crates/graphql-ir-validations/src/validate_selection_conflict.rs +++ b/compiler/crates/graphql-ir-validations/src/validate_selection_conflict.rs @@ -14,7 +14,9 @@ use std::sync::Arc; use common::Diagnostic; use common::DiagnosticsResult; use common::Location; +use common::NamedItem; use common::PointerAddress; +use common::WithLocation; use dashmap::DashMap; use dashmap::DashSet; use errors::par_try_map; @@ -24,6 +26,8 @@ use graphql_ir::Argument; use graphql_ir::Field as IRField; use graphql_ir::FragmentDefinition; use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentSpread; +use graphql_ir::InlineFragment; use graphql_ir::LinkedField; use graphql_ir::OperationDefinition; use graphql_ir::Program; @@ -31,6 +35,8 @@ use graphql_ir::ScalarField; use graphql_ir::Selection; use intern::string_key::StringKey; use intern::Lookup; +use relay_config::ProjectConfig; +use schema::FieldID; use schema::SDLSchema; use schema::Schema; use schema::Type; @@ -42,32 +48,42 @@ use self::ignoring_type_and_location::arguments_equals; /// Note:set `further_optimization` will enable: (1) cache the paired-fields; and (2) avoid duplicate fragment validations in multi-core machines. 
pub fn validate_selection_conflict( program: &Program, + project_config: &ProjectConfig, further_optimization: bool, ) -> DiagnosticsResult<()> { - ValidateSelectionConflict::::new(program, further_optimization).validate_program(program) + ValidateSelectionConflict::::new(program, project_config, further_optimization) + .validate_program(program) } #[derive(Clone, PartialEq, Debug)] -enum Field<'s> { +enum NamedSelection<'s> { LinkedField(&'s LinkedField), ScalarField(&'s ScalarField), + AliasedFragmentSpread(&'s FragmentSpread, WithLocation), + AliasedInlineFragment(&'s InlineFragment, WithLocation), } -type Fields<'s> = HashMap>, intern::BuildIdHasher>; +type NamedSelections<'s> = HashMap>, intern::BuildIdHasher>; struct ValidateSelectionConflict<'s, TBehavior: LocationAgnosticBehavior> { program: &'s Program, - fragment_cache: DashMap>, intern::BuildIdHasher>, - fields_cache: DashMap>>, + project_config: &'s ProjectConfig, + fragment_cache: DashMap>, intern::BuildIdHasher>, + fields_cache: DashMap>>, further_optimization: bool, verified_fields_pair: DashSet<(PointerAddress, PointerAddress, bool)>, _behavior: PhantomData, } impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { - fn new(program: &'s Program, further_optimization: bool) -> Self { + fn new( + program: &'s Program, + project_config: &'s ProjectConfig, + further_optimization: bool, + ) -> Self { Self { program, + project_config, fragment_cache: Default::default(), fields_cache: Default::default(), further_optimization, @@ -119,13 +135,11 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { let dummy_hashset = HashSet::new(); while let Some(visiting) = unclaimed_fragment_queue.pop_front() { - if let Err(e) = self.validate_and_collect_fragment( + self.validate_and_collect_fragment( program .fragment(visiting) .expect("fragment must have been registered"), - ) { - return Err(e); - } + )?; for used_by in 
dag_used_by.get(&visiting).unwrap_or(&dummy_hashset) { // fragment "used_by" now can assume "...now" cached. @@ -144,7 +158,10 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { Ok(()) } - fn validate_selections(&self, selections: &'s [Selection]) -> DiagnosticsResult> { + fn validate_selections( + &self, + selections: &'s [Selection], + ) -> DiagnosticsResult> { let mut fields = Default::default(); validate_map(selections, |selection| { self.validate_selection(&mut fields, selection) @@ -154,18 +171,24 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { fn validate_selection( &self, - fields: &mut Fields<'s>, + fields: &mut NamedSelections<'s>, selection: &'s Selection, ) -> DiagnosticsResult<()> { match selection { Selection::LinkedField(field) => { + if !self.should_validate_selection(field.definition.item) { + return Ok(()); + } self.validate_linked_field_selections(field)?; - let field = Field::LinkedField(field.as_ref()); - self.validate_and_insert_field_selection(fields, &field, false) + let field = NamedSelection::LinkedField(field.as_ref()); + self.validate_and_insert_named_selection(fields, &field, false) } Selection::ScalarField(field) => { - let field = Field::ScalarField(field.as_ref()); - self.validate_and_insert_field_selection(fields, &field, false) + if !self.should_validate_selection(field.definition.item) { + return Ok(()); + } + let field = NamedSelection::ScalarField(field.as_ref()); + self.validate_and_insert_named_selection(fields, &field, false) } Selection::Condition(condition) => { let new_fields = self.validate_selections(&condition.selections)?; @@ -173,20 +196,54 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { } Selection::InlineFragment(fragment) => { let new_fields = self.validate_selections(&fragment.selections)?; + if self + .project_config + .feature_flags + .enable_fragment_aliases + .is_fully_enabled() + { + if let Some(alias) = 
fragment.alias(&self.program.schema)? { + let field = NamedSelection::AliasedInlineFragment(fragment, alias); + self.validate_and_insert_named_selection(fields, &field, false)?; + } + } self.validate_and_merge_fields(fields, &new_fields, false) } Selection::FragmentSpread(spread) => { let fragment = self.program.fragment(spread.fragment.item).unwrap(); let new_fields = self.validate_and_collect_fragment(fragment)?; + if self + .project_config + .feature_flags + .enable_fragment_aliases + .is_fully_enabled() + { + if let Some(alias) = spread.alias()? { + let field = NamedSelection::AliasedFragmentSpread(spread, alias); + self.validate_and_insert_named_selection(fields, &field, false)?; + } + } self.validate_and_merge_fields(fields, &new_fields, false) } } } + fn should_validate_selection(&self, field_id: FieldID) -> bool { + let schema_field = self.program.schema.field(field_id); + let unselectable_directive_name = self + .project_config + .schema_config + .unselectable_directive_name; + schema_field + .directives + .named(unselectable_directive_name) + .is_none() + } + fn validate_and_collect_fragment( &self, fragment: &'s FragmentDefinition, - ) -> DiagnosticsResult>> { + ) -> DiagnosticsResult>> { if let Some(cached) = self.fragment_cache.get(&fragment.name.item.0) { return Ok(Arc::clone(&cached)); } @@ -199,7 +256,7 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { fn validate_linked_field_selections( &self, field: &'s LinkedField, - ) -> DiagnosticsResult>> { + ) -> DiagnosticsResult>> { let key = PointerAddress::new(field); if let Some(fields) = self.fields_cache.get(&key) { return Ok(Arc::clone(&fields)); @@ -211,32 +268,35 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { fn validate_and_merge_fields( &self, - left: &mut Fields<'s>, - right: &Fields<'s>, + left: &mut NamedSelections<'s>, + right: &NamedSelections<'s>, parent_fields_mutually_exclusive: bool, ) -> DiagnosticsResult<()> { 
validate_map(right.values().flatten(), |field| { - self.validate_and_insert_field_selection(left, field, parent_fields_mutually_exclusive) + self.validate_and_insert_named_selection(left, field, parent_fields_mutually_exclusive) }) } - fn validate_and_insert_field_selection( + fn validate_and_insert_named_selection( &self, - fields: &mut Fields<'s>, - field: &Field<'s>, + named_selections: &mut NamedSelections<'s>, + named_selection: &NamedSelection<'s>, parent_fields_mutually_exclusive: bool, ) -> DiagnosticsResult<()> { - let key = field.get_response_key(&self.program.schema); - if !fields.contains_key(&key) { - fields.entry(key).or_default().push(field.clone()); + let key = named_selection.get_response_key(&self.program.schema); + if !named_selections.contains_key(&key) { + named_selections + .entry(key) + .or_default() + .push(named_selection.clone()); return Ok(()); } let mut errors = vec![]; - let addr1 = field.pointer_address(); + let addr1 = named_selection.pointer_address(); - for existing_field in fields.get(&key).unwrap() { - if field == existing_field { + for existing_named_selection in named_selections.get(&key).unwrap() { + if named_selection == existing_named_selection { return if errors.is_empty() { Ok(()) } else { @@ -244,7 +304,7 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { }; } - let addr2 = existing_field.pointer_address(); + let addr2 = existing_named_selection.pointer_address(); if self.further_optimization && self.verified_fields_pair.contains(&( addr1, @@ -255,8 +315,23 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { continue; } - let l_definition = existing_field.get_field_definition(&self.program.schema); - let r_definition = field.get_field_definition(&self.program.schema); + let l_named_definition = + existing_named_selection.get_field_definition(&self.program.schema); + let r_named_definition = named_selection.get_field_definition(&self.program.schema); + + let 
(l_definition, r_definition) = match (l_named_definition, r_named_definition) { + (NameDefinition::Field(l), NameDefinition::Field(r)) => (l, r), + _ => { + errors.push( + Diagnostic::error( + ValidationMessage::AmbiguousFragmentAlias { response_key: key }, + existing_named_selection.loc(), + ) + .annotate("the other field", named_selection.loc()), + ); + continue; + } + }; let is_parent_fields_mutually_exclusive = || { parent_fields_mutually_exclusive @@ -267,8 +342,8 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { ) }; - match (existing_field, &field) { - (Field::LinkedField(l), Field::LinkedField(r)) => { + match (existing_named_selection, &named_selection) { + (NamedSelection::LinkedField(l), NamedSelection::LinkedField(r)) => { let fields_mutually_exclusive = is_parent_fields_mutually_exclusive(); if !fields_mutually_exclusive { if let Err(err) = self.validate_same_field( @@ -314,7 +389,7 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { ); } } - (Field::ScalarField(l), Field::ScalarField(r)) => { + (NamedSelection::ScalarField(l), NamedSelection::ScalarField(r)) => { if !is_parent_fields_mutually_exclusive() { if let Err(err) = self.validate_same_field( key, @@ -365,7 +440,7 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { }, existing_field.loc(), ) - .annotate("the other field", field.loc()), + .annotate("the other field", named_selection.loc()), ); } } @@ -378,7 +453,10 @@ impl<'s, B: LocationAgnosticBehavior + Sync> ValidateSelectionConflict<'s, B> { } } if errors.is_empty() { - fields.entry(key).or_default().push(field.clone()); + named_selections + .entry(key) + .or_default() + .push(named_selection.clone()); Ok(()) } else { Err(errors) @@ -494,32 +572,49 @@ fn has_same_type_reference_wrapping(l: &TypeReference, r: &TypeReference Field<'s> { +enum NameDefinition<'s> { + Field(&'s schema::definitions::Field), + Fragment, +} + +impl<'s> NamedSelection<'s> 
{ fn get_response_key(&self, schema: &SDLSchema) -> StringKey { match self { - Field::LinkedField(f) => f.alias_or_name(schema), - Field::ScalarField(f) => f.alias_or_name(schema), + NamedSelection::LinkedField(f) => f.alias_or_name(schema), + NamedSelection::ScalarField(f) => f.alias_or_name(schema), + NamedSelection::AliasedFragmentSpread(_, alias) => alias.item, + NamedSelection::AliasedInlineFragment(_, alias) => alias.item, } } - fn get_field_definition(&self, schema: &'s SDLSchema) -> &'s schema::definitions::Field { + fn get_field_definition(&self, schema: &'s SDLSchema) -> NameDefinition<'s> { match self { - Field::LinkedField(f) => schema.field(f.definition.item), - Field::ScalarField(f) => schema.field(f.definition.item), + NamedSelection::LinkedField(f) => { + NameDefinition::Field(schema.field(f.definition.item)) + } + NamedSelection::ScalarField(f) => { + NameDefinition::Field(schema.field(f.definition.item)) + } + NamedSelection::AliasedFragmentSpread(_spread, _) => NameDefinition::Fragment, + NamedSelection::AliasedInlineFragment(_fragment, _) => NameDefinition::Fragment, } } fn loc(&self) -> Location { match self { - Field::LinkedField(f) => f.definition.location, - Field::ScalarField(f) => f.definition.location, + NamedSelection::LinkedField(f) => f.alias_or_name_location(), + NamedSelection::ScalarField(f) => f.alias_or_name_location(), + NamedSelection::AliasedFragmentSpread(_, alias) => alias.location, + NamedSelection::AliasedInlineFragment(_, alias) => alias.location, } } fn pointer_address(&self) -> PointerAddress { match self { - Field::LinkedField(f) => PointerAddress::new(&f.definition), - Field::ScalarField(f) => PointerAddress::new(&f.definition), + NamedSelection::LinkedField(f) => PointerAddress::new(&f.definition), + NamedSelection::ScalarField(f) => PointerAddress::new(&f.definition), + NamedSelection::AliasedFragmentSpread(spread, _) => PointerAddress::new(spread), + NamedSelection::AliasedInlineFragment(fragment, _) => 
PointerAddress::new(fragment), } } } @@ -581,8 +676,24 @@ mod ignoring_type_and_location { } } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] enum ValidationMessage { + #[error( + "Fragment alias '{response_key}' is ambiguous. It conflicts with another named selection" + )] + AmbiguousFragmentAlias { response_key: StringKey }, + #[error( "Field '{response_key}' is ambiguous because it references two different fields: '{l_name}' and '{r_name}'" )] @@ -612,12 +723,12 @@ enum ValidationMessage { }, #[error( - "Field '{response_key}' is marked with @stream in one place, and not marked in another place. Please use alias to distinguish the 2 fields.'" + "Field '{response_key}' is marked with @stream in one place, and not marked in another place. Please use an alias to distinguish the two fields." )] StreamConflictOnlyUsedInOnePlace { response_key: StringKey }, #[error( - "Field '{response_key}' is marked with @stream in multiple places. Please use an alias to distinguish them'" + "Field '{response_key}' is marked with @stream in multiple places. Please use an alias to distinguish them." )] StreamConflictUsedInMultiplePlaces { response_key: StringKey }, } diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict.rs b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict.rs new file mode 100644 index 0000000000000..0da7782d6231d --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict.rs @@ -0,0 +1,86 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::DirectiveName; +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build; +use graphql_ir::node_identifier::LocationAgnosticBehavior; +use graphql_ir::Program; +use graphql_ir_validations::validate_selection_conflict; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema_with_located_extensions; +use relay_test_schema::TEST_SCHEMA; + +#[derive(Clone)] +struct LocationAgnosticBehaviorForTestOnly; +impl LocationAgnosticBehavior for LocationAgnosticBehaviorForTestOnly { + fn should_skip_in_node_identifier(_name: DirectiveName) -> bool { + false + } + fn hash_for_name_only(_name: DirectiveName) -> bool { + false + } +} + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let mut schema = TEST_SCHEMA.to_owned(); + let mut source = fixture.content; + let source_location = SourceLocationKey::embedded(fixture.file_name, 0); + if fixture.content.contains("%extensions%") { + let extension_location = SourceLocationKey::embedded(fixture.file_name, 1); + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + source = base; + schema = get_test_schema_with_located_extensions(extensions, extension_location); + } else { + panic!("Expected exactly one %extensions% section marker.") + } + } + let ast = parse_executable(source, source_location).unwrap(); + let ir_result = build(&schema, &ast.definitions); + let ir = match ir_result { + Ok(res) => res, + Err(errors) => { + let mut errs = errors + .into_iter() + .map(|err| { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + printer.diagnostic_to_string(&err) + }) + .collect::>(); + 
errs.sort(); + return Err(errs.join("\n\n")); + } + }; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_selection_conflict::( + &program, + &ProjectConfig { + feature_flags: Arc::new(FeatureFlags { + enable_fragment_aliases: FeatureFlag::Enabled, + ..Default::default() + }), + ..Default::default() + }, + true, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.expected new file mode 100644 index 0000000000000..5fb8f494afc7d --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + me { + User: name + ... on User @alias { + name + } + } +} +==================================== ERROR ==================================== +✖︎ Fragment alias 'User' is ambiguous. It conflicts with another named selection + + default_inline_fragment_alias_conflicts_with_field.graphql:5:5 + 4 │ me { + 5 │ User: name + │ ^^^^ + 6 │ ... on User @alias { + + ℹ︎ the other field + + default_inline_fragment_alias_conflicts_with_field.graphql:6:17 + 5 │ User: name + 6 │ ... 
on User @alias { + │ ^^^^^^ + 7 │ name diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.graphql new file mode 100644 index 0000000000000..81758a8db5483 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.graphql @@ -0,0 +1,10 @@ +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + me { + User: name + ... on User @alias { + name + } + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.expected new file mode 100644 index 0000000000000..036e8aa66eb60 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + RelayReaderNamedFragmentsTest_user: name + ...RelayReaderNamedFragmentsTest_user @alias + } +} +==================================== ERROR ==================================== +✖︎ Fragment alias 'RelayReaderNamedFragmentsTest_user' is ambiguous. 
It conflicts with another named selection + + default_named_fragment_alias_conflicts_with_field.graphql:8:5 + 7 │ me { + 8 │ RelayReaderNamedFragmentsTest_user: name + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 9 │ ...RelayReaderNamedFragmentsTest_user @alias + + ℹ︎ the other field + + default_named_fragment_alias_conflicts_with_field.graphql:9:43 + 8 │ RelayReaderNamedFragmentsTest_user: name + 9 │ ...RelayReaderNamedFragmentsTest_user @alias + │ ^^^^^^ + 10 │ } diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.graphql new file mode 100644 index 0000000000000..481a5860265ab --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.graphql @@ -0,0 +1,11 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + RelayReaderNamedFragmentsTest_user: name + ...RelayReaderNamedFragmentsTest_user @alias + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.expected new file mode 100644 index 0000000000000..15afe3aabb37b --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + me { + ... 
on User @alias(as: "myAlias") { + same_alias: name + } + same_alias: canViewerComment + } +} +==================================== ERROR ==================================== +✖︎ Field 'same_alias' is ambiguous because it references two different fields: 'name' and 'canViewerComment' + + field_names_still_conflict_in_alaised_fragment.graphql:6:19 + 5 │ ... on User @alias(as: "myAlias") { + 6 │ same_alias: name + │ ^^^^ + 7 │ } + + ℹ︎ the other field + + field_names_still_conflict_in_alaised_fragment.graphql:8:17 + 7 │ } + 8 │ same_alias: canViewerComment + │ ^^^^^^^^^^^^^^^^ + 9 │ } diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.graphql new file mode 100644 index 0000000000000..258c72e60624d --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.graphql @@ -0,0 +1,10 @@ +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + me { + ... 
on User @alias(as: "myAlias") { + same_alias: name + } + same_alias: canViewerComment + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.expected new file mode 100644 index 0000000000000..e2aff80c97b8d --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + name + ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + } +} +==================================== ERROR ==================================== +✖︎ Fragment alias 'name' is ambiguous. 
It conflicts with another named selection + + fragment_alias_name_conflicts_with_field.graphql:8:5 + 7 │ me { + 8 │ name + │ ^^^^ + 9 │ ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + + ℹ︎ the other field + + fragment_alias_name_conflicts_with_field.graphql:9:54 + 8 │ name + 9 │ ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + │ ^^^^^^ + 10 │ } diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.graphql new file mode 100644 index 0000000000000..dfb1311d11697 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.graphql @@ -0,0 +1,11 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + name + ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.expected new file mode 100644 index 0000000000000..98e79d9d3c4a5 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.expected @@ -0,0 +1,30 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + ... 
on User { + name + } + ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + } +} +==================================== ERROR ==================================== +✖︎ Fragment alias 'name' is ambiguous. It conflicts with another named selection + + fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql:9:7 + 8 │ ... on User { + 9 │ name + │ ^^^^ + 10 │ } + + ℹ︎ the other field + + fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql:11:54 + 10 │ } + 11 │ ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + │ ^^^^^^ + 12 │ } diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql new file mode 100644 index 0000000000000..88f8ac84a0adf --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql @@ -0,0 +1,13 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + me { + ... 
on User { + name + } + ...RelayReaderNamedFragmentsTest_user @alias(as: "name") + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.expected new file mode 100644 index 0000000000000..d2eaed69c1229 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + ... on Query @alias(as: "myAlias") { + __typename + } + ... on Query @alias(as: "myAlias") { + __typename + } +} +==================================== ERROR ==================================== +✖︎ Fragment alias 'myAlias' is ambiguous. It conflicts with another named selection + + fragment_alias_name_conflicts_with_other_fragment_alias.graphql:4:27 + 3 │ query RelayReaderNamedFragmentsTest2Query { + 4 │ ... on Query @alias(as: "myAlias") { + │ ^^^^^^^^^ + 5 │ __typename + + ℹ︎ the other field + + fragment_alias_name_conflicts_with_other_fragment_alias.graphql:7:27 + 6 │ } + 7 │ ... 
on Query @alias(as: "myAlias") { + │ ^^^^^^^^^ + 8 │ __typename diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.graphql new file mode 100644 index 0000000000000..01727db983b39 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.graphql @@ -0,0 +1,10 @@ +# expected-to-throw + +query RelayReaderNamedFragmentsTest2Query { + ... on Query @alias(as: "myAlias") { + __typename + } + ... on Query @alias(as: "myAlias") { + __typename + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.expected b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.expected new file mode 100644 index 0000000000000..3d32f455776b8 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +fragment RelayModelInstanceOnConcreteTypesOfInterface on IAnimal { + ... on RedFish { + __relay_model_instance + } + ... 
on BlueFish { + __relay_model_instance + } +} + +# %extensions% + +interface IAnimal { + color: String +} + +type RedFish implements IAnimal @__RelayResolverModel { + color: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "RedFishResolver", fragment_name: "RedFish__id", inject_fragment_data: "id") +} + +type BlueFish implements IAnimal @__RelayResolverModel { + color: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BlueFishResolver", fragment_name: "BlueFish__id", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.graphql b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.graphql new file mode 100644 index 0000000000000..eb60e5fd10164 --- /dev/null +++ b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/fixtures/relay_resolver_value.graphql @@ -0,0 +1,24 @@ +fragment RelayModelInstanceOnConcreteTypesOfInterface on IAnimal { + ... on RedFish { + __relay_model_instance + } + ... 
on BlueFish { + __relay_model_instance + } +} + +# %extensions% + +interface IAnimal { + color: String +} + +type RedFish implements IAnimal @__RelayResolverModel { + color: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "RedFishResolver", fragment_name: "RedFish__id", inject_fragment_data: "id") +} + +type BlueFish implements IAnimal @__RelayResolverModel { + color: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BlueFishResolver", fragment_name: "BlueFish__id", inject_fragment_data: "id") +} diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/mod.rs b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/mod.rs deleted file mode 100644 index b085d608726b4..0000000000000 --- a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict/mod.rs +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::DirectiveName; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build; -use graphql_ir::node_identifier::LocationAgnosticBehavior; -use graphql_ir::Program; -use graphql_ir_validations::validate_selection_conflict; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; - -#[derive(Clone)] -struct LocationAgnosticBehaviorForTestOnly; -impl LocationAgnosticBehavior for LocationAgnosticBehaviorForTestOnly { - fn should_skip_in_node_identifier(_name: DirectiveName) -> bool { - false - } - fn hash_for_name_only(_name: DirectiveName) -> bool { - false - } -} - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build(&TEST_SCHEMA, &ast.definitions); - let ir = match ir_result { - Ok(res) => res, - Err(errors) => { - let mut errs = errors - .into_iter() - .map(|err| { - let printer = DiagnosticPrinter::new(|_| { - Some(TextSource::from_whole_document(fixture.content.to_string())) - }); - printer.diagnostic_to_string(&err) - }) - .collect::>(); - errs.sort(); - return Err(errs.join("\n\n")); - } - }; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_selection_conflict::(&program, true) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict_test.rs b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict_test.rs index 3b81bda581464..f4437f8235758 100644 --- a/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict_test.rs +++ 
b/compiler/crates/graphql-ir-validations/tests/validate_selection_conflict_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<82816cf559b0847f27b559ede51c9022>> + * @generated SignedSource<<16462ea52607752fbd8299818d84ad35>> */ mod validate_selection_conflict; @@ -12,51 +12,100 @@ mod validate_selection_conflict; use validate_selection_conflict::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn same_alias_list_non_list() { +#[tokio::test] +async fn default_inline_fragment_alias_conflicts_with_field() { + let input = include_str!("validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.expected"); + test_fixture(transform_fixture, file!(), "default_inline_fragment_alias_conflicts_with_field.graphql", "validate_selection_conflict/fixtures/default_inline_fragment_alias_conflicts_with_field.expected", input, expected).await; +} + +#[tokio::test] +async fn default_named_fragment_alias_conflicts_with_field() { + let input = include_str!("validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.expected"); + test_fixture(transform_fixture, file!(), "default_named_fragment_alias_conflicts_with_field.graphql", "validate_selection_conflict/fixtures/default_named_fragment_alias_conflicts_with_field.expected", input, expected).await; +} + +#[tokio::test] +async fn field_names_still_conflict_in_alaised_fragment() { + let input = include_str!("validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.graphql"); + let expected = 
include_str!("validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.expected"); + test_fixture(transform_fixture, file!(), "field_names_still_conflict_in_alaised_fragment.graphql", "validate_selection_conflict/fixtures/field_names_still_conflict_in_alaised_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_alias_name_conflicts_with_field() { + let input = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.expected"); + test_fixture(transform_fixture, file!(), "fragment_alias_name_conflicts_with_field.graphql", "validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_alias_name_conflicts_with_field_in_inline_fragment() { + let input = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.expected"); + test_fixture(transform_fixture, file!(), "fragment_alias_name_conflicts_with_field_in_inline_fragment.graphql", "validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_field_in_inline_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_alias_name_conflicts_with_other_fragment_alias() { + let input = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.expected"); + test_fixture(transform_fixture, file!(), "fragment_alias_name_conflicts_with_other_fragment_alias.graphql", 
"validate_selection_conflict/fixtures/fragment_alias_name_conflicts_with_other_fragment_alias.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_value() { + let input = include_str!("validate_selection_conflict/fixtures/relay_resolver_value.graphql"); + let expected = include_str!("validate_selection_conflict/fixtures/relay_resolver_value.expected"); + test_fixture(transform_fixture, file!(), "relay_resolver_value.graphql", "validate_selection_conflict/fixtures/relay_resolver_value.expected", input, expected).await; +} + +#[tokio::test] +async fn same_alias_list_non_list() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-list-non-list.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-list-non-list.expected"); - test_fixture(transform_fixture, "same-alias-list-non-list.graphql", "validate_selection_conflict/fixtures/same-alias-list-non-list.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-list-non-list.graphql", "validate_selection_conflict/fixtures/same-alias-list-non-list.expected", input, expected).await; } -#[test] -fn same_alias_nested() { +#[tokio::test] +async fn same_alias_nested() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-nested.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-nested.expected"); - test_fixture(transform_fixture, "same-alias-nested.graphql", "validate_selection_conflict/fixtures/same-alias-nested.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-nested.graphql", "validate_selection_conflict/fixtures/same-alias-nested.expected", input, expected).await; } -#[test] -fn same_alias_nested_mutually_exclusive() { +#[tokio::test] +async fn same_alias_nested_mutually_exclusive() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-nested-mutually-exclusive.graphql"); let expected = 
include_str!("validate_selection_conflict/fixtures/same-alias-nested-mutually-exclusive.expected"); - test_fixture(transform_fixture, "same-alias-nested-mutually-exclusive.graphql", "validate_selection_conflict/fixtures/same-alias-nested-mutually-exclusive.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-nested-mutually-exclusive.graphql", "validate_selection_conflict/fixtures/same-alias-nested-mutually-exclusive.expected", input, expected).await; } -#[test] -fn same_alias_on_different_types() { +#[tokio::test] +async fn same_alias_on_different_types() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-on-different-types.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-on-different-types.expected"); - test_fixture(transform_fixture, "same-alias-on-different-types.graphql", "validate_selection_conflict/fixtures/same-alias-on-different-types.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-on-different-types.graphql", "validate_selection_conflict/fixtures/same-alias-on-different-types.expected", input, expected).await; } -#[test] -fn same_alias_on_different_types_inline_fragments() { +#[tokio::test] +async fn same_alias_on_different_types_inline_fragments() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-on-different-types-inline-fragments.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-on-different-types-inline-fragments.expected"); - test_fixture(transform_fixture, "same-alias-on-different-types-inline-fragments.graphql", "validate_selection_conflict/fixtures/same-alias-on-different-types-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-on-different-types-inline-fragments.graphql", "validate_selection_conflict/fixtures/same-alias-on-different-types-inline-fragments.expected", input, expected).await; } -#[test] -fn 
same_alias_under_different_inline_fragments() { +#[tokio::test] +async fn same_alias_under_different_inline_fragments() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-under-different-inline-fragments.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-under-different-inline-fragments.expected"); - test_fixture(transform_fixture, "same-alias-under-different-inline-fragments.graphql", "validate_selection_conflict/fixtures/same-alias-under-different-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-under-different-inline-fragments.graphql", "validate_selection_conflict/fixtures/same-alias-under-different-inline-fragments.expected", input, expected).await; } -#[test] -fn same_alias_under_duplicated_linked_field() { +#[tokio::test] +async fn same_alias_under_duplicated_linked_field() { let input = include_str!("validate_selection_conflict/fixtures/same-alias-under-duplicated-linked-field.graphql"); let expected = include_str!("validate_selection_conflict/fixtures/same-alias-under-duplicated-linked-field.expected"); - test_fixture(transform_fixture, "same-alias-under-duplicated-linked-field.graphql", "validate_selection_conflict/fixtures/same-alias-under-duplicated-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-alias-under-duplicated-linked-field.graphql", "validate_selection_conflict/fixtures/same-alias-under-duplicated-linked-field.expected", input, expected).await; } diff --git a/compiler/crates/graphql-ir/Cargo.toml b/compiler/crates/graphql-ir/Cargo.toml index ba720c3e95145..8c93b16851ceb 100644 --- a/compiler/crates/graphql-ir/Cargo.toml +++ b/compiler/crates/graphql-ir/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/graphql-ir:[graphql-ir,graphql-ir_test,graphql-ir_test_with_extensions,graphql-ir_test_with_provider] + [package] name = "graphql-ir" version = "0.0.0" authors = 
["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -23,15 +25,16 @@ common = { path = "../common" } errors = { path = "../errors" } fnv = "1.0" graphql-syntax = { path = "../graphql-syntax" } -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } lazy_static = "1.4" once_cell = "1.12" schema = { path = "../schema" } -serde = { version = "1.0.136", features = ["derive", "rc"] } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } graphql-cli = { path = "../graphql-cli" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/graphql-ir/src/associated_data.rs b/compiler/crates/graphql-ir/src/associated_data.rs index a7f77ced3678c..7392b3c6a0a5f 100644 --- a/compiler/crates/graphql-ir/src/associated_data.rs +++ b/compiler/crates/graphql-ir/src/associated_data.rs @@ -101,7 +101,13 @@ macro_rules! associated_data_impl { #[allow(dead_code)] pub fn find(directives: &[$crate::Directive]) -> Option<&Self> { use $crate::reexport::NamedItem; - directives.named(Self::directive_name()).map(|directive| { + directives + .named(Self::directive_name()) + .map(|directive| $name::from(directive).unwrap()) + } + + pub fn from(directive: &$crate::Directive) -> Option<&Self> { + Some( directive .data .as_ref() @@ -115,8 +121,8 @@ macro_rules! 
associated_data_impl { "data on @__", stringify!($name), " directive not of right type" - )) - }) + )), + ) } } }; @@ -138,7 +144,6 @@ impl AsAny for T { mod tests { use std::collections::hash_map::RandomState; use std::hash::BuildHasher; - use std::hash::Hasher; use once_cell::sync::Lazy; @@ -160,9 +165,7 @@ mod tests { static BUILD_HASHER: Lazy = Lazy::new(RandomState::new); fn hash(x: T) -> u64 { - let mut hasher = BUILD_HASHER.build_hasher(); - x.hash(&mut hasher); - hasher.finish() + BUILD_HASHER.hash_one(&x) } assert_eq!( diff --git a/compiler/crates/graphql-ir/src/build.rs b/compiler/crates/graphql-ir/src/build.rs index 37d1e92983a92..f9b2b501f14b4 100644 --- a/compiler/crates/graphql-ir/src/build.rs +++ b/compiler/crates/graphql-ir/src/build.rs @@ -12,14 +12,17 @@ use common::ArgumentName; use common::Diagnostic; use common::DiagnosticsResult; use common::DirectiveName; +use common::FeatureFlags; use common::Location; use common::NamedItem; +use common::ScalarName; use common::Span; use common::WithLocation; use errors::par_try_map; use errors::try2; use errors::try3; use errors::try_map; +use graphql_syntax::DefaultValue; use graphql_syntax::DirectiveLocation; use graphql_syntax::Identifier; use graphql_syntax::List; @@ -101,6 +104,10 @@ pub struct BuilderOptions { /// but operations without name are valid, and can be executed on a server. /// This option allows `build_ir` to use a default name for anonymous operations. pub default_anonymous_operation_name: Option, + + /// Whether scalar literals can be assigned to variables/arguments whose types are + /// custom scalars (declared in a schema extension). 
+ pub allow_custom_scalar_literals: bool, } /// Converts a self-contained corpus of definitions into typed IR, or returns @@ -109,12 +116,14 @@ pub struct BuilderOptions { pub fn build_ir_in_relay_mode( schema: &SDLSchema, definitions: &[graphql_syntax::ExecutableDefinition], + feature_flags: &FeatureFlags, ) -> DiagnosticsResult> { let builder_options = BuilderOptions { allow_undefined_fragment_spreads: false, fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, relay_mode: Some(RelayMode), default_anonymous_operation_name: None, + allow_custom_scalar_literals: !feature_flags.enable_strict_custom_scalars, }; build_ir_with_extra_features(schema, definitions, &builder_options) @@ -132,6 +141,7 @@ pub fn build_ir( fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, relay_mode: None, default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility }, ) } @@ -166,6 +176,7 @@ pub fn build_type_annotation( fragment_variables_semantic: FragmentVariablesSemantic::Disabled, relay_mode: None, default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility }, ); builder.build_type_annotation(annotation) @@ -187,6 +198,7 @@ pub fn build_directive( fragment_variables_semantic: FragmentVariablesSemantic::Disabled, relay_mode: None, default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility }, ); builder.build_directive(directive, directive_location) @@ -209,6 +221,7 @@ pub fn build_constant_value( fragment_variables_semantic: FragmentVariablesSemantic::Disabled, relay_mode: None, default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility }, ); builder.build_constant_value(value, type_, validation) @@ -229,6 +242,7 @@ pub fn build_variable_definitions( fragment_variables_semantic: FragmentVariablesSemantic::Disabled, relay_mode: None, default_anonymous_operation_name: None, + 
allow_custom_scalar_literals: true, // for compatibility }, ); builder.build_variable_definitions(definitions) @@ -332,7 +346,7 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { operation: &graphql_syntax::OperationDefinition, ) -> DiagnosticsResult { match &operation.name { - Some(name) => Ok(name.clone()), + Some(name) => Ok(*name), None => { if let Some(name) = self.options.default_anonymous_operation_name { Ok(Identifier { @@ -361,8 +375,7 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { let kind = operation .operation .as_ref() - .map(|x| x.1) - .unwrap_or_else(|| OperationKind::Query); + .map_or_else(|| OperationKind::Query, |x| x.1); let operation_type = match kind { OperationKind::Mutation => self.schema.mutation_type(), OperationKind::Query => self.schema.query_type(), @@ -466,11 +479,7 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { ) -> DiagnosticsResult { let type_ = self.build_type_annotation_for_input(&definition.type_)?; let default_value = match &definition.default_value { - Some(default_value) => Some(WithLocation::from_span( - self.location.source_location(), - default_value.span, - self.build_constant_value(&default_value.value, &type_, ValidationLevel::Strict)?, - )), + Some(default_value) => Some(self.build_variable_default_value(default_value, &type_)?), None => None, }; let directives = self.build_directives( @@ -489,6 +498,20 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { }) } + fn build_variable_default_value( + &mut self, + default_value: &DefaultValue, + type_: &TypeReference, + ) -> DiagnosticsResult> { + let default_constant_value = + self.build_constant_value(&default_value.value, type_, ValidationLevel::Strict)?; + Ok(WithLocation::from_span( + self.location.source_location(), + default_value.span, + default_constant_value, + )) + } + fn build_type_annotation( &mut self, annotation: 
&graphql_syntax::TypeAnnotation, @@ -1212,7 +1235,7 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { None => { let possible_argument_names = argument_definitions .iter() - .map(|arg_def| arg_def.name.0) + .map(|arg_def| arg_def.name.item.0) .collect::>(); let suggestions = suggestion_list::suggestion_list( argument.name.value, @@ -1244,9 +1267,9 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { arguments .iter() .flat_map(|args| &args.items) - .all(|arg| arg.name.value != required_arg_def.name.0) + .all(|arg| arg.name.value != required_arg_def.name.item.0) }) - .map(|missing_arg| missing_arg.name.0) + .map(|missing_arg| missing_arg.name.item.0) .filter(is_non_nullable_field_required) .collect::>(); if !missing_arg_names.is_empty() { @@ -1545,8 +1568,8 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { let mut required_fields = type_definition .fields .iter() - .filter(|x| x.type_.is_non_null()) - .map(|x| x.name.0) + .filter(|x| x.type_.is_non_null() && x.default_value.is_none()) + .map(|x| x.name.item.0) .collect::(); let fields = object @@ -1638,12 +1661,19 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { match type_.nullable_type() { TypeReference::List(item_type) => match value { graphql_syntax::ConstantValue::List(list) => { - let items: DiagnosticsResult> = list - .items - .iter() - .map(|x| self.build_constant_value(x, item_type, enum_validation)) - .collect(); - Ok(ConstantValue::List(items?)) + let mut items = vec![]; + let mut errors = vec![]; + for item in list.items.iter() { + match self.build_constant_value(item, item_type, enum_validation) { + Ok(v) => items.push(v), + Err(diagnostics) => errors.extend(diagnostics), + } + } + if !errors.is_empty() { + Err(errors) + } else { + Ok(ConstantValue::List(items)) + } } _ => { // List Input Coercion: @@ -1689,39 +1719,41 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 
'signatures, 'options> { let mut required_fields = type_definition .fields .iter() - .filter(|x| x.type_.is_non_null()) - .map(|x| x.name.0) + .filter(|x| x.type_.is_non_null() && x.default_value.is_none()) + .map(|x| x.name.item.0) .collect::(); - let fields = object - .items - .iter() - .map( - |x| match type_definition.fields.named(ArgumentName(x.name.value)) { - Some(field_definition) => { - required_fields.remove(&x.name.value); - let prev_span = seen_fields.insert(x.name.value, x.name.span); - if let Some(prev_span) = prev_span { - return Err(vec![ - Diagnostic::error( - ValidationMessage::DuplicateInputField(x.name.value), - self.location.with_span(prev_span), - ) - .annotate( - "also defined here", - self.location.with_span(x.name.span), - ), - ]); - }; - - let value_span = x.value.span(); - let value = self.build_constant_value( - &x.value, - &field_definition.type_, - validation, - )?; - Ok(ConstantArgument { - name: x + let mut errors = vec![]; + let mut fields = vec![]; + for obj_entry in object.items.iter() { + match type_definition + .fields + .named(ArgumentName(obj_entry.name.value)) + { + Some(field_definition) => { + required_fields.remove(&obj_entry.name.value); + let prev_span = seen_fields.insert(obj_entry.name.value, obj_entry.name.span); + if let Some(prev_span) = prev_span { + return Err(vec![ + Diagnostic::error( + ValidationMessage::DuplicateInputField(obj_entry.name.value), + self.location.with_span(prev_span), + ) + .annotate( + "also defined here", + self.location.with_span(obj_entry.name.span), + ), + ]); + }; + + let value_span = obj_entry.value.span(); + match self.build_constant_value( + &obj_entry.value, + &field_definition.type_, + validation, + ) { + Ok(value) => fields.push(ConstantArgument { + name: obj_entry .name .name_with_location(self.location.source_location()) .map(ArgumentName), @@ -1730,31 +1762,36 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { value_span, value, ), - }) + }), + 
Err(diagnostics) => errors.extend(diagnostics), } - None => Err(vec![Diagnostic::error( - ValidationMessageWithData::UnknownField { - type_: type_definition.name.item.0, - field: x.name.value, - suggestions: self.suggestions.field_name_suggestion( - self.schema.get_type(type_definition.name.item.0), - x.name.value, - ), - }, - self.location.with_span(x.name.span), - )]), - }, - ) - .collect::>>()?; - if required_fields.is_empty() { - Ok(ConstantValue::Object(fields)) - } else { + } + None => errors.push(Diagnostic::error( + ValidationMessageWithData::UnknownField { + type_: type_definition.name.item.0, + field: obj_entry.name.value, + suggestions: self.suggestions.field_name_suggestion( + self.schema.get_type(type_definition.name.item.0), + obj_entry.name.value, + ), + }, + self.location.with_span(obj_entry.name.span), + )), + } + } + if !required_fields.is_empty() { let mut missing: Vec = required_fields.into_iter().collect(); missing.sort(); - Err(vec![Diagnostic::error( + errors.push(Diagnostic::error( ValidationMessage::MissingRequiredFields(missing, type_definition.name.item.0), self.location.with_span(object.span), - )]) + )); + } + + if !errors.is_empty() { + Err(errors) + } else { + Ok(ConstantValue::Object(fields)) } } @@ -1843,46 +1880,101 @@ impl<'schema, 'signatures, 'options> Builder<'schema, 'signatures, 'options> { self.location.with_span(value.span()), )]), }, - _ => match value { - graphql_syntax::ConstantValue::Null(_) => Ok(ConstantValue::Null()), - graphql_syntax::ConstantValue::Int(node) => Ok(ConstantValue::Int(node.value)), - graphql_syntax::ConstantValue::Float(node) => Ok(ConstantValue::Float(node.value)), - graphql_syntax::ConstantValue::Boolean(node) => { - Ok(ConstantValue::Boolean(node.value)) - } - graphql_syntax::ConstantValue::String(node) => { - Ok(ConstantValue::String(node.value)) - } - graphql_syntax::ConstantValue::List(node) => { - let mut list_items = Vec::with_capacity(node.items.capacity()); - for item in node.items.iter() { 
- list_items.push(self.build_constant_scalar(item, type_definition)?) + _ => { + // if we're here, the type is considered a "custom" scalar + let constant_value = match value { + graphql_syntax::ConstantValue::Null(_) => Ok(ConstantValue::Null()), + graphql_syntax::ConstantValue::Int(node) => Ok(ConstantValue::Int(node.value)), + graphql_syntax::ConstantValue::Float(node) => { + Ok(ConstantValue::Float(node.value)) } - Ok(ConstantValue::List(list_items)) - } - graphql_syntax::ConstantValue::Object(node) => { - let mut object_props = Vec::with_capacity(node.items.capacity()); - for item in node.items.iter() { - object_props.push(ConstantArgument { - name: WithLocation { - location: self.location.with_span(item.span), - item: ArgumentName(item.name.value), - }, - value: WithLocation { - location: self.location.with_span(item.value.span()), - item: self.build_constant_scalar(&item.value, type_definition)?, - }, - }) + graphql_syntax::ConstantValue::Boolean(node) => { + Ok(ConstantValue::Boolean(node.value)) + } + graphql_syntax::ConstantValue::String(node) => { + Ok(ConstantValue::String(node.value)) + } + graphql_syntax::ConstantValue::List(node) => { + self.ensure_custom_scalars_allowed( + type_definition.name.item, + "list", + value.span(), + )?; + let mut list_items = Vec::with_capacity(node.items.capacity()); + for item in node.items.iter() { + list_items.push(self.build_constant_scalar(item, type_definition)?) 
+ } + Ok(ConstantValue::List(list_items)) + } + graphql_syntax::ConstantValue::Object(node) => { + self.ensure_custom_scalars_allowed( + type_definition.name.item, + "object", + value.span(), + )?; + let mut object_props = Vec::with_capacity(node.items.capacity()); + for item in node.items.iter() { + object_props.push(ConstantArgument { + name: WithLocation { + location: self.location.with_span(item.span), + item: ArgumentName(item.name.value), + }, + value: WithLocation { + location: self.location.with_span(item.value.span()), + item: self + .build_constant_scalar(&item.value, type_definition)?, + }, + }) + } + Ok(ConstantValue::Object(object_props)) + } + graphql_syntax::ConstantValue::Enum(_) => { + self.ensure_custom_scalars_allowed( + type_definition.name.item, + "enum", + value.span(), + )?; + Err(vec![Diagnostic::error( + ValidationMessage::UnsupportedCustomScalarType( + type_definition.name.item.0, + ), + self.location.with_span(value.span()), + )]) } - Ok(ConstantValue::Object(object_props)) + }?; + if !self.options.allow_custom_scalar_literals { + return Err(vec![Diagnostic::error( + ValidationMessage::UnexpectedCustomScalarLiteral { + literal_value: format!("{}", value), + scalar_type_name: type_definition.name.item, + }, + self.location.with_span(value.span()), + )]); } - graphql_syntax::ConstantValue::Enum(_) => Err(vec![Diagnostic::error( - ValidationMessage::UnsupportedCustomScalarType(type_definition.name.item.0), - self.location.with_span(value.span()), - )]), - }, + Ok(constant_value) + } } } + + fn ensure_custom_scalars_allowed( + &self, + scalar_type_name: ScalarName, + literal_kind: &str, + value_span: Span, + ) -> DiagnosticsResult<()> { + if !self.options.allow_custom_scalar_literals { + Err(vec![Diagnostic::error( + ValidationMessage::UnexpectedNonScalarLiteralForCustomScalar { + literal_kind: literal_kind.to_string(), + scalar_type_name, + }, + self.location.with_span(value_span), + )]) + } else { + Ok(()) + } + } + fn lookup_field( &self, 
parent_type: Type, diff --git a/compiler/crates/graphql-ir/src/errors.rs b/compiler/crates/graphql-ir/src/errors.rs index 19013db94da3b..ba9eb38a3164f 100644 --- a/compiler/crates/graphql-ir/src/errors.rs +++ b/compiler/crates/graphql-ir/src/errors.rs @@ -10,6 +10,7 @@ use std::fmt::Display; use common::ArgumentName; use common::DiagnosticDisplay; use common::DirectiveName; +use common::ScalarName; use common::WithDiagnosticData; use graphql_syntax::OperationKind; use intern::string_key::StringKey; @@ -32,7 +33,18 @@ impl Display for ErrorLink { } /// Fixed set of validation errors with custom display messages -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type", content = "args")] pub enum ValidationMessage { #[error("Duplicate definitions for '{0}'")] DuplicateDefinition(StringKey), @@ -220,7 +232,7 @@ pub enum ValidationMessage { }, #[error( - "Expected field '{connection_field_name}' to have a '{first_arg}' or '{last_arg}' argument." + "Expected field '{connection_field_name}' to be passed a '{first_arg}' or '{last_arg}' argument." )] ExpectedConnectionToHaveCountArgs { connection_field_name: StringKey, @@ -228,7 +240,9 @@ pub enum ValidationMessage { last_arg: ArgumentName, }, - #[error("Expected '{connection_field_name}' to have a '{edges_selection_name}' selection.")] + #[error( + "Expected '{connection_field_name}' to be passed a '{edges_selection_name}' selection." + )] ExpectedConnectionToHaveEdgesSelection { connection_field_name: StringKey, edges_selection_name: StringKey, @@ -334,6 +348,16 @@ pub enum ValidationMessage { filters_arg_name: ArgumentName, }, + #[error( + "Expected the `{filters_arg_name}` argument to `@{connection_directive_name}` to be a list of argument names to the connection field to use to identify the connection, got `{invalid_name}`. 
Not specifying `filters` is often recommended and will use all fields." + )] + InvalidConnectionFiltersArgNotAnArgument { + connection_directive_name: DirectiveName, + connection_field_name: StringKey, + filters_arg_name: ArgumentName, + invalid_name: StringKey, + }, + #[error("@stream_connection does not support aliasing the '{field_name}' field.")] UnsupportedAliasingInStreamConnection { field_name: StringKey }, @@ -487,9 +511,64 @@ pub enum ValidationMessage { #[error("No fields can have an alias that start with two underscores.")] NoDoubleUnderscoreAlias, + + #[error( + "Unexpected scalar literal `{literal_value}` provided in a position expecting custom scalar type `{scalar_type_name}`. This value should come from a variable." + )] + UnexpectedCustomScalarLiteral { + literal_value: String, + scalar_type_name: ScalarName, + }, + + #[error( + "Unexpected {literal_kind} literal provided in a position expecting custom scalar type `{scalar_type_name}`." + )] + UnexpectedNonScalarLiteralForCustomScalar { + literal_kind: String, + scalar_type_name: ScalarName, + }, + + #[error( + "Unexpected `@required(action: THROW)` directive in mutation response. The use of `@required(action: THROW)` is not supported in mutations." + )] + RequiredInMutation, + + #[error( + "Unexpected `@throwOnFieldError` directive. The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled." + )] + ThrowOnFieldErrorNotEnabled, + + #[error( + "Unexpected `@RelayResolver` field referenced in mutation response. Relay Resolver fields may not be read as part of a mutation response." + )] + ResolverInMutation, + + #[error("Expected the `as` argument of the @alias directive to be a static string.")] + FragmentAliasDirectiveDynamicNameArg, + + #[error( + "Unexpected empty string supplied for `as` argument of the @alias directive. If provided, the `as` argument of the `@alias` directive must be a non-empty string literal." 
+ )] + FragmentAliasIsEmptyString, + + #[error( + "Missing required argument `as`. The `as` argument of the @alias directive is required on inline fragments without a type condition." + )] + FragmentAliasDirectiveMissingAs, } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum ValidationMessageWithData { #[error("Unknown type '{type_name}'.{suggestions}", suggestions = did_you_mean(suggestions))] UnknownType { @@ -523,6 +602,11 @@ pub enum ValidationMessageWithData { argument_name: StringKey, suggestions: Vec, }, + + #[error( + "The directive `@dangerously_unaliased_fixme` is unsafe and should be replaced with `@alias`." + )] + DeprecatedDangerouslyUnaliasedDirective, } impl WithDiagnosticData for ValidationMessageWithData { @@ -538,6 +622,9 @@ impl WithDiagnosticData for ValidationMessageWithData { ValidationMessageWithData::ExpectedSelectionsOnObjectField { field_name, .. 
} => { vec![Box::new(format!("{} {{ }}", field_name))] } + ValidationMessageWithData::DeprecatedDangerouslyUnaliasedDirective => { + vec![Box::new("@alias".to_string())] + } } } } diff --git a/compiler/crates/graphql-ir/src/ir.rs b/compiler/crates/graphql-ir/src/ir.rs index 63a0b1e7f4a48..d12cde91a9a13 100644 --- a/compiler/crates/graphql-ir/src/ir.rs +++ b/compiler/crates/graphql-ir/src/ir.rs @@ -14,18 +14,22 @@ use std::hash::Hash; use std::str::FromStr; use std::sync::Arc; +use ::intern::impl_lookup; +use ::intern::intern; +use ::intern::string_key::Intern; +use ::intern::string_key::StringKey; +use ::intern::BuildIdHasher; +use ::intern::Lookup; use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; use common::DirectiveName; use common::Location; use common::Named; +use common::NamedItem; use common::WithLocation; use graphql_syntax::FloatValue; use graphql_syntax::OperationKind; -use intern::impl_lookup; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::BuildIdHasher; -use intern::Lookup; use schema::FieldID; use schema::SDLSchema; use schema::Schema; @@ -35,6 +39,7 @@ use serde::Deserialize; use serde::Serialize; use crate::AssociatedData; +use crate::ValidationMessage; // Definitions #[derive(Clone, Debug, Eq, PartialEq)] @@ -202,6 +207,18 @@ impl From for StringKey { } } +impl From for StringKey { + fn from(operation_definition_name: OperationDefinitionName) -> Self { + operation_definition_name.0 + } +} + +impl From for StringKey { + fn from(fragment_definition_name: FragmentDefinitionName) -> Self { + fragment_definition_name.0 + } +} + impl Lookup for ExecutableDefinitionName { fn lookup(self) -> &'static str { match self { @@ -222,7 +239,17 @@ impl ExecutableDefinitionName { } } -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] pub struct VariableName(pub StringKey); 
impl Display for VariableName { @@ -380,6 +407,23 @@ pub struct FragmentSpread { pub directives: Vec, } +impl FragmentSpread { + // Get the alias of this fragment spread from the optional `@alias` directive. + // If the `as` argument is not specified, the fragment name is used as the fallback. + pub fn alias(&self) -> DiagnosticsResult>> { + if let Some(directive) = self.directives.named(DirectiveName(intern!("alias"))) { + Ok(alias_arg_as(directive)?.or_else(|| { + Some(WithLocation::new( + directive.name.location, + self.fragment.item.0, + )) + })) + } else { + Ok(None) + } + } +} + /// ... SelectionSet /// ... on Type SelectionSet #[derive(Clone, Debug, Eq, PartialEq)] @@ -390,6 +434,34 @@ pub struct InlineFragment { /// Points to "..." pub spread_location: Location, } + +impl InlineFragment { + /// Get the alias of this inline fragment from the optional `@alias` directive. + /// If the `as` argument is not present, the type condition is used as the fallback. + /// Is is an error to omit the `as` argument if the inline fragment does not + /// have a type condition. + pub fn alias(&self, schema: &SDLSchema) -> DiagnosticsResult>> { + if let Some(directive) = self.directives.named(DirectiveName(intern!("alias"))) { + if let Some(alias) = alias_arg_as(directive)? 
{ + Ok(Some(alias)) + } else { + match self.type_condition { + Some(type_condition) => Ok(Some(WithLocation::new( + directive.name.location, + schema.get_type_name(type_condition), + ))), + None => Err(vec![Diagnostic::error( + ValidationMessage::FragmentAliasDirectiveMissingAs, + directive.name.location, + )]), + } + } + } else { + Ok(None) + } + } +} + pub trait Field { fn alias(&self) -> Option>; fn definition(&self) -> WithLocation; @@ -466,7 +538,7 @@ impl Field for ScalarField { } } -/// https://spec.graphql.org/June2018/#sec--skip +/// #[derive(Clone, Debug, Eq, PartialEq)] pub struct Condition { pub selections: Vec, @@ -566,7 +638,7 @@ pub struct Variable { pub type_: TypeReference, } -/// Name : Value[Const] +/// Name : Value\[Const\] #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct ConstantArgument { pub name: WithLocation, @@ -632,3 +704,26 @@ pub enum ConditionValue { Constant(bool), Variable(Variable), } + +/// Extract the `as` argument from the `@alias` directive +fn alias_arg_as(alias_directive: &Directive) -> DiagnosticsResult>> { + match alias_directive.arguments.named(ArgumentName(intern!("as"))) { + Some(arg) => match arg.value.item { + Value::Constant(ConstantValue::String(alias)) => { + if alias == intern!("") { + Err(vec![Diagnostic::error( + ValidationMessage::FragmentAliasIsEmptyString, + arg.value.location, + )]) + } else { + Ok(Some(WithLocation::new(arg.value.location, alias))) + } + } + _ => Err(vec![Diagnostic::error( + ValidationMessage::FragmentAliasDirectiveDynamicNameArg, + alias_directive.name.location, + )]), + }, + None => Ok(None), + } +} diff --git a/compiler/crates/graphql-ir/src/lib.rs b/compiler/crates/graphql-ir/src/lib.rs index c70d628effea9..0d4945ab0d15e 100644 --- a/compiler/crates/graphql-ir/src/lib.rs +++ b/compiler/crates/graphql-ir/src/lib.rs @@ -39,6 +39,7 @@ pub use signatures::ProvidedVariableMetadata; pub use signatures::UNUSED_LOCAL_VARIABLE_DEPRECATED; pub use transform::transform_list; pub use 
transform::transform_list_multi; +pub use transform::TransformProgramPipe; pub use transform::Transformed; pub use transform::TransformedMulti; pub use transform::TransformedValue; @@ -47,6 +48,7 @@ pub use validator::Validator; pub use visitor::Visitor; pub use crate::errors::ValidationMessage; +pub use crate::errors::ValidationMessageWithData; /// Re-exported values to be used by the `associated_data_impl!` macro. pub mod reexport { diff --git a/compiler/crates/graphql-ir/src/program.rs b/compiler/crates/graphql-ir/src/program.rs index cf266f0971598..57fcfb208dbfc 100644 --- a/compiler/crates/graphql-ir/src/program.rs +++ b/compiler/crates/graphql-ir/src/program.rs @@ -125,7 +125,7 @@ impl Program { pub fn merge_program( &mut self, other_program: &Self, - removed_definition_names: Option<&[ExecutableDefinitionName]>, + removed_definition_names: Option>, ) { let mut operations: HashMap< OperationDefinitionName, @@ -147,15 +147,14 @@ impl Program { for removed in removed_definition_names { match removed { ExecutableDefinitionName::OperationDefinitionName(name) => { - operations.remove(name); + operations.remove(&name); } ExecutableDefinitionName::FragmentDefinitionName(name) => { - self.fragments.remove(name); + self.fragments.remove(&name); } }; } } - self.operations - .extend(operations.into_iter().map(|(_, op)| op)); + self.operations.extend(operations.into_values()); } } diff --git a/compiler/crates/graphql-ir/src/signatures.rs b/compiler/crates/graphql-ir/src/signatures.rs index 3b866843bf482..b0b3ab927af37 100644 --- a/compiler/crates/graphql-ir/src/signatures.rs +++ b/compiler/crates/graphql-ir/src/signatures.rs @@ -191,7 +191,7 @@ fn build_fragment_signature( }) .or_else(|| { argument_definition_directives - .get(0) + .first() .map(|x| build_fragment_variable_definitions(schema, fragment, x)) }) .unwrap_or_else(|| Ok(Default::default())); diff --git a/compiler/crates/graphql-ir/src/transform.rs b/compiler/crates/graphql-ir/src/transform.rs index 
fcb9a527b5aad..ce48df44e4e4b 100644 --- a/compiler/crates/graphql-ir/src/transform.rs +++ b/compiler/crates/graphql-ir/src/transform.rs @@ -503,6 +503,75 @@ where } } +#[derive(Debug, Clone)] +pub struct TransformProgramPipe { + initial: Program, + transformed_value: TransformedValue, +} + +impl TransformProgramPipe { + pub fn new(initial: Program) -> Self { + Self { + initial, + transformed_value: TransformedValue::Keep, + } + } + + pub fn pipe(self, transformer: T) -> Self + where + T: Transformer, + { + let mut transformer = transformer; + let initial = self.initial; + let transformed_value = match self.transformed_value { + TransformedValue::Keep => transformer.transform_program(&initial), + TransformedValue::Replace(replacement) => TransformedValue::Replace( + transformer + .transform_program(&replacement) + .replace_or_else(|| replacement), + ), + }; + Self { + initial, + transformed_value, + } + } + + pub fn pipe_option(self, option: Option, get_transformer: F) -> Self + where + T: Transformer, + F: FnOnce(X) -> T, + { + if let Some(x) = option { + self.pipe(get_transformer(x)) + } else { + self + } + } + + pub fn compose(self, other: F) -> Self + where + F: FnOnce(Program) -> Self, + { + let initial = self.initial; + match self.transformed_value { + TransformedValue::Keep => other(initial), + TransformedValue::Replace(replacement) => other(replacement), + } + } + + pub fn collect(self) -> Program { + match self.transformed_value { + TransformedValue::Keep => self.initial, + TransformedValue::Replace(replacement) => replacement, + } + } + + pub fn transformed_value(self) -> TransformedValue { + self.transformed_value + } +} + #[derive(Clone, Debug)] pub enum Transformed { Delete, diff --git a/compiler/crates/graphql-ir/src/validator.rs b/compiler/crates/graphql-ir/src/validator.rs index e5ee464ef5040..762763027e468 100644 --- a/compiler/crates/graphql-ir/src/validator.rs +++ b/compiler/crates/graphql-ir/src/validator.rs @@ -42,6 +42,7 @@ pub trait Validator 
{ fragment: &FragmentDefinition, ) -> DiagnosticsResult<()> { validate!( + self.validate_variable_definitions(&fragment.variable_definitions), self.validate_selections(&fragment.selections), self.validate_directives(&fragment.directives) ) @@ -57,6 +58,7 @@ pub trait Validator { operation: &OperationDefinition, ) -> DiagnosticsResult<()> { validate!( + self.validate_variable_definitions(&operation.variable_definitions), self.validate_directives(&operation.directives), self.validate_selections(&operation.selections) ) @@ -198,6 +200,29 @@ pub trait Validator { self.validate_value(&argument.value.item) } + // Variable Definitions + fn validate_variable_definitions( + &mut self, + variables: &[VariableDefinition], + ) -> DiagnosticsResult<()> { + self.validate_list(variables, Self::validate_variable_definition) + } + + fn validate_variable_definition( + &mut self, + variable_definition: &VariableDefinition, + ) -> DiagnosticsResult<()> { + self.default_validate_variable_definition(variable_definition) + } + + fn default_validate_variable_definition( + &mut self, + variable_definition: &VariableDefinition, + ) -> DiagnosticsResult<()> { + let _ = variable_definition; + Ok(()) + } + // Values fn validate_value(&mut self, value: &Value) -> DiagnosticsResult<()> { self.default_validate_value(value) diff --git a/compiler/crates/graphql-ir/src/visitor.rs b/compiler/crates/graphql-ir/src/visitor.rs index 4799c048afde6..8b0b5f59455fe 100644 --- a/compiler/crates/graphql-ir/src/visitor.rs +++ b/compiler/crates/graphql-ir/src/visitor.rs @@ -26,6 +26,13 @@ pub trait Visitor { } } + fn visit_executable_definition(&mut self, definition: &ExecutableDefinition) { + match definition { + ExecutableDefinition::Operation(operation) => self.visit_operation(operation), + ExecutableDefinition::Fragment(fragment) => self.visit_fragment(fragment), + } + } + // Fragment Definition fn visit_fragment(&mut self, fragment: &FragmentDefinition) { self.default_visit_fragment(fragment) diff --git 
a/compiler/crates/graphql-ir/tests/parse.rs b/compiler/crates/graphql-ir/tests/parse.rs new file mode 100644 index 0000000000000..99e6ffad128bf --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse.rs @@ -0,0 +1,43 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use fnv::FnvHashMap; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build; +use graphql_syntax::parse_executable_with_features; +use graphql_syntax::FragmentArgumentSyntaxKind; +use graphql_syntax::ParserFeatures; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let features = ParserFeatures { + fragment_argument_capability: + FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, + }; + let ast = parse_executable_with_features(fixture.content, source_location, features).unwrap(); + let mut sources = FnvHashMap::default(); + sources.insert(source_location, fixture.content); + + build(&TEST_SCHEMA, &ast.definitions) + .map(|x| format!("{:#?}", x)) + .map_err(|errors| { + errors + .into_iter() + .map(|error| { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + printer.diagnostic_to_string(&error) + }) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/argument_definitions.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_definitions.expected index 62bb8f4f84610..1650f9ccc3b2c 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/argument_definitions.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_definitions.expected @@ -69,7 +69,7 @@ fragment 
TestFragment on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -87,7 +87,7 @@ fragment TestFragment on User alias: None, definition: WithLocation { location: argument_definitions.graphql:161:171, - item: FieldID(517), + item: FieldID(526), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.expected new file mode 100644 index 0000000000000..054b480c825e9 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.expected @@ -0,0 +1,103 @@ +==================================== INPUT ==================================== +mutation ArgumentWithDefault { + feedbackUnLike(input: {feedbackId: "123"}) { + feedback { + body { + text + } + } + } +} +==================================== OUTPUT =================================== +[ + Operation( + OperationDefinition { + kind: Mutation, + name: WithLocation { + location: argument_with_default.graphql:9:28, + item: OperationDefinitionName( + "ArgumentWithDefault", + ), + }, + type_: Object(7), + variable_definitions: [], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: argument_with_default.graphql:33:47, + item: FieldID(42), + }, + arguments: [ + Argument { + name: WithLocation { + location: argument_with_default.graphql:48:53, + item: ArgumentName( + "input", + ), + }, + value: WithLocation { + location: argument_with_default.graphql:55:74, + item: Constant( + Object( + [ + ConstantArgument { + name: WithLocation { + location: argument_with_default.graphql:56:66, + item: ArgumentName( + "feedbackId", + ), + }, + value: WithLocation { + location: argument_with_default.graphql:68:73, + item: String( + "123", + ), + }, + }, + ], + ), + ), + }, + }, + ], + directives: [], + selections: [ + LinkedField 
{ + alias: None, + definition: WithLocation { + location: argument_with_default.graphql:82:90, + item: FieldID(169), + }, + arguments: [], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: argument_with_default.graphql:99:103, + item: FieldID(139), + }, + arguments: [], + directives: [], + selections: [ + ScalarField { + alias: None, + definition: WithLocation { + location: argument_with_default.graphql:114:118, + item: FieldID(410), + }, + arguments: [], + directives: [], + }, + ], + }, + ], + }, + ], + }, + ], + }, + ), +] diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.graphql b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.graphql new file mode 100644 index 0000000000000..02d185a6567cd --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/argument_with_default.graphql @@ -0,0 +1,9 @@ +mutation ArgumentWithDefault { + feedbackUnLike(input: {feedbackId: "123"}) { + feedback { + body { + text + } + } + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/directive-generic.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/directive-generic.expected index 0d7771380d05f..cd17934994ec6 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/directive-generic.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/directive-generic.expected @@ -14,14 +14,14 @@ fragment TestFragment on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: directive-generic.graphql:34:36, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [ diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/directive-include.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/directive-include.expected index 
d0fe3cb666eeb..3de8e81daf6fa 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/directive-include.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/directive-include.expected @@ -38,7 +38,7 @@ fragment Foo on User { directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ Condition { @@ -47,7 +47,7 @@ fragment Foo on User { alias: None, definition: WithLocation { location: directive-include.graphql:34:36, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], @@ -75,7 +75,7 @@ fragment Foo on User { selections: [ InlineFragment { type_condition: Some( - Object(69), + Object(70), ), directives: [], selections: [ @@ -83,7 +83,7 @@ fragment Foo on User { alias: None, definition: WithLocation { location: directive-include.graphql:97:106, - item: FieldID(457), + item: FieldID(459), }, arguments: [], directives: [], @@ -154,14 +154,14 @@ fragment Foo on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: directive-include.graphql:168:170, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/enum-values.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/enum-values.expected index cf64f19a24125..45e29fe051f4f 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/enum-values.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/enum-values.expected @@ -34,7 +34,7 @@ query EnumValueQuery { alias: None, definition: WithLocation { location: enum-values.graphql:34:48, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -60,7 +60,7 @@ query EnumValueQuery { alias: None, definition: WithLocation { location: enum-values.graphql:72:75, - item: FieldID(179), + item: FieldID(182), }, 
arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/field-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/field-arguments.expected index 3c23493bc1bcc..ca39e17212ebb 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/field-arguments.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/field-arguments.expected @@ -55,7 +55,7 @@ query TestQuery { alias: None, definition: WithLocation { location: field-arguments.graphql:44:49, - item: FieldID(29), + item: FieldID(30), }, arguments: [], directives: [], @@ -92,7 +92,7 @@ query TestQuery { alias: None, definition: WithLocation { location: field-arguments.graphql:89:107, - item: FieldID(514), + item: FieldID(516), }, arguments: [], directives: [], @@ -144,7 +144,7 @@ query TestQuery { alias: None, definition: WithLocation { location: field-arguments.graphql:164:169, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fixme_fat_interface_on_union.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fixme_fat_interface_on_union.expected index 63f106aa66404..071319caaf86c 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fixme_fat_interface_on_union.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fixme_fat_interface_on_union.expected @@ -33,7 +33,7 @@ query Test { alias: None, definition: WithLocation { location: fixme_fat_interface_on_union.graphql:51:53, - item: FieldID(378), + item: FieldID(380), }, arguments: [], directives: [ diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments-syntax.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments-syntax.expected index fb574b6eba549..c4a8a5705dc66 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments-syntax.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments-syntax.expected @@ -85,14 +85,14 @@ fragment Foo($localId: ID!) on User { directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { alias: None, definition: WithLocation { location: fragment-with-arguments-syntax.graphql:71:85, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -128,7 +128,7 @@ fragment Foo($localId: ID!) on User { alias: None, definition: WithLocation { location: fragment-with-arguments-syntax.graphql:112:115, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -144,7 +144,7 @@ fragment Foo($localId: ID!) on User { ), definition: WithLocation { location: fragment-with-arguments-syntax.graphql:138:152, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -180,7 +180,7 @@ fragment Foo($localId: ID!) on User { alias: None, definition: WithLocation { location: fragment-with-arguments-syntax.graphql:172:175, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -253,14 +253,14 @@ fragment Foo($localId: ID!) 
on User { }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: fragment-with-arguments-syntax.graphql:246:248, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments.expected index 14c4d8b9d3f4e..96624130be666 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-arguments.expected @@ -86,7 +86,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -104,7 +104,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { alias: None, definition: WithLocation { location: fragment-with-arguments.graphql:131:145, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -140,7 +140,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { alias: None, definition: WithLocation { location: fragment-with-arguments.graphql:172:175, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -156,7 +156,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { ), definition: WithLocation { location: fragment-with-arguments.graphql:198:212, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -192,7 +192,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { alias: None, definition: WithLocation { location: fragment-with-arguments.graphql:232:235, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -265,7 +265,7 @@ fragment Foo on User 
@argumentDefinitions(localId: {type: "ID!"}) { }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -283,7 +283,7 @@ fragment Foo on User @argumentDefinitions(localId: {type: "ID!"}) { alias: None, definition: WithLocation { location: fragment-with-arguments.graphql:347:349, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-arguments.expected index 1b57d3d0417fb..8de38bf89f8d1 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-arguments.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-arguments.expected @@ -21,7 +21,7 @@ fragment ChildFragment on User }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ FragmentSpread { @@ -84,7 +84,7 @@ fragment ChildFragment on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -102,7 +102,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-arguments.graphql:174:188, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -138,7 +138,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-arguments.graphql:215:218, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected index 7d2f284a36dfc..3ab7ce2e7a6da 100644 --- 
a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected @@ -24,7 +24,7 @@ fragment ChildFragment on User }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { @@ -36,7 +36,7 @@ fragment ChildFragment on User ), definition: WithLocation { location: fragment-with-literal-enum-arguments-into-enum-list.graphql:53:61, - item: FieldID(451), + item: FieldID(453), }, arguments: [ Argument { @@ -66,7 +66,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-enum-arguments-into-enum-list.graphql:92:97, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], @@ -133,7 +133,7 @@ fragment ChildFragment on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -151,7 +151,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-enum-arguments-into-enum-list.graphql:253:261, - item: FieldID(451), + item: FieldID(453), }, arguments: [ Argument { @@ -191,7 +191,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-enum-arguments-into-enum-list.graphql:297:302, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments.expected index 9af9d96a925e2..69c3af370caeb 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-arguments.expected @@ -62,7 +62,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-enum-arguments.graphql:90:98, - item: FieldID(513), + item: FieldID(515), }, arguments: [], directives: [], @@ -177,7 +177,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-enum-arguments.graphql:292:300, - item: FieldID(513), + item: FieldID(515), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-list-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-list-arguments.expected index 96bd8a1cc84e6..3655c9d423b17 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-list-arguments.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-enum-list-arguments.expected @@ -24,7 +24,7 @@ fragment ChildFragment on User }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { @@ -36,7 +36,7 @@ fragment ChildFragment on User ), definition: WithLocation { location: fragment-with-literal-enum-list-arguments.graphql:53:61, - item: FieldID(451), + item: FieldID(453), }, arguments: [ Argument { @@ -66,7 +66,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-enum-list-arguments.graphql:92:97, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], @@ -137,7 +137,7 @@ fragment ChildFragment on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -155,7 +155,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: 
fragment-with-literal-enum-list-arguments.graphql:255:263, - item: FieldID(451), + item: FieldID(453), }, arguments: [ Argument { @@ -195,7 +195,7 @@ fragment ChildFragment on User alias: None, definition: WithLocation { location: fragment-with-literal-enum-list-arguments.graphql:299:304, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-arguments.expected index 1c350ca70d9d9..73e2d9124cb04 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-arguments.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-arguments.expected @@ -77,7 +77,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-object-arguments.graphql:105:110, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], @@ -147,7 +147,7 @@ fragment ChildFragment on Query }, type_: NonNull( Named( - InputObject(16), + InputObject(17), ), ), default_value: None, @@ -194,7 +194,7 @@ fragment ChildFragment on Query ), }, type_: Named( - InputObject(16), + InputObject(17), ), }, ), @@ -207,7 +207,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-object-arguments.graphql:308:313, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-list-arguments.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-list-arguments.expected index 8a2d368b408be..944c57b367750 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-list-arguments.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-literal-object-list-arguments.expected @@ -77,7 +77,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-object-list-arguments.graphql:105:110, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], @@ -153,7 +153,7 @@ fragment ChildFragment on Query List( NonNull( Named( - InputObject(16), + InputObject(17), ), ), ), @@ -230,7 +230,7 @@ fragment ChildFragment on Query }, type_: List( Named( - InputObject(16), + InputObject(17), ), ), }, @@ -248,7 +248,7 @@ fragment ChildFragment on Query alias: None, definition: WithLocation { location: fragment-with-literal-object-list-arguments.graphql:340:345, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-variable-definitions-syntax.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-variable-definitions-syntax.expected index 362938db855ee..2924c81219797 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-variable-definitions-syntax.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment-with-variable-definitions-syntax.expected @@ -86,7 +86,7 @@ fragment Foo($localId: ID!) on User { directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -104,7 +104,7 @@ fragment Foo($localId: ID!) on User { alias: None, definition: WithLocation { location: fragment-with-variable-definitions-syntax.graphql:131:145, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -140,7 +140,7 @@ fragment Foo($localId: ID!) 
on User { alias: None, definition: WithLocation { location: fragment-with-variable-definitions-syntax.graphql:172:175, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -156,7 +156,7 @@ fragment Foo($localId: ID!) on User { ), definition: WithLocation { location: fragment-with-variable-definitions-syntax.graphql:198:212, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -192,7 +192,7 @@ fragment Foo($localId: ID!) on User { alias: None, definition: WithLocation { location: fragment-with-variable-definitions-syntax.graphql:232:235, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -265,14 +265,14 @@ fragment Foo($localId: ID!) on User { }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: fragment-with-variable-definitions-syntax.graphql:317:319, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment_with_arguments_defaulting.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment_with_arguments_defaulting.expected index 3d982c95e6153..f0bf71538b832 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/fragment_with_arguments_defaulting.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/fragment_with_arguments_defaulting.expected @@ -180,7 +180,7 @@ fragment F2 on Query @argumentDefinitions( alias: None, definition: WithLocation { location: fragment_with_arguments_defaulting.graphql:342:352, - item: FieldID(517), + item: FieldID(526), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/inline-untyped-fragment.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/inline-untyped-fragment.expected index 049a524f5378a..cd8f376789b71 100644 --- 
a/compiler/crates/graphql-ir/tests/parse/fixtures/inline-untyped-fragment.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/inline-untyped-fragment.expected @@ -16,7 +16,7 @@ fragment InlineUntypedFragment on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ InlineFragment { @@ -27,7 +27,7 @@ fragment InlineUntypedFragment on User { alias: None, definition: WithLocation { location: inline-untyped-fragment.graphql:53:57, - item: FieldID(466), + item: FieldID(468), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-filters.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-filters.expected index f00721eb36404..6df3a2daaf27d 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-filters.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-filters.expected @@ -33,14 +33,14 @@ fragment LinkedHandleField on User { directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { alias: None, definition: WithLocation { location: linked-handle-field-with-filters.graphql:39:46, - item: FieldID(458), + item: FieldID(460), }, arguments: [ Argument { @@ -143,7 +143,7 @@ fragment LinkedHandleField on User { alias: None, definition: WithLocation { location: linked-handle-field-with-filters.graphql:158:163, - item: FieldID(173), + item: FieldID(176), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-key.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-key.expected index aa7747e1b29ef..feddac413f4aa 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-key.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field-with-key.expected @@ -17,14 +17,14 @@ fragment LinkedHandleField on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { alias: None, definition: WithLocation { location: linked-handle-field-with-key.graphql:39:46, - item: FieldID(458), + item: FieldID(460), }, arguments: [ Argument { @@ -94,7 +94,7 @@ fragment LinkedHandleField on User { alias: None, definition: WithLocation { location: linked-handle-field-with-key.graphql:142:147, - item: FieldID(173), + item: FieldID(176), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field.expected index 49b684ce1095b..a394978779c09 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-field.expected @@ -18,14 +18,14 @@ fragment LinkedHandleField on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { alias: None, definition: WithLocation { location: linked-handle-field.graphql:39:46, - item: FieldID(458), + item: FieldID(460), }, arguments: [ Argument { @@ -106,7 +106,7 @@ fragment LinkedHandleField on User { alias: None, definition: WithLocation { location: linked-handle-field.graphql:151:156, - item: FieldID(173), + item: FieldID(176), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-filter.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-filter.expected index fa132d80dd3e9..62bf208fcd2ea 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-filter.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/linked-handle-filter.expected @@ -18,14 +18,14 @@ fragment LinkedHandleField on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ LinkedField { alias: None, definition: WithLocation { location: linked-handle-filter.graphql:39:46, - item: FieldID(458), + item: FieldID(460), }, arguments: [ Argument { @@ -126,7 +126,7 @@ fragment LinkedHandleField on User { alias: None, definition: WithLocation { location: linked-handle-filter.graphql:171:176, - item: FieldID(173), + item: FieldID(176), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument-complex-object.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument-complex-object.expected index e6f469c187741..e31df5f14223c 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument-complex-object.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument-complex-object.expected @@ -115,7 +115,7 @@ query ListArgumentQuery($waypoint: WayPoint!) { alias: None, definition: WithLocation { location: list-argument-complex-object.graphql:112:117, - item: FieldID(30), + item: FieldID(31), }, arguments: [], directives: [], @@ -124,7 +124,7 @@ query ListArgumentQuery($waypoint: WayPoint!) 
{ alias: None, definition: WithLocation { location: list-argument-complex-object.graphql:126:130, - item: FieldID(33), + item: FieldID(34), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument.expected index 950e2f9e9ed7a..d7a57465ef853 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/list-argument.expected @@ -84,7 +84,7 @@ query ListArgumentQuery { alias: None, definition: WithLocation { location: list-argument.graphql:79:84, - item: FieldID(30), + item: FieldID(31), }, arguments: [], directives: [], @@ -93,7 +93,7 @@ query ListArgumentQuery { alias: None, definition: WithLocation { location: list-argument.graphql:93:97, - item: FieldID(33), + item: FieldID(34), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/list-of-enums.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/list-of-enums.expected index 280ab5ccd77bb..2728e9ef940ae 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/list-of-enums.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/list-of-enums.expected @@ -14,14 +14,14 @@ fragment TestFragment on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: list-of-enums.graphql:34:40, - item: FieldID(483), + item: FieldID(485), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/literal-list-argument.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/literal-list-argument.expected index 48ad641897ed1..2333f7333b711 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/literal-list-argument.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/literal-list-argument.expected @@ -84,7 +84,7 @@ query LiteralListArgumentQuery { alias: None, definition: WithLocation { location: literal-list-argument.graphql:86:91, - item: FieldID(30), + item: FieldID(31), }, arguments: [], directives: [], @@ -93,7 +93,7 @@ query LiteralListArgumentQuery { alias: None, definition: WithLocation { location: literal-list-argument.graphql:100:104, - item: FieldID(33), + item: FieldID(34), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/literal-object-argument.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/literal-object-argument.expected index d3d38f9fab819..3725780960096 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/literal-object-argument.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/literal-object-argument.expected @@ -64,7 +64,7 @@ query LiteralObjectArgument { alias: None, definition: WithLocation { location: literal-object-argument.graphql:85:90, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/null-values.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/null-values.expected index 11a1ecdebebb3..ab6218b8b85ca 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/null-values.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/null-values.expected @@ -62,7 +62,7 @@ query NullValuesQuery { alias: None, definition: WithLocation { location: null-values.graphql:60:64, - item: FieldID(494), + item: FieldID(496), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/object-argument.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/object-argument.expected index 47aea3535eed8..8ac78d819a20a 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/object-argument.expected +++ 
b/compiler/crates/graphql-ir/tests/parse/fixtures/object-argument.expected @@ -88,7 +88,7 @@ query ObjectArgument($text: String!) { alias: None, definition: WithLocation { location: object-argument.graphql:89:94, - item: FieldID(54), + item: FieldID(56), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/scalar-handle-field.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/scalar-handle-field.expected index cb1f792048304..393e304c0b0e0 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/scalar-handle-field.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/scalar-handle-field.expected @@ -16,14 +16,14 @@ fragment ScalarHandleField on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: scalar-handle-field.graphql:39:43, - item: FieldID(466), + item: FieldID(468), }, arguments: [], directives: [ diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/simple-fragment.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/simple-fragment.expected index 23d81172fa414..ce86728e4f702 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/simple-fragment.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/simple-fragment.expected @@ -14,14 +14,14 @@ fragment TestFragment on User { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: simple-fragment.graphql:34:36, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/simple-query.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/simple-query.expected index d543383b7a7a2..016980ceaacbc 100644 --- 
a/compiler/crates/graphql-ir/tests/parse/fixtures/simple-query.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/simple-query.expected @@ -78,7 +78,7 @@ query TestQuery($id: ID!) { alias: None, definition: WithLocation { location: simple-query.graphql:55:57, - item: FieldID(214), + item: FieldID(217), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/unknown-fragment-type-suggestions.invalid.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/unknown-fragment-type-suggestions.invalid.expected index 832b62d6d4699..ce492bb6975f9 100644 --- a/compiler/crates/graphql-ir/tests/parse/fixtures/unknown-fragment-type-suggestions.invalid.expected +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/unknown-fragment-type-suggestions.invalid.expected @@ -4,7 +4,7 @@ fragment Foo on Users { id } ==================================== ERROR ==================================== -✖︎ Unknown type 'Users'. Did you mean `User` or `Query`? +✖︎ Unknown type 'Users'. Did you mean `User`, `Opera`, or `Query`? unknown-fragment-type-suggestions.invalid.graphql:2:17 1 │ # expected-to-throw diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.expected b/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.expected new file mode 100644 index 0000000000000..4c3c8e0dcc2b3 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.expected @@ -0,0 +1,134 @@ +==================================== INPUT ==================================== +mutation VariableWithDefault($input: FeedbackUnLikeInput! 
= { feedbackId: "123"}) { + feedbackUnLike(input: $input) { + feedback { + body { + text + } + } + } +} +==================================== OUTPUT =================================== +[ + Operation( + OperationDefinition { + kind: Mutation, + name: WithLocation { + location: variable_with_default.graphql:9:28, + item: OperationDefinitionName( + "VariableWithDefault", + ), + }, + type_: Object(7), + variable_definitions: [ + VariableDefinition { + name: WithLocation { + location: variable_with_default.graphql:29:35, + item: VariableName( + "input", + ), + }, + type_: NonNull( + Named( + InputObject(12), + ), + ), + default_value: Some( + WithLocation { + location: variable_with_default.graphql:58:80, + item: Object( + [ + ConstantArgument { + name: WithLocation { + location: variable_with_default.graphql:62:72, + item: ArgumentName( + "feedbackId", + ), + }, + value: WithLocation { + location: variable_with_default.graphql:74:79, + item: String( + "123", + ), + }, + }, + ], + ), + }, + ), + directives: [], + }, + ], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: variable_with_default.graphql:86:100, + item: FieldID(42), + }, + arguments: [ + Argument { + name: WithLocation { + location: variable_with_default.graphql:101:106, + item: ArgumentName( + "input", + ), + }, + value: WithLocation { + location: variable_with_default.graphql:108:114, + item: Variable( + Variable { + name: WithLocation { + location: variable_with_default.graphql:108:114, + item: VariableName( + "input", + ), + }, + type_: Named( + InputObject(12), + ), + }, + ), + }, + }, + ], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: variable_with_default.graphql:122:130, + item: FieldID(169), + }, + arguments: [], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: variable_with_default.graphql:139:143, + item: FieldID(139), + }, + 
arguments: [], + directives: [], + selections: [ + ScalarField { + alias: None, + definition: WithLocation { + location: variable_with_default.graphql:154:158, + item: FieldID(410), + }, + arguments: [], + directives: [], + }, + ], + }, + ], + }, + ], + }, + ], + }, + ), +] diff --git a/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.graphql b/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.graphql new file mode 100644 index 0000000000000..b448c35ee63b1 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse/fixtures/variable_with_default.graphql @@ -0,0 +1,9 @@ +mutation VariableWithDefault($input: FeedbackUnLikeInput! = { feedbackId: "123"}) { + feedbackUnLike(input: $input) { + feedback { + body { + text + } + } + } +} \ No newline at end of file diff --git a/compiler/crates/graphql-ir/tests/parse/mod.rs b/compiler/crates/graphql-ir/tests/parse/mod.rs deleted file mode 100644 index a43eff45dcb09..0000000000000 --- a/compiler/crates/graphql-ir/tests/parse/mod.rs +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use fnv::FnvHashMap; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build; -use graphql_syntax::parse_executable_with_features; -use graphql_syntax::FragmentArgumentSyntaxKind; -use graphql_syntax::ParserFeatures; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let features = ParserFeatures { - fragment_argument_capability: - FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, - }; - let ast = parse_executable_with_features(fixture.content, source_location, features).unwrap(); - let mut sources = FnvHashMap::default(); - sources.insert(source_location, fixture.content); - - build(&TEST_SCHEMA, &ast.definitions) - .map(|x| format!("{:#?}", x)) - .map_err(|errors| { - errors - .into_iter() - .map(|error| { - let printer = DiagnosticPrinter::new(|_| { - Some(TextSource::from_whole_document(fixture.content.to_string())) - }); - printer.diagnostic_to_string(&error) - }) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/graphql-ir/tests/parse_test.rs b/compiler/crates/graphql-ir/tests/parse_test.rs index ffb5c50c6cd60..e1e3c49d43e4b 100644 --- a/compiler/crates/graphql-ir/tests/parse_test.rs +++ b/compiler/crates/graphql-ir/tests/parse_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<9dade3d0ff881e265617477148740319>> + * @generated SignedSource<> */ mod parse; @@ -12,618 +12,632 @@ mod parse; use parse::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn argument_complex_object_invalid() { +#[tokio::test] +async fn argument_complex_object_invalid() { let input = include_str!("parse/fixtures/argument-complex-object.invalid.graphql"); let expected = include_str!("parse/fixtures/argument-complex-object.invalid.expected"); - test_fixture(transform_fixture, "argument-complex-object.invalid.graphql", "parse/fixtures/argument-complex-object.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-complex-object.invalid.graphql", "parse/fixtures/argument-complex-object.invalid.expected", input, expected).await; } -#[test] -fn argument_definitions() { +#[tokio::test] +async fn argument_definitions() { let input = include_str!("parse/fixtures/argument_definitions.graphql"); let expected = include_str!("parse/fixtures/argument_definitions.expected"); - test_fixture(transform_fixture, "argument_definitions.graphql", "parse/fixtures/argument_definitions.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument_definitions.graphql", "parse/fixtures/argument_definitions.expected", input, expected).await; } -#[test] -fn argument_definitions_directives_invalid_directive_arg_invalid() { +#[tokio::test] +async fn argument_definitions_directives_invalid_directive_arg_invalid() { let input = include_str!("parse/fixtures/argument_definitions_directives_invalid_directive_arg.invalid.graphql"); let expected = include_str!("parse/fixtures/argument_definitions_directives_invalid_directive_arg.invalid.expected"); - test_fixture(transform_fixture, "argument_definitions_directives_invalid_directive_arg.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_directive_arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"argument_definitions_directives_invalid_directive_arg.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_directive_arg.invalid.expected", input, expected).await; } -#[test] -fn argument_definitions_directives_invalid_locations_invalid() { +#[tokio::test] +async fn argument_definitions_directives_invalid_locations_invalid() { let input = include_str!("parse/fixtures/argument_definitions_directives_invalid_locations.invalid.graphql"); let expected = include_str!("parse/fixtures/argument_definitions_directives_invalid_locations.invalid.expected"); - test_fixture(transform_fixture, "argument_definitions_directives_invalid_locations.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_locations.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument_definitions_directives_invalid_locations.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_locations.invalid.expected", input, expected).await; } -#[test] -fn argument_definitions_directives_invalid_syntax_invalid() { +#[tokio::test] +async fn argument_definitions_directives_invalid_syntax_invalid() { let input = include_str!("parse/fixtures/argument_definitions_directives_invalid_syntax.invalid.graphql"); let expected = include_str!("parse/fixtures/argument_definitions_directives_invalid_syntax.invalid.expected"); - test_fixture(transform_fixture, "argument_definitions_directives_invalid_syntax.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_syntax.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument_definitions_directives_invalid_syntax.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_syntax.invalid.expected", input, expected).await; } -#[test] -fn argument_definitions_directives_invalid_type_invalid() { +#[tokio::test] +async fn argument_definitions_directives_invalid_type_invalid() { let input = 
include_str!("parse/fixtures/argument_definitions_directives_invalid_type.invalid.graphql"); let expected = include_str!("parse/fixtures/argument_definitions_directives_invalid_type.invalid.expected"); - test_fixture(transform_fixture, "argument_definitions_directives_invalid_type.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument_definitions_directives_invalid_type.invalid.graphql", "parse/fixtures/argument_definitions_directives_invalid_type.invalid.expected", input, expected).await; } -#[test] -fn argument_definitions_typo_invalid() { +#[tokio::test] +async fn argument_definitions_typo_invalid() { let input = include_str!("parse/fixtures/argument_definitions_typo.invalid.graphql"); let expected = include_str!("parse/fixtures/argument_definitions_typo.invalid.expected"); - test_fixture(transform_fixture, "argument_definitions_typo.invalid.graphql", "parse/fixtures/argument_definitions_typo.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument_definitions_typo.invalid.graphql", "parse/fixtures/argument_definitions_typo.invalid.expected", input, expected).await; } -#[test] -fn complex_object_with_invalid_constant_fields_invalid() { +#[tokio::test] +async fn argument_with_default() { + let input = include_str!("parse/fixtures/argument_with_default.graphql"); + let expected = include_str!("parse/fixtures/argument_with_default.expected"); + test_fixture(transform_fixture, file!(), "argument_with_default.graphql", "parse/fixtures/argument_with_default.expected", input, expected).await; +} + +#[tokio::test] +async fn complex_object_with_invalid_constant_fields_invalid() { let input = include_str!("parse/fixtures/complex-object-with-invalid-constant-fields.invalid.graphql"); let expected = include_str!("parse/fixtures/complex-object-with-invalid-constant-fields.invalid.expected"); - test_fixture(transform_fixture, 
"complex-object-with-invalid-constant-fields.invalid.graphql", "parse/fixtures/complex-object-with-invalid-constant-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-object-with-invalid-constant-fields.invalid.graphql", "parse/fixtures/complex-object-with-invalid-constant-fields.invalid.expected", input, expected).await; } -#[test] -fn complex_object_with_invalid_fields_invalid() { +#[tokio::test] +async fn complex_object_with_invalid_fields_invalid() { let input = include_str!("parse/fixtures/complex-object-with-invalid-fields.invalid.graphql"); let expected = include_str!("parse/fixtures/complex-object-with-invalid-fields.invalid.expected"); - test_fixture(transform_fixture, "complex-object-with-invalid-fields.invalid.graphql", "parse/fixtures/complex-object-with-invalid-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-object-with-invalid-fields.invalid.graphql", "parse/fixtures/complex-object-with-invalid-fields.invalid.expected", input, expected).await; } -#[test] -fn complex_object_with_missing_fields_invalid() { +#[tokio::test] +async fn complex_object_with_missing_fields_invalid() { let input = include_str!("parse/fixtures/complex-object-with-missing-fields.invalid.graphql"); let expected = include_str!("parse/fixtures/complex-object-with-missing-fields.invalid.expected"); - test_fixture(transform_fixture, "complex-object-with-missing-fields.invalid.graphql", "parse/fixtures/complex-object-with-missing-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-object-with-missing-fields.invalid.graphql", "parse/fixtures/complex-object-with-missing-fields.invalid.expected", input, expected).await; } -#[test] -fn directive_generic() { +#[tokio::test] +async fn directive_generic() { let input = include_str!("parse/fixtures/directive-generic.graphql"); let expected = include_str!("parse/fixtures/directive-generic.expected"); - 
test_fixture(transform_fixture, "directive-generic.graphql", "parse/fixtures/directive-generic.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-generic.graphql", "parse/fixtures/directive-generic.expected", input, expected).await; } -#[test] -fn directive_include() { +#[tokio::test] +async fn directive_include() { let input = include_str!("parse/fixtures/directive-include.graphql"); let expected = include_str!("parse/fixtures/directive-include.expected"); - test_fixture(transform_fixture, "directive-include.graphql", "parse/fixtures/directive-include.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-include.graphql", "parse/fixtures/directive-include.expected", input, expected).await; } -#[test] -fn directive_match_on_fragment_invalid() { +#[tokio::test] +async fn directive_match_on_fragment_invalid() { let input = include_str!("parse/fixtures/directive-match-on-fragment.invalid.graphql"); let expected = include_str!("parse/fixtures/directive-match-on-fragment.invalid.expected"); - test_fixture(transform_fixture, "directive-match-on-fragment.invalid.graphql", "parse/fixtures/directive-match-on-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-match-on-fragment.invalid.graphql", "parse/fixtures/directive-match-on-fragment.invalid.expected", input, expected).await; } -#[test] -fn directive_missing_required_argument_invalid() { +#[tokio::test] +async fn directive_missing_required_argument_invalid() { let input = include_str!("parse/fixtures/directive_missing_required_argument.invalid.graphql"); let expected = include_str!("parse/fixtures/directive_missing_required_argument.invalid.expected"); - test_fixture(transform_fixture, "directive_missing_required_argument.invalid.graphql", "parse/fixtures/directive_missing_required_argument.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"directive_missing_required_argument.invalid.graphql", "parse/fixtures/directive_missing_required_argument.invalid.expected", input, expected).await; } -#[test] -fn directive_module_match_on_query_invalid() { +#[tokio::test] +async fn directive_module_match_on_query_invalid() { let input = include_str!("parse/fixtures/directive-module-match-on-query.invalid.graphql"); let expected = include_str!("parse/fixtures/directive-module-match-on-query.invalid.expected"); - test_fixture(transform_fixture, "directive-module-match-on-query.invalid.graphql", "parse/fixtures/directive-module-match-on-query.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-module-match-on-query.invalid.graphql", "parse/fixtures/directive-module-match-on-query.invalid.expected", input, expected).await; } -#[test] -fn directive_module_on_field_invalid() { +#[tokio::test] +async fn directive_module_on_field_invalid() { let input = include_str!("parse/fixtures/directive-module-on-field.invalid.graphql"); let expected = include_str!("parse/fixtures/directive-module-on-field.invalid.expected"); - test_fixture(transform_fixture, "directive-module-on-field.invalid.graphql", "parse/fixtures/directive-module-on-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-module-on-field.invalid.graphql", "parse/fixtures/directive-module-on-field.invalid.expected", input, expected).await; } -#[test] -fn directive_unknown_argument_invalid() { +#[tokio::test] +async fn directive_unknown_argument_invalid() { let input = include_str!("parse/fixtures/directive-unknown-argument.invalid.graphql"); let expected = include_str!("parse/fixtures/directive-unknown-argument.invalid.expected"); - test_fixture(transform_fixture, "directive-unknown-argument.invalid.graphql", "parse/fixtures/directive-unknown-argument.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-unknown-argument.invalid.graphql", 
"parse/fixtures/directive-unknown-argument.invalid.expected", input, expected).await; } -#[test] -fn directive_wrong_argument_type_invalid() { +#[tokio::test] +async fn directive_wrong_argument_type_invalid() { let input = include_str!("parse/fixtures/directive_wrong_argument_type.invalid.graphql"); let expected = include_str!("parse/fixtures/directive_wrong_argument_type.invalid.expected"); - test_fixture(transform_fixture, "directive_wrong_argument_type.invalid.graphql", "parse/fixtures/directive_wrong_argument_type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive_wrong_argument_type.invalid.graphql", "parse/fixtures/directive_wrong_argument_type.invalid.expected", input, expected).await; } -#[test] -fn duplicate_arguments_invalid() { +#[tokio::test] +async fn duplicate_arguments_invalid() { let input = include_str!("parse/fixtures/duplicate_arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/duplicate_arguments.invalid.expected"); - test_fixture(transform_fixture, "duplicate_arguments.invalid.graphql", "parse/fixtures/duplicate_arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate_arguments.invalid.graphql", "parse/fixtures/duplicate_arguments.invalid.expected", input, expected).await; } -#[test] -fn duplicate_variables_invalid() { +#[tokio::test] +async fn duplicate_variables_invalid() { let input = include_str!("parse/fixtures/duplicate_variables_invalid.graphql"); let expected = include_str!("parse/fixtures/duplicate_variables_invalid.expected"); - test_fixture(transform_fixture, "duplicate_variables_invalid.graphql", "parse/fixtures/duplicate_variables_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate_variables_invalid.graphql", "parse/fixtures/duplicate_variables_invalid.expected", input, expected).await; } -#[test] -fn enum_values() { +#[tokio::test] +async fn enum_values() { let input = 
include_str!("parse/fixtures/enum-values.graphql"); let expected = include_str!("parse/fixtures/enum-values.expected"); - test_fixture(transform_fixture, "enum-values.graphql", "parse/fixtures/enum-values.expected", input, expected); + test_fixture(transform_fixture, file!(), "enum-values.graphql", "parse/fixtures/enum-values.expected", input, expected).await; } -#[test] -fn enum_values_invalid() { +#[tokio::test] +async fn enum_values_invalid() { let input = include_str!("parse/fixtures/enum-values.invalid.graphql"); let expected = include_str!("parse/fixtures/enum-values.invalid.expected"); - test_fixture(transform_fixture, "enum-values.invalid.graphql", "parse/fixtures/enum-values.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "enum-values.invalid.graphql", "parse/fixtures/enum-values.invalid.expected", input, expected).await; } -#[test] -fn fetch_token_with_arguments_invalid() { +#[tokio::test] +async fn fetch_token_with_arguments_invalid() { let input = include_str!("parse/fixtures/fetch_token_with_arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/fetch_token_with_arguments.invalid.expected"); - test_fixture(transform_fixture, "fetch_token_with_arguments.invalid.graphql", "parse/fixtures/fetch_token_with_arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fetch_token_with_arguments.invalid.graphql", "parse/fixtures/fetch_token_with_arguments.invalid.expected", input, expected).await; } -#[test] -fn field_argument_missing_required_invalid() { +#[tokio::test] +async fn field_argument_missing_required_invalid() { let input = include_str!("parse/fixtures/field_argument_missing_required.invalid.graphql"); let expected = include_str!("parse/fixtures/field_argument_missing_required.invalid.expected"); - test_fixture(transform_fixture, "field_argument_missing_required.invalid.graphql", "parse/fixtures/field_argument_missing_required.invalid.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "field_argument_missing_required.invalid.graphql", "parse/fixtures/field_argument_missing_required.invalid.expected", input, expected).await; } -#[test] -fn field_argument_unknown_invalid() { +#[tokio::test] +async fn field_argument_unknown_invalid() { let input = include_str!("parse/fixtures/field_argument_unknown.invalid.graphql"); let expected = include_str!("parse/fixtures/field_argument_unknown.invalid.expected"); - test_fixture(transform_fixture, "field_argument_unknown.invalid.graphql", "parse/fixtures/field_argument_unknown.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "field_argument_unknown.invalid.graphql", "parse/fixtures/field_argument_unknown.invalid.expected", input, expected).await; } -#[test] -fn field_argument_wrong_type_invalid() { +#[tokio::test] +async fn field_argument_wrong_type_invalid() { let input = include_str!("parse/fixtures/field_argument_wrong_type.invalid.graphql"); let expected = include_str!("parse/fixtures/field_argument_wrong_type.invalid.expected"); - test_fixture(transform_fixture, "field_argument_wrong_type.invalid.graphql", "parse/fixtures/field_argument_wrong_type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "field_argument_wrong_type.invalid.graphql", "parse/fixtures/field_argument_wrong_type.invalid.expected", input, expected).await; } -#[test] -fn field_arguments() { +#[tokio::test] +async fn field_arguments() { let input = include_str!("parse/fixtures/field-arguments.graphql"); let expected = include_str!("parse/fixtures/field-arguments.expected"); - test_fixture(transform_fixture, "field-arguments.graphql", "parse/fixtures/field-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "field-arguments.graphql", "parse/fixtures/field-arguments.expected", input, expected).await; } -#[test] -fn fixme_fat_interface_on_union() { +#[tokio::test] +async fn fixme_fat_interface_on_union() { let 
input = include_str!("parse/fixtures/fixme_fat_interface_on_union.graphql"); let expected = include_str!("parse/fixtures/fixme_fat_interface_on_union.expected"); - test_fixture(transform_fixture, "fixme_fat_interface_on_union.graphql", "parse/fixtures/fixme_fat_interface_on_union.expected", input, expected); + test_fixture(transform_fixture, file!(), "fixme_fat_interface_on_union.graphql", "parse/fixtures/fixme_fat_interface_on_union.expected", input, expected).await; } -#[test] -fn fixme_fat_interface_on_union_invalid() { +#[tokio::test] +async fn fixme_fat_interface_on_union_invalid() { let input = include_str!("parse/fixtures/fixme_fat_interface_on_union.invalid.graphql"); let expected = include_str!("parse/fixtures/fixme_fat_interface_on_union.invalid.expected"); - test_fixture(transform_fixture, "fixme_fat_interface_on_union.invalid.graphql", "parse/fixtures/fixme_fat_interface_on_union.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fixme_fat_interface_on_union.invalid.graphql", "parse/fixtures/fixme_fat_interface_on_union.invalid.expected", input, expected).await; } -#[test] -fn fragment_spread_on_wrong_type_invalid() { +#[tokio::test] +async fn fragment_spread_on_wrong_type_invalid() { let input = include_str!("parse/fixtures/fragment-spread-on-wrong-type.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-spread-on-wrong-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-spread-on-wrong-type.invalid.graphql", "parse/fixtures/fragment-spread-on-wrong-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-spread-on-wrong-type.invalid.graphql", "parse/fixtures/fragment-spread-on-wrong-type.invalid.expected", input, expected).await; } -#[test] -fn fragment_spread_within_wrong_parent_type_invalid() { +#[tokio::test] +async fn fragment_spread_within_wrong_parent_type_invalid() { let input = 
include_str!("parse/fixtures/fragment-spread-within-wrong-parent-type.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-spread-within-wrong-parent-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-spread-within-wrong-parent-type.invalid.graphql", "parse/fixtures/fragment-spread-within-wrong-parent-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-spread-within-wrong-parent-type.invalid.graphql", "parse/fixtures/fragment-spread-within-wrong-parent-type.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_argument_type_invalid() { +#[tokio::test] +async fn fragment_with_argument_type_invalid() { let input = include_str!("parse/fixtures/fragment-with-argument-type.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-argument-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-argument-type.invalid.graphql", "parse/fixtures/fragment-with-argument-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-argument-type.invalid.graphql", "parse/fixtures/fragment-with-argument-type.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_argument_type_syntax_error_invalid() { +#[tokio::test] +async fn fragment_with_argument_type_syntax_error_invalid() { let input = include_str!("parse/fixtures/fragment-with-argument-type-syntax-error.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-argument-type-syntax-error.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-argument-type-syntax-error.invalid.graphql", "parse/fixtures/fragment-with-argument-type-syntax-error.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-argument-type-syntax-error.invalid.graphql", "parse/fixtures/fragment-with-argument-type-syntax-error.invalid.expected", input, expected).await; } -#[test] -fn 
fragment_with_arguments() { +#[tokio::test] +async fn fragment_with_arguments() { let input = include_str!("parse/fixtures/fragment-with-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-arguments.graphql", "parse/fixtures/fragment-with-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-arguments.graphql", "parse/fixtures/fragment-with-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_arguments_defaulting() { +#[tokio::test] +async fn fragment_with_arguments_defaulting() { let input = include_str!("parse/fixtures/fragment_with_arguments_defaulting.graphql"); let expected = include_str!("parse/fixtures/fragment_with_arguments_defaulting.expected"); - test_fixture(transform_fixture, "fragment_with_arguments_defaulting.graphql", "parse/fixtures/fragment_with_arguments_defaulting.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_arguments_defaulting.graphql", "parse/fixtures/fragment_with_arguments_defaulting.expected", input, expected).await; } -#[test] -fn fragment_with_arguments_duplicate_invalid() { +#[tokio::test] +async fn fragment_with_arguments_duplicate_invalid() { let input = include_str!("parse/fixtures/fragment-with-arguments-duplicate.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-arguments-duplicate.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-arguments-duplicate.invalid.graphql", "parse/fixtures/fragment-with-arguments-duplicate.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-arguments-duplicate.invalid.graphql", "parse/fixtures/fragment-with-arguments-duplicate.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_arguments_incorrect_nullability_invalid() { +#[tokio::test] +async fn 
fragment_with_arguments_incorrect_nullability_invalid() { let input = include_str!("parse/fixtures/fragment_with_arguments_incorrect_nullability.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment_with_arguments_incorrect_nullability.invalid.expected"); - test_fixture(transform_fixture, "fragment_with_arguments_incorrect_nullability.invalid.graphql", "parse/fixtures/fragment_with_arguments_incorrect_nullability.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_arguments_incorrect_nullability.invalid.graphql", "parse/fixtures/fragment_with_arguments_incorrect_nullability.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_arguments_invalid_type_invalid() { +#[tokio::test] +async fn fragment_with_arguments_invalid_type_invalid() { let input = include_str!("parse/fixtures/fragment-with-arguments-invalid-type.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-arguments-invalid-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-arguments-invalid-type.invalid.graphql", "parse/fixtures/fragment-with-arguments-invalid-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-arguments-invalid-type.invalid.graphql", "parse/fixtures/fragment-with-arguments-invalid-type.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_arguments_syntax() { +#[tokio::test] +async fn fragment_with_arguments_syntax() { let input = include_str!("parse/fixtures/fragment-with-arguments-syntax.graphql"); let expected = include_str!("parse/fixtures/fragment-with-arguments-syntax.expected"); - test_fixture(transform_fixture, "fragment-with-arguments-syntax.graphql", "parse/fixtures/fragment-with-arguments-syntax.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-arguments-syntax.graphql", "parse/fixtures/fragment-with-arguments-syntax.expected", input, expected).await; } 
-#[test] -fn fragment_with_both_arguments_and_directive_invalid() { +#[tokio::test] +async fn fragment_with_both_arguments_and_directive_invalid() { let input = include_str!("parse/fixtures/fragment-with-both-arguments-and-directive.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-both-arguments-and-directive.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-both-arguments-and-directive.invalid.graphql", "parse/fixtures/fragment-with-both-arguments-and-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-both-arguments-and-directive.invalid.graphql", "parse/fixtures/fragment-with-both-arguments-and-directive.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_both_variable_definition_and_directive_invalid() { +#[tokio::test] +async fn fragment_with_both_variable_definition_and_directive_invalid() { let input = include_str!("parse/fixtures/fragment-with-both-variable-definition-and-directive.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-both-variable-definition-and-directive.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-both-variable-definition-and-directive.invalid.graphql", "parse/fixtures/fragment-with-both-variable-definition-and-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-both-variable-definition-and-directive.invalid.graphql", "parse/fixtures/fragment-with-both-variable-definition-and-directive.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_literal_arguments() { +#[tokio::test] +async fn fragment_with_literal_arguments() { let input = include_str!("parse/fixtures/fragment-with-literal-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-literal-arguments.graphql", 
"parse/fixtures/fragment-with-literal-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-arguments.graphql", "parse/fixtures/fragment-with-literal-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_literal_enum_arguments() { +#[tokio::test] +async fn fragment_with_literal_enum_arguments() { let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-enum-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-literal-enum-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-enum-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_literal_enum_arguments_into_enum_list() { +#[tokio::test] +async fn fragment_with_literal_enum_arguments_into_enum_list() { let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected"); - test_fixture(transform_fixture, "fragment-with-literal-enum-arguments-into-enum-list.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-enum-arguments-into-enum-list.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list.expected", input, expected).await; } -#[test] -fn fragment_with_literal_enum_arguments_into_enum_list_indirect_invalid() { +#[tokio::test] +async fn fragment_with_literal_enum_arguments_into_enum_list_indirect_invalid() { let input = include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.graphql"); let expected = 
include_str!("parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.graphql", "parse/fixtures/fragment-with-literal-enum-arguments-into-enum-list-indirect.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_literal_enum_list_arguments() { +#[tokio::test] +async fn fragment_with_literal_enum_list_arguments() { let input = include_str!("parse/fixtures/fragment-with-literal-enum-list-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-enum-list-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-literal-enum-list-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-list-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-enum-list-arguments.graphql", "parse/fixtures/fragment-with-literal-enum-list-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_literal_object_arguments() { +#[tokio::test] +async fn fragment_with_literal_object_arguments() { let input = include_str!("parse/fixtures/fragment-with-literal-object-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-object-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-literal-object-arguments.graphql", "parse/fixtures/fragment-with-literal-object-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-object-arguments.graphql", "parse/fixtures/fragment-with-literal-object-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_literal_object_list_arguments() { 
+#[tokio::test] +async fn fragment_with_literal_object_list_arguments() { let input = include_str!("parse/fixtures/fragment-with-literal-object-list-arguments.graphql"); let expected = include_str!("parse/fixtures/fragment-with-literal-object-list-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-literal-object-list-arguments.graphql", "parse/fixtures/fragment-with-literal-object-list-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-literal-object-list-arguments.graphql", "parse/fixtures/fragment-with-literal-object-list-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_undefined_literal_arguments_invalid() { +#[tokio::test] +async fn fragment_with_undefined_literal_arguments_invalid() { let input = include_str!("parse/fixtures/fragment-with-undefined-literal-arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-undefined-literal-arguments.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-undefined-literal-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-literal-arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-undefined-literal-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-literal-arguments.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_undefined_variable_arguments_invalid() { +#[tokio::test] +async fn fragment_with_undefined_variable_arguments_invalid() { let input = include_str!("parse/fixtures/fragment-with-undefined-variable-arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-undefined-variable-arguments.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-undefined-variable-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-variable-arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"fragment-with-undefined-variable-arguments.invalid.graphql", "parse/fixtures/fragment-with-undefined-variable-arguments.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_unnecessary_unchecked_arguments_invalid() { +#[tokio::test] +async fn fragment_with_unnecessary_unchecked_arguments_invalid() { let input = include_str!("parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-unnecessary-unchecked-arguments.invalid.graphql", "parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-unnecessary-unchecked-arguments.invalid.graphql", "parse/fixtures/fragment-with-unnecessary-unchecked-arguments.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_variable_definitions_syntax() { +#[tokio::test] +async fn fragment_with_variable_definitions_syntax() { let input = include_str!("parse/fixtures/fragment-with-variable-definitions-syntax.graphql"); let expected = include_str!("parse/fixtures/fragment-with-variable-definitions-syntax.expected"); - test_fixture(transform_fixture, "fragment-with-variable-definitions-syntax.graphql", "parse/fixtures/fragment-with-variable-definitions-syntax.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-variable-definitions-syntax.graphql", "parse/fixtures/fragment-with-variable-definitions-syntax.expected", input, expected).await; } -#[test] -fn fragment_with_variable_definitions_syntax_and_argdefs_invalid() { +#[tokio::test] +async fn fragment_with_variable_definitions_syntax_and_argdefs_invalid() { let input = include_str!("parse/fixtures/fragment-with-variable-definitions-syntax-and-argdefs.invalid.graphql"); let expected = 
include_str!("parse/fixtures/fragment-with-variable-definitions-syntax-and-argdefs.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-variable-definitions-syntax-and-argdefs.invalid.graphql", "parse/fixtures/fragment-with-variable-definitions-syntax-and-argdefs.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-variable-definitions-syntax-and-argdefs.invalid.graphql", "parse/fixtures/fragment-with-variable-definitions-syntax-and-argdefs.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_wrong_type_invalid() { +#[tokio::test] +async fn inline_fragment_on_wrong_type_invalid() { let input = include_str!("parse/fixtures/inline-fragment-on-wrong-type.invalid.graphql"); let expected = include_str!("parse/fixtures/inline-fragment-on-wrong-type.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-on-wrong-type.invalid.graphql", "parse/fixtures/inline-fragment-on-wrong-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-wrong-type.invalid.graphql", "parse/fixtures/inline-fragment-on-wrong-type.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_with_invalid_type() { +#[tokio::test] +async fn inline_fragment_with_invalid_type() { let input = include_str!("parse/fixtures/inline-fragment-with-invalid-type.graphql"); let expected = include_str!("parse/fixtures/inline-fragment-with-invalid-type.expected"); - test_fixture(transform_fixture, "inline-fragment-with-invalid-type.graphql", "parse/fixtures/inline-fragment-with-invalid-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-with-invalid-type.graphql", "parse/fixtures/inline-fragment-with-invalid-type.expected", input, expected).await; } -#[test] -fn inline_fragment_within_invalid_inline_fragment_grandparent_invalid() { +#[tokio::test] +async fn 
inline_fragment_within_invalid_inline_fragment_grandparent_invalid() { let input = include_str!("parse/fixtures/inline-fragment-within-invalid-inline-fragment-grandparent.invalid.graphql"); let expected = include_str!("parse/fixtures/inline-fragment-within-invalid-inline-fragment-grandparent.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-within-invalid-inline-fragment-grandparent.invalid.graphql", "parse/fixtures/inline-fragment-within-invalid-inline-fragment-grandparent.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-within-invalid-inline-fragment-grandparent.invalid.graphql", "parse/fixtures/inline-fragment-within-invalid-inline-fragment-grandparent.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_within_linked_field_and_wrong_inline_fragment_invalid() { +#[tokio::test] +async fn inline_fragment_within_linked_field_and_wrong_inline_fragment_invalid() { let input = include_str!("parse/fixtures/inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.graphql"); let expected = include_str!("parse/fixtures/inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.graphql", "parse/fixtures/inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.graphql", "parse/fixtures/inline-fragment-within-linked-field-and-wrong-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_within_wrong_parent_type_invalid() { +#[tokio::test] +async fn inline_fragment_within_wrong_parent_type_invalid() { let input = include_str!("parse/fixtures/inline-fragment-within-wrong-parent-type.invalid.graphql"); let expected = 
include_str!("parse/fixtures/inline-fragment-within-wrong-parent-type.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-within-wrong-parent-type.invalid.graphql", "parse/fixtures/inline-fragment-within-wrong-parent-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-within-wrong-parent-type.invalid.graphql", "parse/fixtures/inline-fragment-within-wrong-parent-type.invalid.expected", input, expected).await; } -#[test] -fn inline_untyped_fragment() { +#[tokio::test] +async fn inline_untyped_fragment() { let input = include_str!("parse/fixtures/inline-untyped-fragment.graphql"); let expected = include_str!("parse/fixtures/inline-untyped-fragment.expected"); - test_fixture(transform_fixture, "inline-untyped-fragment.graphql", "parse/fixtures/inline-untyped-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-untyped-fragment.graphql", "parse/fixtures/inline-untyped-fragment.expected", input, expected).await; } -#[test] -fn invalid_argument_type_invalid() { +#[tokio::test] +async fn invalid_argument_type_invalid() { let input = include_str!("parse/fixtures/invalid-argument-type.invalid.graphql"); let expected = include_str!("parse/fixtures/invalid-argument-type.invalid.expected"); - test_fixture(transform_fixture, "invalid-argument-type.invalid.graphql", "parse/fixtures/invalid-argument-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-argument-type.invalid.graphql", "parse/fixtures/invalid-argument-type.invalid.expected", input, expected).await; } -#[test] -fn linked_field_with_no_selection_invalid() { +#[tokio::test] +async fn linked_field_with_no_selection_invalid() { let input = include_str!("parse/fixtures/linked-field-with-no-selection.invalid.graphql"); let expected = include_str!("parse/fixtures/linked-field-with-no-selection.invalid.expected"); - test_fixture(transform_fixture, 
"linked-field-with-no-selection.invalid.graphql", "parse/fixtures/linked-field-with-no-selection.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field-with-no-selection.invalid.graphql", "parse/fixtures/linked-field-with-no-selection.invalid.expected", input, expected).await; } -#[test] -fn linked_handle_field() { +#[tokio::test] +async fn linked_handle_field() { let input = include_str!("parse/fixtures/linked-handle-field.graphql"); let expected = include_str!("parse/fixtures/linked-handle-field.expected"); - test_fixture(transform_fixture, "linked-handle-field.graphql", "parse/fixtures/linked-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-field.graphql", "parse/fixtures/linked-handle-field.expected", input, expected).await; } -#[test] -fn linked_handle_field_with_filters() { +#[tokio::test] +async fn linked_handle_field_with_filters() { let input = include_str!("parse/fixtures/linked-handle-field-with-filters.graphql"); let expected = include_str!("parse/fixtures/linked-handle-field-with-filters.expected"); - test_fixture(transform_fixture, "linked-handle-field-with-filters.graphql", "parse/fixtures/linked-handle-field-with-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-field-with-filters.graphql", "parse/fixtures/linked-handle-field-with-filters.expected", input, expected).await; } -#[test] -fn linked_handle_field_with_key() { +#[tokio::test] +async fn linked_handle_field_with_key() { let input = include_str!("parse/fixtures/linked-handle-field-with-key.graphql"); let expected = include_str!("parse/fixtures/linked-handle-field-with-key.expected"); - test_fixture(transform_fixture, "linked-handle-field-with-key.graphql", "parse/fixtures/linked-handle-field-with-key.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-field-with-key.graphql", 
"parse/fixtures/linked-handle-field-with-key.expected", input, expected).await; } -#[test] -fn linked_handle_filter() { +#[tokio::test] +async fn linked_handle_filter() { let input = include_str!("parse/fixtures/linked-handle-filter.graphql"); let expected = include_str!("parse/fixtures/linked-handle-filter.expected"); - test_fixture(transform_fixture, "linked-handle-filter.graphql", "parse/fixtures/linked-handle-filter.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-filter.graphql", "parse/fixtures/linked-handle-filter.expected", input, expected).await; } -#[test] -fn list_argument() { +#[tokio::test] +async fn list_argument() { let input = include_str!("parse/fixtures/list-argument.graphql"); let expected = include_str!("parse/fixtures/list-argument.expected"); - test_fixture(transform_fixture, "list-argument.graphql", "parse/fixtures/list-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "list-argument.graphql", "parse/fixtures/list-argument.expected", input, expected).await; } -#[test] -fn list_argument_complex_object() { +#[tokio::test] +async fn list_argument_complex_object() { let input = include_str!("parse/fixtures/list-argument-complex-object.graphql"); let expected = include_str!("parse/fixtures/list-argument-complex-object.expected"); - test_fixture(transform_fixture, "list-argument-complex-object.graphql", "parse/fixtures/list-argument-complex-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "list-argument-complex-object.graphql", "parse/fixtures/list-argument-complex-object.expected", input, expected).await; } -#[test] -fn list_of_enums() { +#[tokio::test] +async fn list_of_enums() { let input = include_str!("parse/fixtures/list-of-enums.graphql"); let expected = include_str!("parse/fixtures/list-of-enums.expected"); - test_fixture(transform_fixture, "list-of-enums.graphql", "parse/fixtures/list-of-enums.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "list-of-enums.graphql", "parse/fixtures/list-of-enums.expected", input, expected).await; } -#[test] -fn literal_list_argument() { +#[tokio::test] +async fn literal_list_argument() { let input = include_str!("parse/fixtures/literal-list-argument.graphql"); let expected = include_str!("parse/fixtures/literal-list-argument.expected"); - test_fixture(transform_fixture, "literal-list-argument.graphql", "parse/fixtures/literal-list-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "literal-list-argument.graphql", "parse/fixtures/literal-list-argument.expected", input, expected).await; } -#[test] -fn literal_list_argument_invalid() { +#[tokio::test] +async fn literal_list_argument_invalid() { let input = include_str!("parse/fixtures/literal-list-argument.invalid.graphql"); let expected = include_str!("parse/fixtures/literal-list-argument.invalid.expected"); - test_fixture(transform_fixture, "literal-list-argument.invalid.graphql", "parse/fixtures/literal-list-argument.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "literal-list-argument.invalid.graphql", "parse/fixtures/literal-list-argument.invalid.expected", input, expected).await; } -#[test] -fn literal_object_argument() { +#[tokio::test] +async fn literal_object_argument() { let input = include_str!("parse/fixtures/literal-object-argument.graphql"); let expected = include_str!("parse/fixtures/literal-object-argument.expected"); - test_fixture(transform_fixture, "literal-object-argument.graphql", "parse/fixtures/literal-object-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "literal-object-argument.graphql", "parse/fixtures/literal-object-argument.expected", input, expected).await; } -#[test] -fn literal_object_argument_invalid() { +#[tokio::test] +async fn literal_object_argument_invalid() { let input = include_str!("parse/fixtures/literal-object-argument.invalid.graphql"); let 
expected = include_str!("parse/fixtures/literal-object-argument.invalid.expected"); - test_fixture(transform_fixture, "literal-object-argument.invalid.graphql", "parse/fixtures/literal-object-argument.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "literal-object-argument.invalid.graphql", "parse/fixtures/literal-object-argument.invalid.expected", input, expected).await; } -#[test] -fn null_values() { +#[tokio::test] +async fn null_values() { let input = include_str!("parse/fixtures/null-values.graphql"); let expected = include_str!("parse/fixtures/null-values.expected"); - test_fixture(transform_fixture, "null-values.graphql", "parse/fixtures/null-values.expected", input, expected); + test_fixture(transform_fixture, file!(), "null-values.graphql", "parse/fixtures/null-values.expected", input, expected).await; } -#[test] -fn null_values_invalid() { +#[tokio::test] +async fn null_values_invalid() { let input = include_str!("parse/fixtures/null-values.invalid.graphql"); let expected = include_str!("parse/fixtures/null-values.invalid.expected"); - test_fixture(transform_fixture, "null-values.invalid.graphql", "parse/fixtures/null-values.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "null-values.invalid.graphql", "parse/fixtures/null-values.invalid.expected", input, expected).await; } -#[test] -fn object_argument() { +#[tokio::test] +async fn object_argument() { let input = include_str!("parse/fixtures/object-argument.graphql"); let expected = include_str!("parse/fixtures/object-argument.expected"); - test_fixture(transform_fixture, "object-argument.graphql", "parse/fixtures/object-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "object-argument.graphql", "parse/fixtures/object-argument.expected", input, expected).await; } -#[test] -fn query_with_argument_invalid() { +#[tokio::test] +async fn query_with_argument_invalid() { let input = 
include_str!("parse/fixtures/query-with-argument.invalid.graphql"); let expected = include_str!("parse/fixtures/query-with-argument.invalid.expected"); - test_fixture(transform_fixture, "query-with-argument.invalid.graphql", "parse/fixtures/query-with-argument.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-argument.invalid.graphql", "parse/fixtures/query-with-argument.invalid.expected", input, expected).await; } -#[test] -fn scalar_field_with_selection() { +#[tokio::test] +async fn scalar_field_with_selection() { let input = include_str!("parse/fixtures/scalar-field-with-selection.graphql"); let expected = include_str!("parse/fixtures/scalar-field-with-selection.expected"); - test_fixture(transform_fixture, "scalar-field-with-selection.graphql", "parse/fixtures/scalar-field-with-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field-with-selection.graphql", "parse/fixtures/scalar-field-with-selection.expected", input, expected).await; } -#[test] -fn scalar_handle_field() { +#[tokio::test] +async fn scalar_handle_field() { let input = include_str!("parse/fixtures/scalar-handle-field.graphql"); let expected = include_str!("parse/fixtures/scalar-handle-field.expected"); - test_fixture(transform_fixture, "scalar-handle-field.graphql", "parse/fixtures/scalar-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-handle-field.graphql", "parse/fixtures/scalar-handle-field.expected", input, expected).await; } -#[test] -fn simple_fragment() { +#[tokio::test] +async fn simple_fragment() { let input = include_str!("parse/fixtures/simple-fragment.graphql"); let expected = include_str!("parse/fixtures/simple-fragment.expected"); - test_fixture(transform_fixture, "simple-fragment.graphql", "parse/fixtures/simple-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple-fragment.graphql", 
"parse/fixtures/simple-fragment.expected", input, expected).await; } -#[test] -fn simple_query() { +#[tokio::test] +async fn simple_query() { let input = include_str!("parse/fixtures/simple-query.graphql"); let expected = include_str!("parse/fixtures/simple-query.expected"); - test_fixture(transform_fixture, "simple-query.graphql", "parse/fixtures/simple-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple-query.graphql", "parse/fixtures/simple-query.expected", input, expected).await; } -#[test] -fn subscription_with_multiple_selections_invalid() { +#[tokio::test] +async fn subscription_with_multiple_selections_invalid() { let input = include_str!("parse/fixtures/subscription-with-multiple-selections.invalid.graphql"); let expected = include_str!("parse/fixtures/subscription-with-multiple-selections.invalid.expected"); - test_fixture(transform_fixture, "subscription-with-multiple-selections.invalid.graphql", "parse/fixtures/subscription-with-multiple-selections.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "subscription-with-multiple-selections.invalid.graphql", "parse/fixtures/subscription-with-multiple-selections.invalid.expected", input, expected).await; } -#[test] -fn typename_with_arguments_invalid() { +#[tokio::test] +async fn typename_with_arguments_invalid() { let input = include_str!("parse/fixtures/typename_with_arguments.invalid.graphql"); let expected = include_str!("parse/fixtures/typename_with_arguments.invalid.expected"); - test_fixture(transform_fixture, "typename_with_arguments.invalid.graphql", "parse/fixtures/typename_with_arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename_with_arguments.invalid.graphql", "parse/fixtures/typename_with_arguments.invalid.expected", input, expected).await; } -#[test] -fn undefined_fragment_but_close_invalid() { +#[tokio::test] +async fn undefined_fragment_but_close_invalid() { let input = 
include_str!("parse/fixtures/undefined-fragment-but-close.invalid.graphql"); let expected = include_str!("parse/fixtures/undefined-fragment-but-close.invalid.expected"); - test_fixture(transform_fixture, "undefined-fragment-but-close.invalid.graphql", "parse/fixtures/undefined-fragment-but-close.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "undefined-fragment-but-close.invalid.graphql", "parse/fixtures/undefined-fragment-but-close.invalid.expected", input, expected).await; } -#[test] -fn undefined_fragment_invalid() { +#[tokio::test] +async fn undefined_fragment_invalid() { let input = include_str!("parse/fixtures/undefined-fragment.invalid.graphql"); let expected = include_str!("parse/fixtures/undefined-fragment.invalid.expected"); - test_fixture(transform_fixture, "undefined-fragment.invalid.graphql", "parse/fixtures/undefined-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "undefined-fragment.invalid.graphql", "parse/fixtures/undefined-fragment.invalid.expected", input, expected).await; } -#[test] -fn undefined_type_invalid() { +#[tokio::test] +async fn undefined_type_invalid() { let input = include_str!("parse/fixtures/undefined-type.invalid.graphql"); let expected = include_str!("parse/fixtures/undefined-type.invalid.expected"); - test_fixture(transform_fixture, "undefined-type.invalid.graphql", "parse/fixtures/undefined-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "undefined-type.invalid.graphql", "parse/fixtures/undefined-type.invalid.expected", input, expected).await; } -#[test] -fn unknown_field_invalid() { +#[tokio::test] +async fn unknown_field_invalid() { let input = include_str!("parse/fixtures/unknown_field.invalid.graphql"); let expected = include_str!("parse/fixtures/unknown_field.invalid.expected"); - test_fixture(transform_fixture, "unknown_field.invalid.graphql", "parse/fixtures/unknown_field.invalid.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "unknown_field.invalid.graphql", "parse/fixtures/unknown_field.invalid.expected", input, expected).await; } -#[test] -fn unknown_fragment_type_invalid() { +#[tokio::test] +async fn unknown_fragment_type_invalid() { let input = include_str!("parse/fixtures/unknown-fragment-type.invalid.graphql"); let expected = include_str!("parse/fixtures/unknown-fragment-type.invalid.expected"); - test_fixture(transform_fixture, "unknown-fragment-type.invalid.graphql", "parse/fixtures/unknown-fragment-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unknown-fragment-type.invalid.graphql", "parse/fixtures/unknown-fragment-type.invalid.expected", input, expected).await; } -#[test] -fn unknown_fragment_type_suggestions_invalid() { +#[tokio::test] +async fn unknown_fragment_type_suggestions_invalid() { let input = include_str!("parse/fixtures/unknown-fragment-type-suggestions.invalid.graphql"); let expected = include_str!("parse/fixtures/unknown-fragment-type-suggestions.invalid.expected"); - test_fixture(transform_fixture, "unknown-fragment-type-suggestions.invalid.graphql", "parse/fixtures/unknown-fragment-type-suggestions.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unknown-fragment-type-suggestions.invalid.graphql", "parse/fixtures/unknown-fragment-type-suggestions.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn variable_with_default() { + let input = include_str!("parse/fixtures/variable_with_default.graphql"); + let expected = include_str!("parse/fixtures/variable_with_default.expected"); + test_fixture(transform_fixture, file!(), "variable_with_default.graphql", "parse/fixtures/variable_with_default.expected", input, expected).await; } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions.rs b/compiler/crates/graphql-ir/tests/parse_with_extensions.rs new file mode 100644 index 0000000000000..ec75bf03a9aa7 --- /dev/null +++ 
b/compiler/crates/graphql-ir/tests/parse_with_extensions.rs @@ -0,0 +1,60 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use fnv::FnvHashMap; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::FragmentVariablesSemantic; +use graphql_syntax::parse_executable; +use relay_test_schema::get_test_schema_with_extensions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let mut sources = FnvHashMap::default(); + sources.insert( + SourceLocationKey::standalone(fixture.file_name), + fixture.content, + ); + + let allow_custom_scalar_literals = !fixture.content.contains("relay:no_custom_scalar_literals"); + + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + build_ir_with_extra_features( + &schema, + &ast.definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: false, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: None, + default_anonymous_operation_name: None, + allow_custom_scalar_literals, + }, + ) + .map(|x| format!("{:#?}", x)) + .map_err(|errors| { + errors + .into_iter() + .map(|error| { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + printer.diagnostic_to_string(&error) + }) + .collect::>() + .join("\n\n") + }) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git 
a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/client-fields.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/client-fields.expected index fbd2485276e96..17ae6b4b06c31 100644 --- a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/client-fields.expected +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/client-fields.expected @@ -135,14 +135,14 @@ type Foo { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: client-fields.graphql:171:173, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], @@ -151,7 +151,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:226:238, - item: FieldID(517), + item: FieldID(526), }, arguments: [], directives: [], @@ -160,7 +160,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:245:250, - item: FieldID(174), + item: FieldID(177), }, arguments: [], directives: [], @@ -169,7 +169,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:259:265, - item: FieldID(176), + item: FieldID(179), }, arguments: [], directives: [], @@ -178,7 +178,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:272:276, - item: FieldID(177), + item: FieldID(180), }, arguments: [], directives: [], @@ -187,7 +187,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:287:289, - item: FieldID(460), + item: FieldID(462), }, arguments: [], directives: [], @@ -200,7 +200,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:308:316, - item: FieldID(175), + item: FieldID(178), }, arguments: [], directives: [], @@ -209,7 +209,7 @@ type Foo { alias: None, definition: WithLocation { location: 
client-fields.graphql:325:336, - item: FieldID(294), + item: FieldID(297), }, arguments: [], directives: [], @@ -220,7 +220,7 @@ type Foo { }, InlineFragment { type_condition: Some( - Object(69), + Object(70), ), directives: [], selections: [ @@ -228,7 +228,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:367:370, - item: FieldID(518), + item: FieldID(527), }, arguments: [], directives: [], @@ -245,7 +245,7 @@ type Foo { }, InlineFragment { type_condition: Some( - Object(78), + Object(82), ), directives: [], selections: [ @@ -253,7 +253,7 @@ type Foo { alias: None, definition: WithLocation { location: client-fields.graphql:470:472, - item: FieldID(519), + item: FieldID(528), }, arguments: [], directives: [], @@ -279,14 +279,14 @@ type Foo { }, variable_definitions: [], used_global_variables: [], - type_condition: Object(78), + type_condition: Object(82), directives: [], selections: [ ScalarField { alias: None, definition: WithLocation { location: client-fields.graphql:526:528, - item: FieldID(519), + item: FieldID(528), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.expected new file mode 100644 index 0000000000000..d93155f3ca7a3 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.expected @@ -0,0 +1,42 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field @customScalarDirective(arg: "foo") { + __typename + } + extension_scalar_field @customScalarDirective(arg: "bar") +} + +# %extensions% + +scalar CustomScalarType + +directive @customScalarDirective(arg: CustomScalarType!) on FIELD + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field: Obj + extension_scalar_field: Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"foo"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_directive_arg.invalid.graphql:5:49 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ extension_field @customScalarDirective(arg: "foo") { + │ ^^^^^ + 6 │ __typename + + +✖︎ Unexpected scalar literal `"bar"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_directive_arg.invalid.graphql:8:56 + 7 │ } + 8 │ extension_scalar_field @customScalarDirective(arg: "bar") + │ ^^^^^ + 9 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.graphql new file mode 100644 index 0000000000000..85cba6cf7edb4 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.graphql @@ -0,0 +1,24 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field @customScalarDirective(arg: "foo") { + __typename + } + extension_scalar_field @customScalarDirective(arg: "bar") +} + +# %extensions% + +scalar CustomScalarType + +directive @customScalarDirective(arg: CustomScalarType!) on FIELD + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field: Obj + extension_scalar_field: Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.expected new file mode 100644 index 0000000000000..6c06fa5b2c02b --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.expected @@ -0,0 +1,164 @@ +==================================== INPUT ==================================== +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($var: CustomScalarType!) { + extension_field @customScalarDirective(arg: $var) { + __typename + } + extension_scalar_field @customScalarDirective(arg: $var) +} + +# %extensions% + +scalar CustomScalarType + +directive @customScalarDirective(arg: CustomScalarType!) on FIELD + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field: Obj + extension_scalar_field: Int +} +==================================== OUTPUT =================================== +[ + Operation( + OperationDefinition { + kind: Query, + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:41:68, + item: OperationDefinitionName( + "CustomScalarLiteralArgQuery", + ), + }, + type_: Object(0), + variable_definitions: [ + VariableDefinition { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:69:73, + item: VariableName( + "var", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + default_value: None, + directives: [], + }, + ], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:100:115, + item: FieldID(527), + }, + arguments: [], + directives: [ + Directive { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:116:138, + item: 
DirectiveName( + "customScalarDirective", + ), + }, + arguments: [ + Argument { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:139:142, + item: ArgumentName( + "arg", + ), + }, + value: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:144:148, + item: Variable( + Variable { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:144:148, + item: VariableName( + "var", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + }, + ), + }, + }, + ], + data: None, + }, + ], + selections: [ + ScalarField { + alias: None, + definition: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:160:170, + item: FieldID(529), + }, + arguments: [], + directives: [], + }, + ], + }, + ScalarField { + alias: None, + definition: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:181:203, + item: FieldID(528), + }, + arguments: [], + directives: [ + Directive { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:204:226, + item: DirectiveName( + "customScalarDirective", + ), + }, + arguments: [ + Argument { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:227:230, + item: ArgumentName( + "arg", + ), + }, + value: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:232:236, + item: Variable( + Variable { + name: WithLocation { + location: custom_scalar_directive_arg_variable.graphql:232:236, + item: VariableName( + "var", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + }, + ), + }, + }, + ], + data: None, + }, + ], + }, + ], + }, + ), +] diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.graphql new file mode 100644 index 0000000000000..d3878c9dae69e --- /dev/null +++ 
b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.graphql @@ -0,0 +1,23 @@ +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($var: CustomScalarType!) { + extension_field @customScalarDirective(arg: $var) { + __typename + } + extension_scalar_field @customScalarDirective(arg: $var) +} + +# %extensions% + +scalar CustomScalarType + +directive @customScalarDirective(arg: CustomScalarType!) on FIELD + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field: Obj + extension_scalar_field: Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.expected new file mode 100644 index 0000000000000..0e20aa6c36326 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: ["1234", "5678"]) { + __typename + } + extension_scalar_field(custom_scalar_arg: []) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected list literal provided in a position expecting custom scalar type `CustomScalarType`. 
+ + custom_scalar_list_literal_arg.invalid.graphql:5:40 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ extension_field(custom_scalar_arg: ["1234", "5678"]) { + │ ^^^^^^^^^^^^^^^^ + 6 │ __typename + + +✖︎ Unexpected list literal provided in a position expecting custom scalar type `CustomScalarType`. + + custom_scalar_list_literal_arg.invalid.graphql:8:47 + 7 │ } + 8 │ extension_scalar_field(custom_scalar_arg: []) + │ ^^ + 9 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.graphql new file mode 100644 index 0000000000000..a8b0604e133a9 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.graphql @@ -0,0 +1,22 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: ["1234", "5678"]) { + __typename + } + extension_scalar_field(custom_scalar_arg: []) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.expected new file mode 100644 index 0000000000000..21a643a27c07f --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($some_arg: CustomScalarType!) 
{ + extension_field(custom_scalar_arg: ["1234", $some_arg]) { + __typename + } + extension_scalar_field(custom_scalar_arg: ["1234", $some_arg]) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: [CustomScalarType!]!): Obj + extension_scalar_field(custom_scalar_arg: [CustomScalarType!]!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_list_mixed_arg.invalid.graphql:5:41 + 4 │ query CustomScalarLiteralArgQuery($some_arg: CustomScalarType!) { + 5 │ extension_field(custom_scalar_arg: ["1234", $some_arg]) { + │ ^^^^^^ + 6 │ __typename + + +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_list_mixed_arg.invalid.graphql:8:48 + 7 │ } + 8 │ extension_scalar_field(custom_scalar_arg: ["1234", $some_arg]) + │ ^^^^^^ + 9 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.graphql new file mode 100644 index 0000000000000..f858a7961713b --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.graphql @@ -0,0 +1,22 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($some_arg: CustomScalarType!) { + extension_field(custom_scalar_arg: ["1234", $some_arg]) { + __typename + } + extension_scalar_field(custom_scalar_arg: ["1234", $some_arg]) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field(custom_scalar_arg: [CustomScalarType!]!): Obj + extension_scalar_field(custom_scalar_arg: [CustomScalarType!]!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.expected new file mode 100644 index 0000000000000..dec084e274014 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.expected @@ -0,0 +1,83 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) +} + +# %extensions% + +scalar CustomScalarType + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! +} + +extend type Query { + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== ERROR ==================================== +✖︎ Expected a value of type 'CustomScalarType' + + custom_scalar_list_other_literal_args.invalid.graphql:5:57 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + │ ^^^^ + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + + +✖︎ Unexpected scalar literal `true` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. 
+ + custom_scalar_list_other_literal_args.invalid.graphql:6:57 + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + │ ^^^^ + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + + +✖︎ Unexpected scalar literal `123` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_list_other_literal_args.invalid.graphql:7:56 + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + │ ^^^ + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + + +✖︎ Unexpected scalar literal `3.14` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_list_other_literal_args.invalid.graphql:8:58 + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + │ ^^^^ + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + + +✖︎ Unexpected scalar literal `"asdf"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_list_other_literal_args.invalid.graphql:9:59 + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + │ ^^^^^^ + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + + +✖︎ Unexpected enum literal provided in a position expecting custom scalar type `CustomScalarType`. 
+ + custom_scalar_list_other_literal_args.invalid.graphql:10:57 + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + │ ^^^ + 11 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.graphql new file mode 100644 index 0000000000000..4742d685e59a4 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.graphql @@ -0,0 +1,29 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) +} + +# %extensions% + +scalar CustomScalarType + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.expected new file mode 100644 index 0000000000000..8b038dfe7ba60 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: "1234") { + __typename + } + extension_scalar_field(custom_scalar_arg: "1234") +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_literal_arg.invalid.graphql:5:40 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ extension_field(custom_scalar_arg: "1234") { + │ ^^^^^^ + 6 │ __typename + + +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. 
+ + custom_scalar_literal_arg.invalid.graphql:8:47 + 7 │ } + 8 │ extension_scalar_field(custom_scalar_arg: "1234") + │ ^^^^^^ + 9 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.graphql new file mode 100644 index 0000000000000..cdb972607c45d --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.graphql @@ -0,0 +1,22 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: "1234") { + __typename + } + extension_scalar_field(custom_scalar_arg: "1234") +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.expected new file mode 100644 index 0000000000000..cce4d6da21304 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.expected @@ -0,0 +1,30 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field { + scalar_field(custom_scalar_arg: "1234") + } +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+ scalar_field(custom_scalar_arg: CustomScalarType!): Int +} + +extend type Query { + extension_field: Obj +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_literal_arg_nested.invalid.graphql:6:41 + 5 │ extension_field { + 6 │ scalar_field(custom_scalar_arg: "1234") + │ ^^^^^^ + 7 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.graphql new file mode 100644 index 0000000000000..b65fc501fd814 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.graphql @@ -0,0 +1,21 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field { + scalar_field(custom_scalar_arg: "1234") + } +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+ scalar_field(custom_scalar_arg: CustomScalarType!): Int +} + +extend type Query { + extension_field: Obj +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.expected new file mode 100644 index 0000000000000..53efdb1e5615b --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.expected @@ -0,0 +1,68 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(input: { + a: "foo", + b: 123, + }) { + __typename + } + extension_scalar_field(input: { + a: { + foo: 123, + bar: 456, + }, + }) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +input InputType { + a: CustomScalarType! + b: CustomScalarType +} + +extend type Query { + extension_field(input: InputType!): Obj + extension_scalar_field(input: InputType!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"foo"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_object_literal_arg.invalid.graphql:6:12 + 5 │ extension_field(input: { + 6 │ a: "foo", + │ ^^^^^ + 7 │ b: 123, + + +✖︎ Unexpected scalar literal `123` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_object_literal_arg.invalid.graphql:7:12 + 6 │ a: "foo", + 7 │ b: 123, + │ ^^^ + 8 │ }) { + + +✖︎ Unexpected object literal provided in a position expecting custom scalar type `CustomScalarType`. 
+ + custom_scalar_object_literal_arg.invalid.graphql:12:12 + 11 │ extension_scalar_field(input: { + 12 │ a: { + │ ^ + 13 │ foo: 123, + │ ^^^^^^^^^^^^^^^^^^^^^ + 14 │ bar: 456, + │ ^^^^^^^^^^^^^^^^^^^^^ + 15 │ }, + │ ^^^^^^^^^ + 16 │ }) diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.graphql new file mode 100644 index 0000000000000..e3ee120c2247e --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.graphql @@ -0,0 +1,35 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(input: { + a: "foo", + b: 123, + }) { + __typename + } + extension_scalar_field(input: { + a: { + foo: 123, + bar: 456, + }, + }) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +input InputType { + a: CustomScalarType! 
+ b: CustomScalarType +} + +extend type Query { + extension_field(input: InputType!): Obj + extension_scalar_field(input: InputType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.expected new file mode 100644 index 0000000000000..5904edb575875 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.expected @@ -0,0 +1,83 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) +} + +# %extensions% + +scalar CustomScalarType + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! +} + +extend type Query { + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== ERROR ==================================== +✖︎ Expected a value of type 'CustomScalarType' + + custom_scalar_other_literal_args.invalid.graphql:5:57 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + │ ^^^^ + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + + +✖︎ Unexpected scalar literal `true` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. 
+ + custom_scalar_other_literal_args.invalid.graphql:6:57 + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + │ ^^^^ + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + + +✖︎ Unexpected scalar literal `123` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_other_literal_args.invalid.graphql:7:56 + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + │ ^^^ + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + + +✖︎ Unexpected scalar literal `3.14` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_other_literal_args.invalid.graphql:8:58 + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + │ ^^^^ + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + + +✖︎ Unexpected scalar literal `"asdf"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_other_literal_args.invalid.graphql:9:59 + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + │ ^^^^^^ + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + + +✖︎ Unexpected enum literal provided in a position expecting custom scalar type `CustomScalarType`. 
+ + custom_scalar_other_literal_args.invalid.graphql:10:57 + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + │ ^^^ + 11 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.graphql new file mode 100644 index 0000000000000..4742d685e59a4 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.graphql @@ -0,0 +1,29 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) +} + +# %extensions% + +scalar CustomScalarType + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! +} + +extend type Query { + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.expected new file mode 100644 index 0000000000000..56f54cf5f435b --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.expected @@ -0,0 +1,140 @@ +==================================== INPUT ==================================== +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($arg: CustomScalarType!) 
{ + extension_field(custom_scalar_arg: $arg) { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== OUTPUT =================================== +[ + Operation( + OperationDefinition { + kind: Query, + name: WithLocation { + location: custom_scalar_variable_arg.graphql:41:68, + item: OperationDefinitionName( + "CustomScalarLiteralArgQuery", + ), + }, + type_: Object(0), + variable_definitions: [ + VariableDefinition { + name: WithLocation { + location: custom_scalar_variable_arg.graphql:69:73, + item: VariableName( + "arg", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + default_value: None, + directives: [], + }, + ], + directives: [], + selections: [ + LinkedField { + alias: None, + definition: WithLocation { + location: custom_scalar_variable_arg.graphql:100:115, + item: FieldID(527), + }, + arguments: [ + Argument { + name: WithLocation { + location: custom_scalar_variable_arg.graphql:116:133, + item: ArgumentName( + "custom_scalar_arg", + ), + }, + value: WithLocation { + location: custom_scalar_variable_arg.graphql:135:139, + item: Variable( + Variable { + name: WithLocation { + location: custom_scalar_variable_arg.graphql:135:139, + item: VariableName( + "arg", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + }, + ), + }, + }, + ], + directives: [], + selections: [ + ScalarField { + alias: None, + definition: WithLocation { + location: custom_scalar_variable_arg.graphql:151:161, + item: FieldID(529), + }, + arguments: [], + directives: [], + }, + ], + }, + ScalarField { + alias: None, + definition: WithLocation { + location: custom_scalar_variable_arg.graphql:172:194, + item: FieldID(528), + }, + arguments: [ + Argument { + name: WithLocation { + 
location: custom_scalar_variable_arg.graphql:195:212, + item: ArgumentName( + "custom_scalar_arg", + ), + }, + value: WithLocation { + location: custom_scalar_variable_arg.graphql:214:218, + item: Variable( + Variable { + name: WithLocation { + location: custom_scalar_variable_arg.graphql:214:218, + item: VariableName( + "arg", + ), + }, + type_: NonNull( + Named( + Scalar(8), + ), + ), + }, + ), + }, + }, + ], + directives: [], + }, + ], + }, + ), +] diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.graphql new file mode 100644 index 0000000000000..52c0e071fe49f --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_arg.graphql @@ -0,0 +1,21 @@ +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($arg: CustomScalarType!) { + extension_field(custom_scalar_arg: $arg) { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.expected new file mode 100644 index 0000000000000..41feb92e4b749 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.expected @@ -0,0 +1,31 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($arg: CustomScalarType! 
= "foobar") { + extension_field(custom_scalar_arg: $arg) { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"foobar"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + custom_scalar_variable_default_arg.invalid.graphql:4:61 + 3 │ + 4 │ query CustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar") { + │ ^^^^^^^^ + 5 │ extension_field(custom_scalar_arg: $arg) { diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.graphql new file mode 100644 index 0000000000000..ff62476eae543 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.graphql @@ -0,0 +1,22 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar") { + extension_field(custom_scalar_arg: $arg) { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.expected new file mode 100644 index 0000000000000..bd0ce4734159a --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.expected @@ -0,0 +1,102 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + list_arg: extension_scalar_field(custom_scalar_arg: [1, 2, 3]) + object_arg: extension_scalar_field(custom_scalar_arg: {a: 1, b: "foo"}) +} + +# %extensions% + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! +} + +extend type Query { + # JSON is a custom scalar defined in the server schema + extension_scalar_field(custom_scalar_arg: JSON!): Int +} +==================================== ERROR ==================================== +✖︎ Expected a value of type 'JSON' + + custom_server_scalar_literal_args.invalid.graphql:5:57 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + │ ^^^^ + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + + +✖︎ Unexpected scalar literal `true` provided in a position expecting custom scalar type `JSON`. 
This value should come from a variable. + + custom_server_scalar_literal_args.invalid.graphql:6:57 + 5 │ null_arg: extension_scalar_field(custom_scalar_arg: null) + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + │ ^^^^ + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + + +✖︎ Unexpected scalar literal `123` provided in a position expecting custom scalar type `JSON`. This value should come from a variable. + + custom_server_scalar_literal_args.invalid.graphql:7:56 + 6 │ bool_arg: extension_scalar_field(custom_scalar_arg: true) + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + │ ^^^ + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + + +✖︎ Unexpected scalar literal `3.14` provided in a position expecting custom scalar type `JSON`. This value should come from a variable. + + custom_server_scalar_literal_args.invalid.graphql:8:58 + 7 │ int_arg: extension_scalar_field(custom_scalar_arg: 123) + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + │ ^^^^ + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + + +✖︎ Unexpected scalar literal `"asdf"` provided in a position expecting custom scalar type `JSON`. This value should come from a variable. + + custom_server_scalar_literal_args.invalid.graphql:9:59 + 8 │ float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + │ ^^^^^^ + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + + +✖︎ Unexpected enum literal provided in a position expecting custom scalar type `JSON`. + + custom_server_scalar_literal_args.invalid.graphql:10:57 + 9 │ string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + │ ^^^ + 11 │ list_arg: extension_scalar_field(custom_scalar_arg: [1, 2, 3]) + + +✖︎ Unexpected list literal provided in a position expecting custom scalar type `JSON`. 
+ + custom_server_scalar_literal_args.invalid.graphql:11:57 + 10 │ enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + 11 │ list_arg: extension_scalar_field(custom_scalar_arg: [1, 2, 3]) + │ ^^^^^^^^^ + 12 │ object_arg: extension_scalar_field(custom_scalar_arg: {a: 1, b: "foo"}) + + +✖︎ Unexpected object literal provided in a position expecting custom scalar type `JSON`. + + custom_server_scalar_literal_args.invalid.graphql:12:59 + 11 │ list_arg: extension_scalar_field(custom_scalar_arg: [1, 2, 3]) + 12 │ object_arg: extension_scalar_field(custom_scalar_arg: {a: 1, b: "foo"}) + │ ^^^^^^^^^^^^^^^^ + 13 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.graphql new file mode 100644 index 0000000000000..d1c05cabb7ffd --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.graphql @@ -0,0 +1,30 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + null_arg: extension_scalar_field(custom_scalar_arg: null) + bool_arg: extension_scalar_field(custom_scalar_arg: true) + int_arg: extension_scalar_field(custom_scalar_arg: 123) + float_arg: extension_scalar_field(custom_scalar_arg: 3.14) + string_arg: extension_scalar_field(custom_scalar_arg: "asdf") + enum_arg: extension_scalar_field(custom_scalar_arg: FOO) + list_arg: extension_scalar_field(custom_scalar_arg: [1, 2, 3]) + object_arg: extension_scalar_field(custom_scalar_arg: {a: 1, b: "foo"}) +} + +# %extensions% + +enum TestEnum { + FOO + BAR + BAZ +} + +type Obj { + some_key: Int! 
+} + +extend type Query { + # JSON is a custom scalar defined in the server schema + extension_scalar_field(custom_scalar_arg: JSON!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.expected b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.expected new file mode 100644 index 0000000000000..f38fd2629c978 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.expected @@ -0,0 +1,58 @@ +==================================== INPUT ==================================== +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: ["1234", "5678"]) { + __typename + } + extension_scalar_field(custom_scalar_arg: ["1234", "5678"]) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: [CustomScalarType!]!): Obj + extension_scalar_field(custom_scalar_arg: [CustomScalarType!]!): Int +} +==================================== ERROR ==================================== +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + list_of_custom_scalar_literal_arg.invalid.graphql:5:41 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ extension_field(custom_scalar_arg: ["1234", "5678"]) { + │ ^^^^^^ + 6 │ __typename + + +✖︎ Unexpected scalar literal `"5678"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. 
+ + list_of_custom_scalar_literal_arg.invalid.graphql:5:49 + 4 │ query CustomScalarLiteralArgQuery { + 5 │ extension_field(custom_scalar_arg: ["1234", "5678"]) { + │ ^^^^^^ + 6 │ __typename + + +✖︎ Unexpected scalar literal `"1234"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + list_of_custom_scalar_literal_arg.invalid.graphql:8:48 + 7 │ } + 8 │ extension_scalar_field(custom_scalar_arg: ["1234", "5678"]) + │ ^^^^^^ + 9 │ } + + +✖︎ Unexpected scalar literal `"5678"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + list_of_custom_scalar_literal_arg.invalid.graphql:8:56 + 7 │ } + 8 │ extension_scalar_field(custom_scalar_arg: ["1234", "5678"]) + │ ^^^^^^ + 9 │ } diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.graphql b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.graphql new file mode 100644 index 0000000000000..8e50bec4a7102 --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.graphql @@ -0,0 +1,22 @@ +# expected-to-throw +# relay:no_custom_scalar_literals + +query CustomScalarLiteralArgQuery { + extension_field(custom_scalar_arg: ["1234", "5678"]) { + __typename + } + extension_scalar_field(custom_scalar_arg: ["1234", "5678"]) +} + +# %extensions% + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field(custom_scalar_arg: [CustomScalarType!]!): Obj + extension_scalar_field(custom_scalar_arg: [CustomScalarType!]!): Int +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions/mod.rs b/compiler/crates/graphql-ir/tests/parse_with_extensions/mod.rs deleted file mode 100644 index 62ad15b6a3bb1..0000000000000 --- a/compiler/crates/graphql-ir/tests/parse_with_extensions/mod.rs +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use fnv::FnvHashMap; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build; -use graphql_syntax::parse_executable; -use relay_test_schema::get_test_schema_with_extensions; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let mut sources = FnvHashMap::default(); - sources.insert( - SourceLocationKey::standalone(fixture.file_name), - fixture.content, - ); - - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - build(&schema, &ast.definitions) - .map(|x| format!("{:#?}", x)) - .map_err(|errors| { - errors - .into_iter() - .map(|error| { - let printer = DiagnosticPrinter::new(|_| { - Some(TextSource::from_whole_document(fixture.content.to_string())) - }); - printer.diagnostic_to_string(&error) - }) - .collect::>() - .join("\n\n") - }) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/graphql-ir/tests/parse_with_extensions_test.rs b/compiler/crates/graphql-ir/tests/parse_with_extensions_test.rs 
index 0131f0c73e32b..df77e7a518eba 100644 --- a/compiler/crates/graphql-ir/tests/parse_with_extensions_test.rs +++ b/compiler/crates/graphql-ir/tests/parse_with_extensions_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<624dd9ddfecfe569df689202ad63347c>> */ mod parse_with_extensions; @@ -12,16 +12,107 @@ mod parse_with_extensions; use parse_with_extensions::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_fields() { +#[tokio::test] +async fn client_fields() { let input = include_str!("parse_with_extensions/fixtures/client-fields.graphql"); let expected = include_str!("parse_with_extensions/fixtures/client-fields.expected"); - test_fixture(transform_fixture, "client-fields.graphql", "parse_with_extensions/fixtures/client-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields.graphql", "parse_with_extensions/fixtures/client-fields.expected", input, expected).await; } -#[test] -fn client_fields_invalid() { +#[tokio::test] +async fn client_fields_invalid() { let input = include_str!("parse_with_extensions/fixtures/client-fields.invalid.graphql"); let expected = include_str!("parse_with_extensions/fixtures/client-fields.invalid.expected"); - test_fixture(transform_fixture, "client-fields.invalid.graphql", "parse_with_extensions/fixtures/client-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields.invalid.graphql", "parse_with_extensions/fixtures/client-fields.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_directive_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.expected"); + 
test_fixture(transform_fixture, file!(), "custom_scalar_directive_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_directive_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_directive_arg_variable() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_directive_arg_variable.graphql", "parse_with_extensions/fixtures/custom_scalar_directive_arg_variable.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_list_literal_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_list_literal_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_list_literal_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_list_mixed_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_list_mixed_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_list_mixed_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_list_other_literal_args_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.expected"); + 
test_fixture(transform_fixture, file!(), "custom_scalar_list_other_literal_args.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_list_other_literal_args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_literal_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_literal_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_literal_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_literal_arg_nested_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_literal_arg_nested.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_literal_arg_nested.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_object_literal_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_object_literal_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_object_literal_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_other_literal_args_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.expected"); + 
test_fixture(transform_fixture, file!(), "custom_scalar_other_literal_args.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_other_literal_args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_variable_arg() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_variable_arg.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_variable_arg.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_variable_arg.graphql", "parse_with_extensions/fixtures/custom_scalar_variable_arg.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_variable_default_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_variable_default_arg.invalid.graphql", "parse_with_extensions/fixtures/custom_scalar_variable_default_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_server_scalar_literal_args_invalid() { + let input = include_str!("parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_server_scalar_literal_args.invalid.graphql", "parse_with_extensions/fixtures/custom_server_scalar_literal_args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn list_of_custom_scalar_literal_arg_invalid() { + let input = include_str!("parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.graphql"); + let expected = include_str!("parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.expected"); + test_fixture(transform_fixture, 
file!(), "list_of_custom_scalar_literal_arg.invalid.graphql", "parse_with_extensions/fixtures/list_of_custom_scalar_literal_arg.invalid.expected", input, expected).await; } diff --git a/compiler/crates/graphql-ir/tests/parse_with_provider.rs b/compiler/crates/graphql-ir/tests/parse_with_provider.rs new file mode 100644 index 0000000000000..e73bf5dc27b2e --- /dev/null +++ b/compiler/crates/graphql-ir/tests/parse_with_provider.rs @@ -0,0 +1,54 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use fnv::FnvHashMap; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::FragmentVariablesSemantic; +use graphql_ir::RelayMode; +use graphql_syntax::parse_executable_with_features; +use graphql_syntax::FragmentArgumentSyntaxKind; +use graphql_syntax::ParserFeatures; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let features = ParserFeatures { + fragment_argument_capability: + FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, + }; + let ast = parse_executable_with_features(fixture.content, source_location, features).unwrap(); + let mut sources = FnvHashMap::default(); + sources.insert(source_location, fixture.content); + + let builder_options = BuilderOptions { + allow_undefined_fragment_spreads: false, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(RelayMode), + default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility + }; + + build_ir_with_extra_features(&TEST_SCHEMA, &ast.definitions, &builder_options) + .map(|x| 
format!("{:#?}", x)) + .map_err(|errors| { + errors + .into_iter() + .map(|error| { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + printer.diagnostic_to_string(&error) + }) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/fragment_with_valid_provider.expected b/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/fragment_with_valid_provider.expected index 3e76a8adf8110..f151b32076fcf 100644 --- a/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/fragment_with_valid_provider.expected +++ b/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/fragment_with_valid_provider.expected @@ -113,7 +113,7 @@ fragment TestFragment on User directives: [], }, ], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -131,7 +131,7 @@ fragment TestFragment on User alias: None, definition: WithLocation { location: fragment_with_valid_provider.graphql:210:224, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -167,7 +167,7 @@ fragment TestFragment on User alias: None, definition: WithLocation { location: fragment_with_valid_provider.graphql:251:254, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/use_fragment_spread_with_provider.expected b/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/use_fragment_spread_with_provider.expected index ad9200006f839..652b76973c9b7 100644 --- a/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/use_fragment_spread_with_provider.expected +++ b/compiler/crates/graphql-ir/tests/parse_with_provider/fixtures/use_fragment_spread_with_provider.expected @@ -34,7 +34,7 @@ fragment ChildFragment2 on User }, variable_definitions: [], used_global_variables: [], - type_condition: 
Object(69), + type_condition: Object(70), directives: [], selections: [ FragmentSpread { @@ -143,7 +143,7 @@ fragment ChildFragment2 on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -161,7 +161,7 @@ fragment ChildFragment2 on User alias: None, definition: WithLocation { location: use_fragment_spread_with_provider.graphql:286:300, - item: FieldID(474), + item: FieldID(476), }, arguments: [ Argument { @@ -199,7 +199,7 @@ fragment ChildFragment2 on User alias: None, definition: WithLocation { location: use_fragment_spread_with_provider.graphql:327:330, - item: FieldID(179), + item: FieldID(182), }, arguments: [], directives: [], @@ -275,7 +275,7 @@ fragment ChildFragment2 on User }, ], used_global_variables: [], - type_condition: Object(69), + type_condition: Object(70), directives: [ Directive { name: WithLocation { @@ -295,7 +295,7 @@ fragment ChildFragment2 on User alias: None, definition: WithLocation { location: use_fragment_spread_with_provider.graphql:526:532, - item: FieldID(483), + item: FieldID(485), }, arguments: [], directives: [], diff --git a/compiler/crates/graphql-ir/tests/parse_with_provider/mod.rs b/compiler/crates/graphql-ir/tests/parse_with_provider/mod.rs deleted file mode 100644 index 342d477251022..0000000000000 --- a/compiler/crates/graphql-ir/tests/parse_with_provider/mod.rs +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use fnv::FnvHashMap; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build_ir_with_extra_features; -use graphql_ir::BuilderOptions; -use graphql_ir::FragmentVariablesSemantic; -use graphql_ir::RelayMode; -use graphql_syntax::parse_executable_with_features; -use graphql_syntax::FragmentArgumentSyntaxKind; -use graphql_syntax::ParserFeatures; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let features = ParserFeatures { - fragment_argument_capability: - FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, - }; - let ast = parse_executable_with_features(fixture.content, source_location, features).unwrap(); - let mut sources = FnvHashMap::default(); - sources.insert(source_location, fixture.content); - - let builder_options = BuilderOptions { - allow_undefined_fragment_spreads: false, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: Some(RelayMode), - default_anonymous_operation_name: None, - }; - - build_ir_with_extra_features(&TEST_SCHEMA, &ast.definitions, &builder_options) - .map(|x| format!("{:#?}", x)) - .map_err(|errors| { - errors - .into_iter() - .map(|error| { - let printer = DiagnosticPrinter::new(|_| { - Some(TextSource::from_whole_document(fixture.content.to_string())) - }); - printer.diagnostic_to_string(&error) - }) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/graphql-ir/tests/parse_with_provider_test.rs b/compiler/crates/graphql-ir/tests/parse_with_provider_test.rs index 62d2924ec35f6..05fb2728bf53d 100644 --- a/compiler/crates/graphql-ir/tests/parse_with_provider_test.rs +++ b/compiler/crates/graphql-ir/tests/parse_with_provider_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root 
directory of this source tree. * - * @generated SignedSource<<1670bffb19ef1ac0b170241dddc11e88>> + * @generated SignedSource<> */ mod parse_with_provider; @@ -12,30 +12,30 @@ mod parse_with_provider; use parse_with_provider::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_invalid_defaultvalue_provider() { +#[tokio::test] +async fn fragment_with_invalid_defaultvalue_provider() { let input = include_str!("parse_with_provider/fixtures/fragment_with_invalid_defaultvalue_provider.graphql"); let expected = include_str!("parse_with_provider/fixtures/fragment_with_invalid_defaultvalue_provider.expected"); - test_fixture(transform_fixture, "fragment_with_invalid_defaultvalue_provider.graphql", "parse_with_provider/fixtures/fragment_with_invalid_defaultvalue_provider.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_invalid_defaultvalue_provider.graphql", "parse_with_provider/fixtures/fragment_with_invalid_defaultvalue_provider.expected", input, expected).await; } -#[test] -fn fragment_with_invalid_type_provider() { +#[tokio::test] +async fn fragment_with_invalid_type_provider() { let input = include_str!("parse_with_provider/fixtures/fragment_with_invalid_type_provider.graphql"); let expected = include_str!("parse_with_provider/fixtures/fragment_with_invalid_type_provider.expected"); - test_fixture(transform_fixture, "fragment_with_invalid_type_provider.graphql", "parse_with_provider/fixtures/fragment_with_invalid_type_provider.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_invalid_type_provider.graphql", "parse_with_provider/fixtures/fragment_with_invalid_type_provider.expected", input, expected).await; } -#[test] -fn fragment_with_valid_provider() { +#[tokio::test] +async fn fragment_with_valid_provider() { let input = include_str!("parse_with_provider/fixtures/fragment_with_valid_provider.graphql"); let expected = 
include_str!("parse_with_provider/fixtures/fragment_with_valid_provider.expected"); - test_fixture(transform_fixture, "fragment_with_valid_provider.graphql", "parse_with_provider/fixtures/fragment_with_valid_provider.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_valid_provider.graphql", "parse_with_provider/fixtures/fragment_with_valid_provider.expected", input, expected).await; } -#[test] -fn use_fragment_spread_with_provider() { +#[tokio::test] +async fn use_fragment_spread_with_provider() { let input = include_str!("parse_with_provider/fixtures/use_fragment_spread_with_provider.graphql"); let expected = include_str!("parse_with_provider/fixtures/use_fragment_spread_with_provider.expected"); - test_fixture(transform_fixture, "use_fragment_spread_with_provider.graphql", "parse_with_provider/fixtures/use_fragment_spread_with_provider.expected", input, expected); + test_fixture(transform_fixture, file!(), "use_fragment_spread_with_provider.graphql", "parse_with_provider/fixtures/use_fragment_spread_with_provider.expected", input, expected).await; } diff --git a/compiler/crates/graphql-syntax/Cargo.toml b/compiler/crates/graphql-syntax/Cargo.toml index 45b46374cf3be..60a974f832058 100644 --- a/compiler/crates/graphql-syntax/Cargo.toml +++ b/compiler/crates/graphql-syntax/Cargo.toml @@ -1,9 +1,11 @@ -# @generated by autocargo from //relay/oss/crates/graphql-syntax:[graphql-syntax,graphql-syntax_print_test,parse_document_test,parse_document_with_features_test,parse_executable_document_test,parse_executable_document_with_error_recovery_test,parse_schema_document_test] +# @generated by autocargo from //relay/oss/crates/graphql-syntax:[advance_schema_document_test,graphql-syntax,graphql-syntax_print_test,parse_document_test,parse_document_with_features_test,parse_executable_document_test,parse_executable_document_with_error_recovery_test,parse_schema_document_test] + [package] name = "graphql-syntax" version = "0.0.0" authors = 
["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -14,9 +16,10 @@ path = "tests/print_test.rs" common = { path = "../common" } intern = { path = "../intern" } logos = "0.12" -serde = { version = "1.0.136", features = ["derive", "rc"] } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } graphql-cli = { path = "../graphql-cli" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/graphql-syntax/src/lexer.rs b/compiler/crates/graphql-syntax/src/lexer.rs index a3fed227c2843..bcf9156d9606f 100644 --- a/compiler/crates/graphql-syntax/src/lexer.rs +++ b/compiler/crates/graphql-syntax/src/lexer.rs @@ -18,8 +18,20 @@ pub struct TokenKindExtras { pub error_token: Option, } -/// Lexer for the GraphQL specification: http://spec.graphql.org/ -#[derive(Logos, Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +/// Lexer for the GraphQL specification: +#[derive( + Logos, + Copy, + Clone, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] #[logos(extras = TokenKindExtras)] pub enum TokenKind { #[regex(r"[ \t\r\n\f,\ufeff]+|#[^\n\r]*", logos::skip)] diff --git a/compiler/crates/graphql-syntax/src/lib.rs b/compiler/crates/graphql-syntax/src/lib.rs index bc63c5d45faf9..8b37cc4868b29 100644 --- a/compiler/crates/graphql-syntax/src/lib.rs +++ b/compiler/crates/graphql-syntax/src/lib.rs @@ -26,7 +26,6 @@ pub use parser::FragmentArgumentSyntaxKind; pub use parser::ParserFeatures; pub use source::GraphQLSource; pub use syntax_error::SyntaxError; -pub use utils::*; use crate::parser::Parser; @@ -60,6 +59,17 @@ pub fn parse_executable( parse_executable_with_error_recovery(source, source_location).into() } +/// Parses a GraphQL document that's restricted to executable +/// definitions with custom feature 
flags passed as `features`. +pub fn parse_executable_with_features( + source: &str, + source_location: SourceLocationKey, + features: ParserFeatures, +) -> DiagnosticsResult { + parse_executable_with_error_recovery_and_parser_features(source, source_location, features) + .into() +} + /// Parses a GraphQL document that's restricted to executable definitions, /// with error recovery. pub fn parse_executable_with_error_recovery( @@ -82,17 +92,6 @@ pub fn parse_executable_with_error_recovery_and_parser_features( parser.parse_executable_document() } -/// Parses a GraphQL document that's restricted to executable -/// definitions with custom feature flags passed as `features`. -pub fn parse_executable_with_features( - source: &str, - source_location: SourceLocationKey, - features: ParserFeatures, -) -> DiagnosticsResult { - parse_executable_with_error_recovery_and_parser_features(source, source_location, features) - .into() -} - /// Parses a GraphQL document that's restricted to type system definitions /// including schema definition, type definitions and type system extensions. pub fn parse_schema_document( @@ -104,6 +103,26 @@ pub fn parse_schema_document( parser.parse_schema_document() } +/// Parses a GraphQL schema document into a list of slices of the original +/// source text where each slice is a type system definition. 
+pub fn parse_schema_document_into_type_system_definitions<'a>( + source: &'a str, + source_location: SourceLocationKey, +) -> DiagnosticsResult> { + let features = ParserFeatures::default(); + let parser = Parser::new(source, source_location, features); + parser.parse_schema_document_into_type_system_definitions() +} + +pub fn parse_type_system_definition( + source: &str, + source_location: SourceLocationKey, +) -> DiagnosticsResult { + let features = ParserFeatures::default(); + let parser = Parser::new(source, source_location, features); + parser.parse_type_system_definition() +} + pub fn parse_field_definition( source: &str, source_location: SourceLocationKey, diff --git a/compiler/crates/graphql-syntax/src/node/mod.rs b/compiler/crates/graphql-syntax/src/node.rs similarity index 100% rename from compiler/crates/graphql-syntax/src/node/mod.rs rename to compiler/crates/graphql-syntax/src/node.rs diff --git a/compiler/crates/graphql-syntax/src/node/constant_value.rs b/compiler/crates/graphql-syntax/src/node/constant_value.rs index 736512434ac92..5324063d2c499 100644 --- a/compiler/crates/graphql-syntax/src/node/constant_value.rs +++ b/compiler/crates/graphql-syntax/src/node/constant_value.rs @@ -25,6 +25,17 @@ pub enum ConstantValue { Object(List), } +macro_rules! generate_unwrap_fn { + ($fn_name:ident,$self:ident,$t:ty,$cv:pat => $result:expr) => { + pub fn $fn_name(&$self) -> $t { + match $self { + $cv => $result, + other => panic!("expected constant {} but got {:#?}", stringify!($cv), other), + } + } + } +} + impl ConstantValue { pub fn span(&self) -> Span { match self { @@ -44,6 +55,19 @@ impl ConstantValue { _ => None, } } + + pub fn get_bool_literal(&self) -> Option { + match self { + ConstantValue::Boolean(BooleanNode { value, .. 
}) => Some(*value), + _ => None, + } + } + + generate_unwrap_fn!(unwrap_int, self, i64, ConstantValue::Int(i) => i.value); + generate_unwrap_fn!(unwrap_float, self, FloatValue, ConstantValue::Float(f) => f.value); + generate_unwrap_fn!(unwrap_boolean, self, bool, ConstantValue::Boolean(b) => b.value); + generate_unwrap_fn!(unwrap_string, self, StringKey, ConstantValue::String(s) => s.value); + generate_unwrap_fn!(unwrap_enum, self, StringKey, ConstantValue::Enum(e) => e.value); } impl fmt::Display for ConstantValue { @@ -113,9 +137,10 @@ impl fmt::Display for IntNode { #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct FloatNode { pub token: Token, + /// NOTE: we can't just store an f64 here because it doesn't implement Hash, Eq, Ord pub value: FloatValue, /// Preserve a value, as it was represented in the source - /// TODO: We may remove this, as we migrate from JS + /// NOTE: this is needed for pretty-printing the AST to ensure we don't change what was in the source pub source_value: StringKey, } @@ -194,3 +219,16 @@ impl std::convert::From for FloatValue { FloatValue::new(value as f64) } } + +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct DefaultValue { + pub span: Span, + pub equals: Token, + pub value: ConstantValue, +} + +impl fmt::Display for DefaultValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_fmt(format_args!("{}", self.value)) + } +} diff --git a/compiler/crates/graphql-syntax/src/node/executable.rs b/compiler/crates/graphql-syntax/src/node/executable.rs index c87203955930f..b2cb4d2fae610 100644 --- a/compiler/crates/graphql-syntax/src/node/executable.rs +++ b/compiler/crates/graphql-syntax/src/node/executable.rs @@ -59,6 +59,13 @@ impl ExecutableDefinition { .any(|d| d.name.value == directive_name), } } + + pub fn selections(&self) -> &[Selection] { + match self { + ExecutableDefinition::Operation(node) => &node.selections.items, + ExecutableDefinition::Fragment(node) => 
&node.selections.items, + } + } } impl fmt::Debug for ExecutableDefinition { @@ -86,12 +93,21 @@ impl OperationDefinition { // https://spec.graphql.org/June2018/#sec-Anonymous-Operation-Definitions self.operation .as_ref() - .map(|(_, operation_kind)| *operation_kind) - .unwrap_or(OperationKind::Query) + .map_or(OperationKind::Query, |(_, operation_kind)| *operation_kind) } } -#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] pub enum OperationKind { Query, Mutation, @@ -157,19 +173,6 @@ impl fmt::Display for TypeCondition { } } -#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub struct DefaultValue { - pub span: Span, - pub equals: Token, - pub value: ConstantValue, -} - -impl fmt::Display for DefaultValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_fmt(format_args!("{}", self.value)) - } -} - // Selections #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] diff --git a/compiler/crates/graphql-syntax/src/node/type_annotation.rs b/compiler/crates/graphql-syntax/src/node/type_annotation.rs index 86e2914169624..2a1a645667370 100644 --- a/compiler/crates/graphql-syntax/src/node/type_annotation.rs +++ b/compiler/crates/graphql-syntax/src/node/type_annotation.rs @@ -32,16 +32,16 @@ impl TypeAnnotation { pub fn inner(&self) -> &NamedTypeAnnotation { match self { TypeAnnotation::Named(named) => named, - TypeAnnotation::List(of) => (*of).type_.inner(), - TypeAnnotation::NonNull(of) => (*of).type_.inner(), + TypeAnnotation::List(of) => of.type_.inner(), + TypeAnnotation::NonNull(of) => of.type_.inner(), } } pub fn span(&self) -> Span { match self { TypeAnnotation::Named(named) => named.name.span, - TypeAnnotation::List(of) => (*of).span, - TypeAnnotation::NonNull(of) => (*of).span, + TypeAnnotation::List(of) => of.span, + TypeAnnotation::NonNull(of) => of.span, } } } diff --git 
a/compiler/crates/graphql-syntax/src/node/type_system.rs b/compiler/crates/graphql-syntax/src/node/type_system.rs index 2030106becc14..469a8caac221f 100644 --- a/compiler/crates/graphql-syntax/src/node/type_system.rs +++ b/compiler/crates/graphql-syntax/src/node/type_system.rs @@ -7,15 +7,18 @@ use std::fmt; +use common::Named; use common::Span; +use intern::string_key::Intern; use intern::string_key::StringKey; use super::constant_directive::ConstantDirective; -use super::constant_value::ConstantValue; use super::constant_value::StringNode; use super::executable::OperationKind; use super::primitive::*; use super::type_annotation::TypeAnnotation; +use crate::DefaultValue; +use crate::TokenKind; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub enum TypeSystemDefinition { @@ -37,7 +40,7 @@ pub enum TypeSystemDefinition { } impl TypeSystemDefinition { - pub fn location(&self) -> Span { + pub fn span(&self) -> Span { match self { TypeSystemDefinition::SchemaDefinition(_extension) => Span::empty(), // Not implemented TypeSystemDefinition::SchemaExtension(_extension) => Span::empty(), // Not implemented @@ -64,44 +67,52 @@ impl fmt::Display for TypeSystemDefinition { TypeSystemDefinition::SchemaDefinition(SchemaDefinition { directives, operation_types, + .. }) => write_schema_definition_helper(f, directives, &operation_types.items), TypeSystemDefinition::SchemaExtension(SchemaExtension { directives, operation_types, + .. }) => write_schema_extension_helper(f, directives, operation_types), TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name, interfaces, fields, directives, + .. }) => write_object_helper(f, &name.value, interfaces, fields, directives, false), TypeSystemDefinition::ObjectTypeExtension(ObjectTypeExtension { name, interfaces, fields, directives, + .. 
}) => write_object_helper(f, &name.value, interfaces, fields, directives, true), TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { name, + interfaces, fields, directives, .. - }) => write_interface_helper(f, &name.value, fields, directives, false), + }) => write_interface_helper(f, &name.value, interfaces, fields, directives, false), TypeSystemDefinition::InterfaceTypeExtension(InterfaceTypeExtension { name, - interfaces: _, + interfaces, fields, directives, - }) => write_interface_helper(f, &name.value, fields, directives, true), + .. + }) => write_interface_helper(f, &name.value, interfaces, fields, directives, true), TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { name, directives, members, + .. }) => write_union_type_definition_helper(f, &name.value, directives, members, false), TypeSystemDefinition::UnionTypeExtension(UnionTypeExtension { name, directives, members, + .. }) => write_union_type_definition_helper(f, &name.value, directives, members, true), TypeSystemDefinition::DirectiveDefinition(DirectiveDefinition { name, @@ -109,6 +120,8 @@ impl fmt::Display for TypeSystemDefinition { repeatable, locations, description, + hack_source, + .. }) => write_directive_definition_helper( f, &name.value, @@ -116,11 +129,13 @@ impl fmt::Display for TypeSystemDefinition { repeatable, locations, description, + hack_source, ), TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { name, directives, fields, + .. }) => { write_input_object_type_definition_helper(f, &name.value, directives, fields, false) } @@ -128,6 +143,7 @@ impl fmt::Display for TypeSystemDefinition { name, directives, fields, + .. }) => { write_input_object_type_definition_helper(f, &name.value, directives, fields, true) } @@ -135,39 +151,105 @@ impl fmt::Display for TypeSystemDefinition { name, directives, values, + .. 
}) => write_enum_type_definition_helper(f, &name.value, directives, values, false), TypeSystemDefinition::EnumTypeExtension(EnumTypeExtension { name, directives, values, + .. }) => write_enum_type_definition_helper(f, &name.value, directives, values, true), TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { name, directives, + .. }) => write_scalar_type_definition_helper(f, &name.value, directives, false), - TypeSystemDefinition::ScalarTypeExtension(ScalarTypeExtension { name, directives }) => { - write_scalar_type_definition_helper(f, &name.value, directives, true) - } + TypeSystemDefinition::ScalarTypeExtension(ScalarTypeExtension { + name, + directives, + .. + }) => write_scalar_type_definition_helper(f, &name.value, directives, true), + } + } +} + +impl Named for TypeSystemDefinition { + type Name = StringKey; + fn name(&self) -> StringKey { + match self { + TypeSystemDefinition::SchemaDefinition(_definition) => "".intern(), // Not implemented + TypeSystemDefinition::SchemaExtension(_extension) => "".intern(), // Not implemented + TypeSystemDefinition::ObjectTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::ObjectTypeExtension(extension) => extension.name.value, + TypeSystemDefinition::InterfaceTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::InterfaceTypeExtension(extension) => extension.name.value, + TypeSystemDefinition::UnionTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::UnionTypeExtension(extension) => extension.name.value, + TypeSystemDefinition::DirectiveDefinition(definition) => definition.name.value, + TypeSystemDefinition::InputObjectTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::InputObjectTypeExtension(extension) => extension.name.value, + TypeSystemDefinition::EnumTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::EnumTypeExtension(extension) => extension.name.value, + 
TypeSystemDefinition::ScalarTypeDefinition(definition) => definition.name.value, + TypeSystemDefinition::ScalarTypeExtension(extension) => extension.name.value, } } } +/// This trait provides a *single* known into method, so we don't need +/// to type method usages that utilize this trait and call into_definition(). +/// It may be useful in the future to define a DefinitionIntoExtension trait +/// that does the inverse, but we haven't needed it yet (add it when we do!). +pub trait ExtensionIntoDefinition: Sized { + type DefinitionType: From; + + fn into_definition(self) -> Self::DefinitionType { + self.into() + } +} + #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct SchemaDefinition { pub directives: Vec, pub operation_types: List, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct SchemaExtension { pub directives: Vec, pub operation_types: Option>, + pub span: Span, +} +impl From for SchemaDefinition { + fn from(ext: SchemaExtension) -> Self { + Self { + directives: ext.directives, + operation_types: ext.operation_types.unwrap_or(List { + span: Span::empty(), + start: Token { + span: Span::empty(), + kind: TokenKind::OpenBrace, + }, + items: Vec::new(), + end: Token { + span: Span::empty(), + kind: TokenKind::CloseBrace, + }, + }), + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for SchemaExtension { + type DefinitionType = SchemaDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct OperationTypeDefinition { pub operation: OperationType, pub type_: Identifier, + pub span: Span, } impl fmt::Display for OperationTypeDefinition { @@ -176,7 +258,7 @@ impl fmt::Display for OperationTypeDefinition { } } -#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Copy, Clone)] +#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Copy, Clone, serde::Serialize)] pub enum OperationType { Query, Mutation, @@ -199,6 +281,7 @@ pub struct ObjectTypeDefinition { pub interfaces: Vec, pub directives: Vec, 
pub fields: Option>, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -207,6 +290,21 @@ pub struct ObjectTypeExtension { pub interfaces: Vec, pub directives: Vec, pub fields: Option>, + pub span: Span, +} +impl From for ObjectTypeDefinition { + fn from(ext: ObjectTypeExtension) -> Self { + Self { + name: ext.name, + interfaces: ext.interfaces, + directives: ext.directives, + fields: ext.fields, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for ObjectTypeExtension { + type DefinitionType = ObjectTypeDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -215,6 +313,7 @@ pub struct InterfaceTypeDefinition { pub interfaces: Vec, pub directives: Vec, pub fields: Option>, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -223,6 +322,21 @@ pub struct InterfaceTypeExtension { pub interfaces: Vec, pub directives: Vec, pub fields: Option>, + pub span: Span, +} +impl From for InterfaceTypeDefinition { + fn from(ext: InterfaceTypeExtension) -> Self { + Self { + name: ext.name, + interfaces: ext.interfaces, + directives: ext.directives, + fields: ext.fields, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for InterfaceTypeExtension { + type DefinitionType = InterfaceTypeDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -230,6 +344,7 @@ pub struct UnionTypeDefinition { pub name: Identifier, pub directives: Vec, pub members: Vec, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -237,18 +352,46 @@ pub struct UnionTypeExtension { pub name: Identifier, pub directives: Vec, pub members: Vec, + pub span: Span, +} +impl From for UnionTypeDefinition { + fn from(ext: UnionTypeExtension) -> Self { + Self { + name: ext.name, + directives: ext.directives, + members: ext.members, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for UnionTypeExtension { + type DefinitionType = UnionTypeDefinition; } #[derive(Clone, Eq, PartialEq, 
Ord, PartialOrd, Debug)] pub struct ScalarTypeDefinition { pub name: Identifier, pub directives: Vec, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct ScalarTypeExtension { pub name: Identifier, pub directives: Vec, + pub span: Span, +} +impl From for ScalarTypeDefinition { + fn from(ext: ScalarTypeExtension) -> Self { + Self { + name: ext.name, + directives: ext.directives, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for ScalarTypeExtension { + type DefinitionType = ScalarTypeDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -256,6 +399,7 @@ pub struct EnumTypeDefinition { pub name: Identifier, pub directives: Vec, pub values: Option>, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -263,6 +407,20 @@ pub struct EnumTypeExtension { pub name: Identifier, pub directives: Vec, pub values: Option>, + pub span: Span, +} +impl From for EnumTypeDefinition { + fn from(ext: EnumTypeExtension) -> Self { + Self { + name: ext.name, + directives: ext.directives, + values: ext.values, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for EnumTypeExtension { + type DefinitionType = EnumTypeDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -270,6 +428,7 @@ pub struct InputObjectTypeDefinition { pub name: Identifier, pub directives: Vec, pub fields: Option>, + pub span: Span, } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] @@ -277,12 +436,27 @@ pub struct InputObjectTypeExtension { pub name: Identifier, pub directives: Vec, pub fields: Option>, + pub span: Span, +} +impl From for InputObjectTypeDefinition { + fn from(ext: InputObjectTypeExtension) -> Self { + Self { + name: ext.name, + directives: ext.directives, + fields: ext.fields, + span: ext.span, + } + } +} +impl ExtensionIntoDefinition for InputObjectTypeExtension { + type DefinitionType = InputObjectTypeDefinition; } #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub 
struct EnumValueDefinition { pub name: Identifier, pub directives: Vec, + pub span: Span, } impl fmt::Display for EnumValueDefinition { @@ -299,6 +473,8 @@ pub struct DirectiveDefinition { pub repeatable: bool, pub locations: Vec, pub description: Option, + pub hack_source: Option, + pub span: Span, } #[derive(PartialEq, Eq, Ord, PartialOrd, Hash, Debug, Clone, Copy)] @@ -360,12 +536,13 @@ impl fmt::Display for DirectiveLocation { } } -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)] pub struct InputValueDefinition { pub name: Identifier, pub type_: TypeAnnotation, - pub default_value: Option, + pub default_value: Option, pub directives: Vec, + pub span: Span, } impl fmt::Display for InputValueDefinition { @@ -399,6 +576,8 @@ pub struct FieldDefinition { pub arguments: Option>, pub directives: Vec, pub description: Option, + pub hack_source: Option, + pub span: Span, } impl fmt::Display for FieldDefinition { @@ -505,6 +684,7 @@ fn write_object_helper( fn write_interface_helper( f: &mut fmt::Formatter<'_>, name: &StringKey, + interfaces: &[Identifier], fields: &Option>, directives: &[ConstantDirective], is_extension: bool, @@ -514,6 +694,10 @@ fn write_interface_helper( } write!(f, "interface {}", name)?; + if !interfaces.is_empty() { + write!(f, " implements ")?; + write_list(f, interfaces, " & ")?; + } write_directives(f, directives)?; if let Some(fields) = fields.as_ref() { write_fields(f, &fields.items)?; @@ -548,6 +732,7 @@ fn write_directive_definition_helper( _repeatable: &bool, locations: &[DirectiveLocation], _description: &Option, + _hack_source: &Option, ) -> fmt::Result { write!(f, "directive @{}", name)?; if let Some(arguments) = arguments.as_ref() { diff --git a/compiler/crates/graphql-syntax/src/parser.rs b/compiler/crates/graphql-syntax/src/parser.rs index c5d5375d1ee33..0789b371f1276 100644 --- a/compiler/crates/graphql-syntax/src/parser.rs +++ 
b/compiler/crates/graphql-syntax/src/parser.rs @@ -22,7 +22,7 @@ use crate::syntax_error::SyntaxError; type ParseResult = Result; -#[derive(Default, PartialEq)] +#[derive(Default, Clone, Copy, PartialEq)] pub enum FragmentArgumentSyntaxKind { #[default] None, @@ -30,7 +30,7 @@ pub enum FragmentArgumentSyntaxKind { SpreadArgumentsAndFragmentVariableDefinitions, } -#[derive(Default)] +#[derive(Default, Clone, Copy)] pub struct ParserFeatures { /// Whether and how to enable the experimental fragment variables definitions syntax pub fragment_argument_capability: FragmentArgumentSyntaxKind, @@ -176,6 +176,35 @@ impl<'a> Parser<'a> { } } + pub fn parse_schema_document_into_type_system_definitions( + mut self, + ) -> DiagnosticsResult> { + let start = self.index(); + let definition_tokens = self + .parse_list( + |s| s.peek_type_system_definition(), + |s| s.advance_type_system_definition(), + ) + .unwrap_or_else(|_| Vec::new()); + if self.errors.is_empty() { + let definition_strs = + // Create range for first definition + std::iter::once((start, definition_tokens.first().map_or(0, |t| t.span.start))) + // Chain rest of definitions + .chain(definition_tokens.windows(2).map(|definition_pairs| { + let from = definition_pairs[0].span.start; + let to = definition_pairs[1].span.start; + (from, to) + })) + .map(|(from, to)| self.source[from as usize..to as usize].into()) + .collect::>(); + self.parse_eof()?; + Ok(definition_strs) + } else { + Err(self.errors) + } + } + /// Parses a type annotation such as `ID` or `[User!]!`. 
pub fn parse_type(mut self) -> DiagnosticsResult { let type_annotation = self.parse_type_annotation(); @@ -209,6 +238,15 @@ impl<'a> Parser<'a> { } } + pub fn parse_type_system_definition(mut self) -> DiagnosticsResult { + let type_system_definition = self.parse_type_system_definition_impl(); + if self.errors.is_empty() { + Ok(type_system_definition.unwrap()) + } else { + Err(self.errors) + } + } + fn parse_eof(mut self) -> DiagnosticsResult<()> { self.parse_kind(TokenKind::EndOfFile) .map(|_| ()) @@ -244,7 +282,7 @@ impl<'a> Parser<'a> { let start = self.index(); let definitions = self.parse_list( |s| s.peek_type_system_definition(), - |s| s.parse_type_system_definition(), + |s| s.parse_type_system_definition_impl(), )?; let end = self.index(); let span = Span::new(start, end); @@ -283,7 +321,7 @@ impl<'a> Parser<'a> { | (TokenKind::Identifier, "input") | (TokenKind::Identifier, "directive") | (TokenKind::Identifier, "extend") => Ok(Definition::TypeSystemDefinition( - self.parse_type_system_definition()?, + self.parse_type_system_definition_impl()?, )), _ => { let error = Diagnostic::error( @@ -345,7 +383,7 @@ impl<'a> Parser<'a> { /// Definition : /// [] ExecutableDefinition /// [x] TypeSystemDefinition - /// [] TypeSystemExtension + /// [x] TypeSystemExtension fn peek_type_system_definition(&self) -> bool { let token = self.peek(); match token.kind { @@ -369,9 +407,10 @@ impl<'a> Parser<'a> { /// Definition : /// [] ExecutableDefinition /// [x] TypeSystemDefinition - /// [] TypeSystemExtension - fn parse_type_system_definition(&mut self) -> ParseResult { + /// [x] TypeSystemExtension + fn parse_type_system_definition_impl(&mut self) -> ParseResult { let description = self.parse_optional_description(); + let hack_source = self.parse_optional_hack_source(); let token = self.peek(); if token.kind != TokenKind::Identifier { // TODO @@ -401,7 +440,7 @@ impl<'a> Parser<'a> { self.parse_input_object_type_definition()?, )), "directive" => 
Ok(TypeSystemDefinition::DirectiveDefinition( - self.parse_directive_definition(description)?, + self.parse_directive_definition(description, hack_source)?, )), "extend" => self.parse_type_system_extension(), token_str => { @@ -415,6 +454,38 @@ impl<'a> Parser<'a> { } } + /// Definition : + /// [] ExecutableDefinition + /// [x] TypeSystemDefinition + /// [] TypeSystemExtension + fn advance_type_system_definition(&mut self) -> ParseResult { + self.advance_optional_description(); // description + self.advance_optional_hack_source(); // hack_source + let token = self.peek(); + if token.kind != TokenKind::Identifier { + return Err(()); + } + match self.source(token) { + "schema" => self.advance_schema_definition(), + "scalar" => self.advance_scalar_type_definition(), + "type" => self.advance_object_type_definition(), + "interface" => self.advance_interface_type_definition(), + "union" => self.advance_union_type_definition(), + "enum" => self.advance_enum_type_definition(), + "input" => self.advance_input_object_type_definition(), + "directive" => self.advance_directive_definition(), + "extend" => self.advance_type_system_extension(), + token_str => { + let error = Diagnostic::error( + format!("Unexpected token: `{}`", token_str), + Location::new(self.source_location, token.span), + ); + self.record_error(error); + Err(()) + } + } + } + /** * TypeSystemExtension : * - SchemaExtension @@ -428,7 +499,7 @@ impl<'a> Parser<'a> { * - EnumTypeExtension * - InputObjectTypeDefinition */ - pub fn parse_type_system_extension(&mut self) -> ParseResult { + fn parse_type_system_extension(&mut self) -> ParseResult { self.parse_keyword("extend")?; let token = self.parse_kind(TokenKind::Identifier)?; match self.source(&token) { @@ -464,10 +535,46 @@ impl<'a> Parser<'a> { } } + /** + * TypeSystemExtension : + * - SchemaExtension + * - TypeExtension + * + * TypeExtension : + * - ScalarTypeExtension + * - ObjectTypeExtension + * - InterfaceTypeExtension + * - UnionTypeExtension + * - 
EnumTypeExtension + * - InputObjectTypeDefinition + */ + fn advance_type_system_extension(&mut self) -> ParseResult { + self.advance_keyword("extend")?; + let token = self.advance_kind(TokenKind::Identifier)?; + match self.source(&token) { + "schema" => self.advance_schema_extension(), + "scalar" => self.advance_scalar_type_extension(), + "type" => self.advance_object_type_extension(), + "interface" => self.advance_interface_type_extension(), + "union" => self.advance_union_type_extension(), + "enum" => self.advance_enum_type_extension(), + "input" => self.advance_input_object_type_extension(), + token_str => { + let error = Diagnostic::error( + format!("Unexpected token `{}`", token_str), + Location::new(self.source_location, token.span), + ); + self.record_error(error); + Err(()) + } + } + } + /** * SchemaDefinition : schema Directives? { OperationTypeDefinition+ } */ fn parse_schema_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("schema")?; let directives = self.parse_constant_directives()?; let operation_types = self.parse_delimited_nonempty_list( @@ -475,12 +582,30 @@ impl<'a> Parser<'a> { TokenKind::CloseBrace, Self::parse_operation_type_definition, )?; + let end = self.index(); + let span = Span::new(start, end); Ok(SchemaDefinition { directives, operation_types, + span, }) } + /** + * SchemaDefinition : schema Directives? { OperationTypeDefinition+ } + */ + fn advance_schema_definition(&mut self) -> ParseResult { + self.advance_keyword("schema")?; + self.advance_constant_directives()?; // directives + self.advance_delimited_nonempty_list( + // operation_types + TokenKind::OpenBrace, + TokenKind::CloseBrace, + Self::advance_operation_type_definition, + )?; + Ok(self.current) + } + /** * SchemaExtension : * - extend schema Directives? 
{ OperationTypeDefinition+ } @@ -488,26 +613,64 @@ impl<'a> Parser<'a> { */ fn parse_schema_extension(&mut self) -> ParseResult { // `extend schema` was already parsed + let start = self.index(); let directives = self.parse_constant_directives()?; let operation_types = self.parse_optional_delimited_nonempty_list( TokenKind::OpenBrace, TokenKind::CloseBrace, Self::parse_operation_type_definition, )?; + let end = self.index(); + let span = Span::new(start, end); Ok(SchemaExtension { directives, operation_types, + span, }) } + /** + * SchemaExtension : + * - extend schema Directives? { OperationTypeDefinition+ } + * - extend schema Directives + */ + fn advance_schema_extension(&mut self) -> ParseResult { + // `extend schema` was already parsed + self.advance_constant_directives()?; // directives + self.advance_optional_delimited_nonempty_list( + // operation_types + TokenKind::OpenBrace, + TokenKind::CloseBrace, + Self::advance_operation_type_definition, + )?; + Ok(self.current) + } + /** * OperationTypeDefinition : OperationType : NamedType */ fn parse_operation_type_definition(&mut self) -> ParseResult { + let start = self.index(); let operation = self.parse_operation_type()?; self.parse_kind(TokenKind::Colon)?; let type_ = self.parse_identifier()?; - Ok(OperationTypeDefinition { operation, type_ }) + let end = self.index(); + let span = Span::new(start, end); + Ok(OperationTypeDefinition { + operation, + type_, + span, + }) + } + + /** + * OperationTypeDefinition : OperationType : NamedType + */ + fn advance_operation_type_definition(&mut self) -> ParseResult { + self.advance_operation_type()?; // operation + self.advance_kind(TokenKind::Colon)?; + self.advance_identifier()?; // type_ + Ok(self.current) } /** @@ -533,50 +696,115 @@ impl<'a> Parser<'a> { } } + /** + * OperationType : one of query mutation subscription + */ + fn advance_operation_type(&mut self) -> ParseResult { + let token = self.advance_kind(TokenKind::Identifier)?; + match self.source(&token) { + 
"query" => Ok(token), + "mutation" => Ok(token), + "subscription" => Ok(token), + token_str => { + let error = Diagnostic::error( + format!( + "Expected one of `query`, `mutation`, `subscription`, got `{}`", + token_str + ), + Location::new(self.source_location, token.span), + ); + self.record_error(error); + Err(()) + } + } + } + fn parse_object_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("type")?; let name = self.parse_identifier()?; let interfaces = self.parse_implements_interfaces()?; let directives = self.parse_constant_directives()?; let fields = self.parse_fields_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(ObjectTypeDefinition { name, interfaces, directives, fields, + span, }) } + fn advance_object_type_definition(&mut self) -> ParseResult { + self.advance_keyword("type")?; + self.advance_identifier()?; // name + self.advance_implements_interfaces()?; // interfaces + self.advance_constant_directives()?; // directives + self.advance_fields_definition()?; // fields + Ok(self.current) + } + fn parse_interface_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("interface")?; let name = self.parse_identifier()?; let interfaces = self.parse_implements_interfaces()?; let directives = self.parse_constant_directives()?; let fields = self.parse_fields_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(InterfaceTypeDefinition { name, interfaces, directives, fields, + span, }) } + fn advance_interface_type_definition(&mut self) -> ParseResult { + self.advance_keyword("interface")?; + self.advance_identifier()?; // name + self.advance_implements_interfaces()?; // interfaces + self.advance_constant_directives()?; // directives + self.advance_fields_definition()?; // fields + Ok(self.current) + } + /** * UnionTypeDefinition : * - Description? union Name Directives? UnionMemberTypes? 
*/ fn parse_union_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("union")?; let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let members = self.parse_union_member_types()?; + let end = self.index(); + let span = Span::new(start, end); Ok(UnionTypeDefinition { name, directives, members, + span, }) } + /** + * UnionTypeDefinition : + * - Description? union Name Directives? UnionMemberTypes? + */ + fn advance_union_type_definition(&mut self) -> ParseResult { + self.advance_keyword("union")?; + self.advance_identifier()?; + self.advance_constant_directives()?; + self.advance_union_member_types()?; + Ok(self.current) + } + /** * UnionTypeExtension : * - extend union Name Directives? UnionMemberTypes @@ -584,16 +812,33 @@ impl<'a> Parser<'a> { */ fn parse_union_type_extension(&mut self) -> ParseResult { // `extend union` was parsed before + let start = self.index(); let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let members = self.parse_union_member_types()?; + let end = self.index(); + let span = Span::new(start, end); Ok(UnionTypeExtension { name, directives, members, + span, }) } + /** + * UnionTypeExtension : + * - extend union Name Directives? UnionMemberTypes + * - extend union Name Directives + */ + fn advance_union_type_extension(&mut self) -> ParseResult { + // `extend union` was parsed before + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + self.advance_union_member_types()?; // members + Ok(self.current) + } + /** * UnionMemberTypes : * - = `|`? NamedType @@ -611,22 +856,54 @@ impl<'a> Parser<'a> { Ok(members) } + /** + * UnionMemberTypes : + * - = `|`? 
NamedType + * - UnionMemberTypes | NamedType + */ + fn advance_union_member_types(&mut self) -> ParseResult { + if self.advance_optional_kind(TokenKind::Equals).is_some() { + self.advance_optional_kind(TokenKind::Pipe); + self.advance_identifier()?; + while self.advance_optional_kind(TokenKind::Pipe).is_some() { + self.advance_identifier()?; + } + } + Ok(self.current) + } + /** * EnumTypeDefinition : * - Description? enum Name Directives? EnumValuesDefinition? */ fn parse_enum_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("enum")?; let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let values = self.parse_enum_values_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(EnumTypeDefinition { name, directives, values, + span, }) } + /** + * EnumTypeDefinition : + * - Description? enum Name Directives? EnumValuesDefinition? + */ + fn advance_enum_type_definition(&mut self) -> ParseResult { + self.advance_keyword("enum")?; + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + self.advance_enum_values_definition()?; // values + Ok(self.current) + } + /** * EnumTypeExtension : * - extend enum Name Directives? EnumValuesDefinition @@ -634,16 +911,33 @@ impl<'a> Parser<'a> { */ fn parse_enum_type_extension(&mut self) -> ParseResult { // `extend enum` was already parsed + let start = self.index(); let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let values = self.parse_enum_values_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(EnumTypeExtension { name, directives, values, + span, }) } + /** + * EnumTypeExtension : + * - extend enum Name Directives? 
EnumValuesDefinition + * - extend enum Name Directives + */ + fn advance_enum_type_extension(&mut self) -> ParseResult { + // `extend enum` was already parsed + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + self.advance_enum_values_definition()?; // values + Ok(self.current) + } + /** * EnumValuesDefinition : { EnumValueDefinition+ } */ @@ -655,16 +949,46 @@ impl<'a> Parser<'a> { ) } + /** + * EnumValuesDefinition : { EnumValueDefinition+ } + */ + fn advance_enum_values_definition(&mut self) -> ParseResult { + self.advance_optional_delimited_nonempty_list( + TokenKind::OpenBrace, + TokenKind::CloseBrace, + Self::advance_enum_value_definition, + ) + } + /** * EnumValueDefinition : Description? EnumValue Directives? * * EnumValue : Name */ fn parse_enum_value_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_optional_description(); let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; - Ok(EnumValueDefinition { name, directives }) + let end = self.index(); + let span = Span::new(start, end); + Ok(EnumValueDefinition { + name, + directives, + span, + }) + } + + /** + * EnumValueDefinition : Description? EnumValue Directives? 
+ * + * EnumValue : Name + */ + fn advance_enum_value_definition(&mut self) -> ParseResult { + self.advance_optional_description(); + self.advance_identifier()?; + self.advance_constant_directives()?; + Ok(self.current) } /** @@ -675,6 +999,7 @@ impl<'a> Parser<'a> { */ fn parse_object_type_extension(&mut self) -> ParseResult { // `extend type` was parsed before + let start = self.index(); let name = self.parse_identifier()?; let interfaces = self.parse_implements_interfaces()?; let directives = self.parse_constant_directives()?; @@ -686,14 +1011,32 @@ impl<'a> Parser<'a> { )); return Err(()); } + let end = self.index(); + let span = Span::new(start, end); Ok(ObjectTypeExtension { name, interfaces, directives, fields, + span, }) } + /** + * ObjectTypeExtension : + * - extend type Name ImplementsInterfaces? DirectivesConst? FieldsDefinition + * - extend type Name ImplementsInterfaces? DirectivesConst + * - extend type Name ImplementsInterfaces + */ + fn advance_object_type_extension(&mut self) -> ParseResult { + // `extend type` was parsed before + self.advance_identifier()?; // name + self.advance_implements_interfaces()?; // interfaces + self.advance_constant_directives()?; // directives + self.advance_fields_definition()?; // fields + Ok(self.current) + } + /** * InterfaceTypeExtension : * - extend interface Name ImplementsInterfaces? DirectivesConst? 
FieldsDefinition @@ -702,6 +1045,7 @@ impl<'a> Parser<'a> { */ fn parse_interface_type_extension(&mut self) -> ParseResult { // `extend interface` was parsed before + let start = self.index(); let name = self.parse_identifier()?; let interfaces = self.parse_implements_interfaces()?; let directives = self.parse_constant_directives()?; @@ -713,22 +1057,57 @@ impl<'a> Parser<'a> { )); return Err(()); } + let end = self.index(); + let span = Span::new(start, end); Ok(InterfaceTypeExtension { name, interfaces, directives, fields, + span, }) } + /** + * InterfaceTypeExtension : + * - extend interface Name ImplementsInterfaces? DirectivesConst? FieldsDefinition + * - extend interface Name ImplementsInterfaces? DirectivesConst + * - extend interface Name ImplementsInterfaces + */ + fn advance_interface_type_extension(&mut self) -> ParseResult { + // `extend interface` was parsed before + self.advance_identifier()?; // name + self.advance_implements_interfaces()?; // interfaces + self.advance_constant_directives()?; // directives + self.advance_fields_definition()?; // fields + Ok(self.current) + } + /** * ScalarTypeDefinition : Description? scalar Name Directives? */ fn parse_scalar_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("scalar")?; let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; - Ok(ScalarTypeDefinition { name, directives }) + let end = self.index(); + let span = Span::new(start, end); + Ok(ScalarTypeDefinition { + name, + directives, + span, + }) + } + + /** + * ScalarTypeDefinition : Description? scalar Name Directives? 
+ */ + fn advance_scalar_type_definition(&mut self) -> ParseResult { + self.advance_keyword("scalar")?; + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + Ok(self.current) } /** @@ -737,9 +1116,27 @@ impl<'a> Parser<'a> { */ fn parse_scalar_type_extension(&mut self) -> ParseResult { // `extend scalar` was parsed before + let start = self.index(); let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; - Ok(ScalarTypeExtension { name, directives }) + let end = self.index(); + let span = Span::new(start, end); + Ok(ScalarTypeExtension { + name, + directives, + span, + }) + } + + /** + * ScalarTypeExtension : + * - extend scalar Name Directives + */ + fn advance_scalar_type_extension(&mut self) -> ParseResult { + // `extend scalar` was parsed before + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + Ok(self.current) } /** @@ -747,17 +1144,33 @@ impl<'a> Parser<'a> { * - Description? input Name Directives? InputFieldsDefinition? */ fn parse_input_object_type_definition(&mut self) -> ParseResult { + let start = self.index(); self.parse_keyword("input")?; let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let fields = self.parse_input_fields_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(InputObjectTypeDefinition { name, directives, fields, + span, }) } + /** + * InputObjectTypeDefinition : + * - Description? input Name Directives? InputFieldsDefinition? + */ + fn advance_input_object_type_definition(&mut self) -> ParseResult { + self.advance_keyword("input")?; + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + self.advance_input_fields_definition()?; // fields + Ok(self.current) + } + /** * InputObjectTypeExtension : * - extend input Name Directives? 
InputFieldsDefinition @@ -765,16 +1178,33 @@ impl<'a> Parser<'a> { */ fn parse_input_object_type_extension(&mut self) -> ParseResult { // `extend input` was parsed already here + let start = self.index(); let name = self.parse_identifier()?; let directives = self.parse_constant_directives()?; let fields = self.parse_input_fields_definition()?; + let end = self.index(); + let span = Span::new(start, end); Ok(InputObjectTypeExtension { name, directives, fields, + span, }) } + /** + * InputObjectTypeExtension : + * - extend input Name Directives? InputFieldsDefinition + * - extend input Name Directives + */ + fn advance_input_object_type_extension(&mut self) -> ParseResult { + // `extend input` was parsed already here + self.advance_identifier()?; // name + self.advance_constant_directives()?; // directives + self.advance_input_fields_definition()?; // fields + Ok(self.current) + } + /** * InputFieldsDefinition : { InputValueDefinition+ } */ @@ -786,6 +1216,17 @@ impl<'a> Parser<'a> { ) } + /** + * InputFieldsDefinition : { InputValueDefinition+ } + */ + fn advance_input_fields_definition(&mut self) -> ParseResult { + self.advance_optional_delimited_nonempty_list( + TokenKind::OpenBrace, + TokenKind::CloseBrace, + Self::advance_input_value_def, + ) + } + /** * DirectiveDefinition : * - Description? directive @ Name ArgumentsDefinition? `repeatable`? 
on DirectiveLocations @@ -793,7 +1234,9 @@ impl<'a> Parser<'a> { fn parse_directive_definition( &mut self, description: Option, + hack_source: Option, ) -> ParseResult { + let start = self.index(); self.parse_keyword("directive")?; self.parse_kind(TokenKind::At)?; let name = self.parse_identifier()?; @@ -805,15 +1248,38 @@ impl<'a> Parser<'a> { } self.parse_keyword("on")?; let locations = self.parse_directive_locations()?; + let end = self.index(); + let span = Span::new(start, end); Ok(DirectiveDefinition { name, arguments, repeatable, locations, description, + hack_source, + span, }) } + /** + * DirectiveDefinition : + * - Description? directive @ Name ArgumentsDefinition? `repeatable`? on DirectiveLocations + */ + fn advance_directive_definition(&mut self) -> ParseResult { + self.advance_keyword("directive")?; + self.advance_kind(TokenKind::At)?; + self.advance_identifier()?; // name + self.advance_argument_defs()?; // arguments + + let repeatable = self.peek_keyword("repeatable"); + if repeatable { + self.advance_token(); + } + self.advance_keyword("on")?; + self.advance_directive_locations()?; // locations + Ok(self.current) + } + /** * DirectiveLocations : * - `|`? DirectiveLocation @@ -829,6 +1295,20 @@ impl<'a> Parser<'a> { Ok(locations) } + /** + * DirectiveLocations : + * - `|`? 
DirectiveLocation + * - DirectiveLocations | DirectiveLocation + */ + fn advance_directive_locations(&mut self) -> ParseResult { + self.advance_optional_kind(TokenKind::Pipe); + self.advance_directive_location()?; + while self.advance_optional_kind(TokenKind::Pipe).is_some() { + self.advance_directive_location()?; + } + Ok(self.current) + } + /* * DirectiveLocation : * - ExecutableDirectiveLocation @@ -889,6 +1369,10 @@ impl<'a> Parser<'a> { } } + fn advance_directive_location(&mut self) -> ParseResult { + self.advance_kind(TokenKind::Identifier) + } + /** * Description : StringValue */ @@ -910,6 +1394,49 @@ impl<'a> Parser<'a> { } } + /** + * Description : StringValue + */ + fn advance_optional_description(&mut self) -> Option { + match self.peek_token_kind() { + TokenKind::StringLiteral => Some(self.parse_token()), + TokenKind::BlockStringLiteral => Some(self.parse_token()), + _ => None, + } + } + + /** + * hack_source : StringValue + */ + fn parse_optional_hack_source(&mut self) -> Option { + match self.peek_token_kind() { + TokenKind::StringLiteral => { + let token = self.parse_token(); + let source = self.source(&token); + let value = source[1..source.len() - 1].to_string().intern(); + Some(StringNode { token, value }) + } + TokenKind::BlockStringLiteral => { + let token = self.parse_token(); + let source = self.source(&token); + let value = clean_block_string_literal(source).intern(); + Some(StringNode { token, value }) + } + _ => None, + } + } + + /** + * hack_source : StringValue + */ + fn advance_optional_hack_source(&mut self) -> Option { + match self.peek_token_kind() { + TokenKind::StringLiteral => Some(self.parse_token()), + TokenKind::BlockStringLiteral => Some(self.parse_token()), + _ => None, + } + } + /** * FieldsDefinition : { FieldDefinition+ } */ @@ -921,26 +1448,58 @@ impl<'a> Parser<'a> { ) } + /** + * FieldsDefinition : { FieldDefinition+ } + */ + fn advance_fields_definition(&mut self) -> ParseResult { + 
self.advance_optional_delimited_nonempty_list( + TokenKind::OpenBrace, + TokenKind::CloseBrace, + Self::advance_field_definition_impl, + ) + } + /** * FieldDefinition : * - Description? Name ArgumentsDefinition? : Type Directives? */ fn parse_field_definition_impl(&mut self) -> ParseResult { + let start = self.index(); let description = self.parse_optional_description(); + let hack_source = self.parse_optional_hack_source(); let name = self.parse_identifier()?; let arguments = self.parse_argument_defs()?; self.parse_kind(TokenKind::Colon)?; let type_ = self.parse_type_annotation()?; let directives = self.parse_constant_directives()?; + let end = self.index(); + let span = Span::new(start, end); Ok(FieldDefinition { name, type_, arguments, directives, description, + hack_source, + span, }) } + /** + * FieldDefinition : + * - Description? Name ArgumentsDefinition? : Type Directives? + */ + fn advance_field_definition_impl(&mut self) -> ParseResult { + self.advance_optional_description(); // description + self.advance_optional_hack_source(); // hack_source + self.advance_identifier()?; // name + self.advance_argument_defs()?; // arguments + self.advance_kind(TokenKind::Colon)?; + self.advance_type_annotation()?; // type_ + self.advance_constant_directives()?; // directives + Ok(self.current) + } + fn parse_field_definition_stub_impl(&mut self) -> ParseResult { let name = self.parse_identifier()?; let arguments = self.parse_argument_defs()?; @@ -958,29 +1517,60 @@ impl<'a> Parser<'a> { ) } + /** + * ArgumentsDefinition : ( InputValueDefinition+ ) + */ + fn advance_argument_defs(&mut self) -> ParseResult { + self.advance_optional_delimited_nonempty_list( + TokenKind::OpenParen, + TokenKind::CloseParen, + Self::advance_input_value_def, + ) + } + /** * InputValueDefinition : * - Description? Name : Type DefaultValue? Directives? 
*/ fn parse_input_value_def(&mut self) -> ParseResult { + let start = self.index(); self.parse_optional_description(); let name = self.parse_identifier()?; self.parse_kind(TokenKind::Colon)?; let type_ = self.parse_type_annotation()?; - let default_value = if self.parse_optional_kind(TokenKind::Equals).is_some() { - Some(self.parse_constant_value()?) + let default_value = if self.peek_token_kind() == TokenKind::Equals { + Some(self.parse_default_value()?) } else { None }; let directives = self.parse_constant_directives()?; + let end = self.index(); + let span = Span::new(start, end); Ok(InputValueDefinition { name, type_, default_value, directives, + span, }) } + /** + * InputValueDefinition : + * - Description? Name : Type DefaultValue? Directives? + */ + fn advance_input_value_def(&mut self) -> ParseResult { + self.advance_optional_description(); + self.advance_identifier()?; // name + self.advance_kind(TokenKind::Colon)?; + self.advance_type_annotation()?; // type_ + if self.peek_token_kind() == TokenKind::Equals { + self.advance_default_value()?; // default_value + } + self.advance_constant_directives()?; // directives + Ok(self.current) + } + /** * ImplementsInterfaces : * - implements `&`? NamedType @@ -999,6 +1589,23 @@ impl<'a> Parser<'a> { Ok(interfaces) } + /** + * ImplementsInterfaces : + * - implements `&`? NamedType + * - ImplementsInterfaces & NamedType + */ + fn advance_implements_interfaces(&mut self) -> ParseResult { + if self.peek_keyword("implements") { + self.advance_token(); + self.advance_optional_kind(TokenKind::Ampersand); + self.advance_identifier()?; + while self.advance_optional_kind(TokenKind::Ampersand).is_some() { + self.advance_identifier()?; + } + } + Ok(self.current) + } + /// FragmentDefinition : fragment FragmentName TypeCondition Directives? 
SelectionSet fn parse_fragment_definition(&mut self) -> ParseResult { let start = self.index(); @@ -1126,6 +1733,13 @@ impl<'a> Parser<'a> { }) } + /// DefaultValue : = Value[Const] + fn advance_default_value(&mut self) -> ParseResult { + self.advance_kind(TokenKind::Equals)?; // equals + self.advance_constant_value()?; // value + Ok(self.current) + } + /// Type : /// NamedType /// ListType @@ -1169,6 +1783,37 @@ impl<'a> Parser<'a> { } } + /// Type : + /// NamedType + /// ListType + /// NonNullType + fn advance_type_annotation(&mut self) -> ParseResult { + let token = self.peek(); + match token.kind { + TokenKind::Identifier => { + self.advance_identifier()?; + () + } + TokenKind::OpenBracket => { + self.advance_kind(TokenKind::OpenBracket)?; // open + self.advance_type_annotation()?; // type_ + self.advance_kind(TokenKind::CloseBracket)?; // close + } + _ => { + let error = Diagnostic::error( + SyntaxError::ExpectedTypeAnnotation, + Location::new(self.source_location, token.span), + ); + self.record_error(error); + return Err(()); + } + }; + if self.peek_token_kind() == TokenKind::Exclamation { + self.advance_kind(TokenKind::Exclamation)?; // exclamation + } + Ok(self.current) + } + /// Directives[Const] : Directive[?Const]+ fn parse_directives(&mut self) -> ParseResult> { self.parse_list(|s| s.peek_kind(TokenKind::At), |s| s.parse_directive_impl()) @@ -1185,6 +1830,17 @@ impl<'a> Parser<'a> { } } + fn advance_constant_directives(&mut self) -> ParseResult { + if self.peek_token_kind() == TokenKind::At { + self.advance_list( + |s| s.peek_kind(TokenKind::At), + |s| s.advance_constant_directive(), + ) + } else { + Ok(self.current) + } + } + /// Directive[Const] : @ Name Arguments[?Const]? 
fn parse_directive_impl(&mut self) -> ParseResult { let start = self.index(); @@ -1213,6 +1869,13 @@ impl<'a> Parser<'a> { }) } + fn advance_constant_directive(&mut self) -> ParseResult { + self.advance_kind(TokenKind::At)?; // at + self.advance_identifier()?; // name + self.advance_optional_constant_arguments()?; // arguments + Ok(self.current) + } + /// TypeCondition : on NamedType /// NamedType : Name fn parse_type_condition(&mut self) -> ParseResult { @@ -1419,7 +2082,7 @@ impl<'a> Parser<'a> { )); let name = Identifier { span: node.token.span, - token: node.token.clone(), + token: node.token, value: node.value, }; last_arg.span.end = last_arg.colon.span.end; @@ -1540,6 +2203,14 @@ impl<'a> Parser<'a> { ) } + fn advance_optional_constant_arguments(&mut self) -> ParseResult { + self.advance_optional_delimited_nonempty_list( + TokenKind::OpenParen, + TokenKind::CloseParen, + Self::parse_constant_argument, + ) + } + /// Argument[Const] : Name : Value[?Const] fn parse_argument(&mut self) -> ParseResult { let start = self.index(); @@ -1554,6 +2225,7 @@ impl<'a> Parser<'a> { value, }) } + /// Argument[Const=true] : Name : Value[Const=true] fn parse_constant_argument(&mut self) -> ParseResult { let start = self.index(); @@ -1569,6 +2241,14 @@ impl<'a> Parser<'a> { }) } + /// Argument[Const=true] : Name : Value[Const=true] + fn advance_constant_argument(&mut self) -> ParseResult { + self.advance_identifier()?; // name + self.advance_kind(TokenKind::Colon)?; // colon + self.advance_constant_value()?; // value + Ok(self.current) + } + /// Value[?Const] : /// [~Const] Variable /// ListValue[?Const] @@ -1672,6 +2352,31 @@ impl<'a> Parser<'a> { } } + /// Value[Const=true] : + /// IntValue + /// FloatValue + /// StringValue + /// BooleanValue + /// NullValue + /// EnumValue + /// ListValue[Const=true] + /// ObjectValue[Const=true] + fn advance_constant_value(&mut self) -> ParseResult { + match self.peek_token_kind() { + TokenKind::OpenBracket => { + 
self.advance_delimited_list(TokenKind::OpenBracket, TokenKind::CloseBracket, |s| { + s.advance_constant_value() + }) + } + TokenKind::OpenBrace => { + self.advance_delimited_list(TokenKind::OpenBrace, TokenKind::CloseBrace, |s| { + s.advance_constant_argument() + }) + } + _ => self.advance_literal_value(), + } + } + /// IntValue /// FloatValue /// StringValue @@ -1792,6 +2497,16 @@ impl<'a> Parser<'a> { } } + /// IntValue + /// FloatValue + /// StringValue + /// BooleanValue + /// NullValue + /// EnumValue + fn advance_literal_value(&mut self) -> ParseResult { + Ok(self.advance_token()) + } + /// Variable : $ Name fn parse_variable_identifier(&mut self) -> ParseResult { let start = self.index(); @@ -1844,6 +2559,22 @@ impl<'a> Parser<'a> { } } + /// Name :: /[_A-Za-z][_0-9A-Za-z]*/ + fn advance_identifier(&mut self) -> ParseResult { + let token = self.parse_token(); + match token.kind { + TokenKind::Identifier => Ok(token), + _ => { + let error = Diagnostic::error( + SyntaxError::Expected(TokenKind::Identifier), + Location::new(self.source_location, token.span), + ); + self.record_error(error); + Err(()) + } + } + } + fn parse_identifier_with_error_recovery(&mut self) -> Identifier { match self.peek_token_kind() { TokenKind::Identifier => { @@ -1882,6 +2613,18 @@ impl<'a> Parser<'a> { Ok(items) } + /// * + fn advance_list(&mut self, peek: F1, advance: F2) -> ParseResult + where + F1: Fn(&mut Self) -> bool, + F2: Fn(&mut Self) -> ParseResult, + { + while peek(self) { + advance(self)?; + } + Ok(self.current) + } + /// Parse delimited items into a `List` /// * fn parse_delimited_list( @@ -1909,6 +2652,26 @@ impl<'a> Parser<'a> { }) } + /// Advance delimited items of a `List` + /// * + fn advance_delimited_list( + &mut self, + start_kind: TokenKind, + end_kind: TokenKind, + advance: F, + ) -> ParseResult + where + F: Fn(&mut Self) -> ParseResult, + { + self.advance_kind(start_kind)?; + while !self.peek_kind(end_kind) { + advance(self)?; + } + 
self.advance_kind(end_kind)?; + + Ok(self.current) + } + /// Parse delimited items into a `List` /// + fn parse_delimited_nonempty_list( @@ -1932,7 +2695,7 @@ impl<'a> Parser<'a> { let token = self.empty_token(); return Ok(List { span: token.span, - start: token.clone(), + start: token, items: vec![], end: token, }); @@ -1960,6 +2723,47 @@ impl<'a> Parser<'a> { }) } + /// Advance to last token of delimited items + /// + + fn advance_delimited_nonempty_list( + &mut self, + start_kind: TokenKind, + end_kind: TokenKind, + advance: F, + ) -> ParseResult + where + F: Fn(&mut Self) -> ParseResult, + { + if !self.peek_kind(start_kind) { + let error = Diagnostic::error( + SyntaxError::Expected(start_kind), + Location::new( + self.source_location, + Span::new(self.end_index, self.index()), + ), + ); + self.record_error(error); + return Ok(self.empty_token()); + } + let start = self.parse_token(); + let mut has_items = false; + while !self.peek_kind(end_kind) { + has_items = true; + advance(self)?; + } + let end = self.advance_kind(end_kind)?; + let span = Span::new(start.span.start, end.span.end); + + if !has_items { + self.record_error(Diagnostic::error( + SyntaxError::ExpectedNonEmptyList, + Location::new(self.source_location, span), + )); + } + + Ok(end) + } + /// ( + )? fn parse_optional_delimited_nonempty_list( &mut self, @@ -1979,6 +2783,23 @@ impl<'a> Parser<'a> { } } + /// ( + )? + fn advance_optional_delimited_nonempty_list( + &mut self, + start_kind: TokenKind, + end_kind: TokenKind, + parse: F, + ) -> ParseResult + where + F: Fn(&mut Self) -> ParseResult, + { + if self.peek_token_kind() == start_kind { + self.advance_delimited_nonempty_list(start_kind, end_kind, parse) + } else { + Ok(self.current) + } + } + /// A &str for the source of the inner span of the given token. 
fn source(&self, token: &Token) -> &str { let (raw_start, raw_end) = token.span.as_usize(); @@ -2020,6 +2841,10 @@ impl<'a> Parser<'a> { } } + fn advance_kind(&mut self, expected: TokenKind) -> ParseResult { + self.parse_kind(expected) + } + /// Parse the next token if it has the expected kind. fn parse_optional_kind(&mut self, expected: TokenKind) -> Option { if self.peek_kind(expected) { @@ -2029,6 +2854,14 @@ impl<'a> Parser<'a> { } } + fn advance_optional_kind(&mut self, expected: TokenKind) -> Option { + if self.peek_kind(expected) { + Some(self.advance_token()) + } else { + None + } + } + /// Return true if the current token is an Identifier matching the given keyword. fn peek_keyword(&self, expected: &'static str) -> bool { self.peek_kind(TokenKind::Identifier) && self.source(self.peek()) == expected @@ -2050,6 +2883,10 @@ impl<'a> Parser<'a> { } } + fn advance_keyword(&mut self, expected: &'static str) -> ParseResult { + self.parse_keyword(expected) + } + /// Get the byte offset of the *start* of the current token fn index(&self) -> u32 { self.current.span.start @@ -2094,6 +2931,10 @@ impl<'a> Parser<'a> { } } + fn advance_token(&mut self) -> Token { + self.parse_token() + } + fn lexer_span(&self) -> Span { let span: Span = self.lexer.span().into(); span.with_offset(self.offset) diff --git a/compiler/crates/graphql-syntax/src/syntax_error.rs b/compiler/crates/graphql-syntax/src/syntax_error.rs index 8d0787ec45075..2fbeda7bfab6f 100644 --- a/compiler/crates/graphql-syntax/src/syntax_error.rs +++ b/compiler/crates/graphql-syntax/src/syntax_error.rs @@ -9,7 +9,19 @@ use thiserror::Error; use crate::lexer::TokenKind; -#[derive(Clone, Copy, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Copy, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum SyntaxError { #[error("Expected a {0}")] Expected(TokenKind), diff --git 
a/compiler/crates/graphql-syntax/tests/advance_schema_document.rs b/compiler/crates/graphql-syntax/tests/advance_schema_document.rs new file mode 100644 index 0000000000000..b72080d6b93e3 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_schema_document_into_type_system_definitions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + parse_schema_document_into_type_system_definitions( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.expected b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.expected new file mode 100644 index 0000000000000..3e88859e1232c --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.expected @@ -0,0 +1,199 @@ +==================================== INPUT ==================================== +# source: 
https://github.com/graphql/graphql-js/blob/5d109ec32f60b593b721037cd2944d2c07420006/src/__fixtures__/schema-kitchen-sink.graphql + +"""This is a description of the schema as a whole.""" +schema { + query: QueryType + mutation: MutationType +} + +""" +This is a description +of the `Foo` type. +""" +type Foo implements Bar & Baz & Two { + "Description of the `one` field." + one: Type + """ + This is a description of the `two` field. + """ + two( + """ + This is a description of the `argument` argument. + """ + argument: InputType! + ): Type + """This is a description of the `three` field.""" + three(argument: InputType, other: String): Int + four(argument: String = "string"): String + five(argument: [String] = ["string", "string"]): String + six(argument: InputType = {key: "value"}): Type + seven(argument: Int = null): Type +} + +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArgumentDefinition): Type @onField +} + +type UndefinedType + +extend type Foo { + seven(argument: [String]): Type +} + +extend type Foo @onType + +interface Bar { + one: Type + four(argument: String = "string"): String +} + +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArgumentDefinition): Type @onField +} + +interface UndefinedInterface + +extend interface Bar implements Two { + two(argument: InputType!): Type +} + +extend interface Bar @onInterface + +interface Baz implements Bar & Two { + one: Type + two(argument: InputType!): Type + four(argument: String = "string"): String +} + +union Feed = + | Story + | Article + | Advert + +union AnnotatedUnion @onUnion = A | B + +union AnnotatedUnionTwo @onUnion = | A | B + +union UndefinedUnion + +extend union Feed = Photo | Video + +extend union Feed @onUnion + +scalar CustomScalar + +scalar AnnotatedScalar @onScalar + +extend scalar CustomScalar @onScalar + +enum Site { + """ + This is a description of the `DESKTOP` value + """ + DESKTOP + + """This is a description of the 
`MOBILE` value""" + MOBILE + + "This is a description of the `WEB` value" + WEB +} + +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + +enum UndefinedEnum + +extend enum Site { + VR +} + +extend enum Site @onEnum + +input InputType { + key: String! + answer: Int = 42 +} + +input AnnotatedInput @onInputObject { + annotatedField: Type @onInputFieldDefinition +} + +input UndefinedInput + +extend input InputType { + other: Float = 1.23e4 @onInputFieldDefinition +} + +extend input InputType @onInputObject + +""" +This is a description of the `@skip` directive +""" +directive @skip( + """This is a description of the `if` argument""" + if: Boolean! @onArgumentDefinition +) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +directive @include(if: Boolean!) + on FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @include2(if: Boolean!) on + | FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @myRepeatableDir(name: String!) repeatable on + | OBJECT + | INTERFACE + +extend schema @onSchema + +extend schema @onSchema { + subscription: SubscriptionType +} +==================================== OUTPUT =================================== +[ + "\"\"\"This is a description of the schema as a whole.\"\"\"\nschema {\n query: QueryType\n mutation: MutationType\n}\n\n", + "\"\"\"\nThis is a description\nof the `Foo` type.\n\"\"\"\ntype Foo implements Bar & Baz & Two {\n \"Description of the `one` field.\"\n one: Type\n \"\"\"\n This is a description of the `two` field.\n \"\"\"\n two(\n \"\"\"\n This is a description of the `argument` argument.\n \"\"\"\n argument: InputType!\n ): Type\n \"\"\"This is a description of the `three` field.\"\"\"\n three(argument: InputType, other: String): Int\n four(argument: String = \"string\"): String\n five(argument: [String] = [\"string\", \"string\"]): String\n six(argument: InputType = {key: \"value\"}): Type\n seven(argument: Int = null): Type\n}\n\n", + "type AnnotatedObject @onObject(arg: \"value\") 
{\n annotatedField(arg: Type = \"default\" @onArgumentDefinition): Type @onField\n}\n\n", + "type UndefinedType\n\n", + "extend type Foo {\n seven(argument: [String]): Type\n}\n\n", + "extend type Foo @onType\n\n", + "interface Bar {\n one: Type\n four(argument: String = \"string\"): String\n}\n\n", + "interface AnnotatedInterface @onInterface {\n annotatedField(arg: Type @onArgumentDefinition): Type @onField\n}\n\n", + "interface UndefinedInterface\n\n", + "extend interface Bar implements Two {\n two(argument: InputType!): Type\n}\n\n", + "extend interface Bar @onInterface\n\n", + "interface Baz implements Bar & Two {\n one: Type\n two(argument: InputType!): Type\n four(argument: String = \"string\"): String\n}\n\n", + "union Feed =\n | Story\n | Article\n | Advert\n\n", + "union AnnotatedUnion @onUnion = A | B\n\n", + "union AnnotatedUnionTwo @onUnion = | A | B\n\n", + "union UndefinedUnion\n\n", + "extend union Feed = Photo | Video\n\n", + "extend union Feed @onUnion\n\n", + "scalar CustomScalar\n\n", + "scalar AnnotatedScalar @onScalar\n\n", + "extend scalar CustomScalar @onScalar\n\n", + "enum Site {\n \"\"\"\n This is a description of the `DESKTOP` value\n \"\"\"\n DESKTOP\n\n \"\"\"This is a description of the `MOBILE` value\"\"\"\n MOBILE\n\n \"This is a description of the `WEB` value\"\n WEB\n}\n\n", + "enum AnnotatedEnum @onEnum {\n ANNOTATED_VALUE @onEnumValue\n OTHER_VALUE\n}\n\n", + "enum UndefinedEnum\n\n", + "extend enum Site {\n VR\n}\n\n", + "extend enum Site @onEnum\n\n", + "input InputType {\n key: String!\n answer: Int = 42\n}\n\n", + "input AnnotatedInput @onInputObject {\n annotatedField: Type @onInputFieldDefinition\n}\n\n", + "input UndefinedInput\n\n", + "extend input InputType {\n other: Float = 1.23e4 @onInputFieldDefinition\n}\n\n", + "extend input InputType @onInputObject\n\n", + "\"\"\"\nThis is a description of the `@skip` directive\n\"\"\"\ndirective @skip(\n \"\"\"This is a description of the `if` argument\"\"\"\n if: Boolean! 
@onArgumentDefinition\n) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT\n\n", + "directive @include(if: Boolean!)\n on FIELD\n | FRAGMENT_SPREAD\n | INLINE_FRAGMENT\n\n", + "directive @include2(if: Boolean!) on\n | FIELD\n | FRAGMENT_SPREAD\n | INLINE_FRAGMENT\n\n", + "directive @myRepeatableDir(name: String!) repeatable on\n | OBJECT\n | INTERFACE\n\n", + "extend schema @onSchema\n\n", + "extend schema @onSchema {\n subscription: SubscriptionType\n}\n", +] diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.graphql b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.graphql new file mode 100644 index 0000000000000..f34c02c6a8c9b --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_kitchen_sink.graphql @@ -0,0 +1,158 @@ +# source: https://github.com/graphql/graphql-js/blob/5d109ec32f60b593b721037cd2944d2c07420006/src/__fixtures__/schema-kitchen-sink.graphql + +"""This is a description of the schema as a whole.""" +schema { + query: QueryType + mutation: MutationType +} + +""" +This is a description +of the `Foo` type. +""" +type Foo implements Bar & Baz & Two { + "Description of the `one` field." + one: Type + """ + This is a description of the `two` field. + """ + two( + """ + This is a description of the `argument` argument. + """ + argument: InputType! 
+ ): Type + """This is a description of the `three` field.""" + three(argument: InputType, other: String): Int + four(argument: String = "string"): String + five(argument: [String] = ["string", "string"]): String + six(argument: InputType = {key: "value"}): Type + seven(argument: Int = null): Type +} + +type AnnotatedObject @onObject(arg: "value") { + annotatedField(arg: Type = "default" @onArgumentDefinition): Type @onField +} + +type UndefinedType + +extend type Foo { + seven(argument: [String]): Type +} + +extend type Foo @onType + +interface Bar { + one: Type + four(argument: String = "string"): String +} + +interface AnnotatedInterface @onInterface { + annotatedField(arg: Type @onArgumentDefinition): Type @onField +} + +interface UndefinedInterface + +extend interface Bar implements Two { + two(argument: InputType!): Type +} + +extend interface Bar @onInterface + +interface Baz implements Bar & Two { + one: Type + two(argument: InputType!): Type + four(argument: String = "string"): String +} + +union Feed = + | Story + | Article + | Advert + +union AnnotatedUnion @onUnion = A | B + +union AnnotatedUnionTwo @onUnion = | A | B + +union UndefinedUnion + +extend union Feed = Photo | Video + +extend union Feed @onUnion + +scalar CustomScalar + +scalar AnnotatedScalar @onScalar + +extend scalar CustomScalar @onScalar + +enum Site { + """ + This is a description of the `DESKTOP` value + """ + DESKTOP + + """This is a description of the `MOBILE` value""" + MOBILE + + "This is a description of the `WEB` value" + WEB +} + +enum AnnotatedEnum @onEnum { + ANNOTATED_VALUE @onEnumValue + OTHER_VALUE +} + +enum UndefinedEnum + +extend enum Site { + VR +} + +extend enum Site @onEnum + +input InputType { + key: String! 
+ answer: Int = 42 +} + +input AnnotatedInput @onInputObject { + annotatedField: Type @onInputFieldDefinition +} + +input UndefinedInput + +extend input InputType { + other: Float = 1.23e4 @onInputFieldDefinition +} + +extend input InputType @onInputObject + +""" +This is a description of the `@skip` directive +""" +directive @skip( + """This is a description of the `if` argument""" + if: Boolean! @onArgumentDefinition +) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +directive @include(if: Boolean!) + on FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @include2(if: Boolean!) on + | FIELD + | FRAGMENT_SPREAD + | INLINE_FRAGMENT + +directive @myRepeatableDir(name: String!) repeatable on + | OBJECT + | INTERFACE + +extend schema @onSchema + +extend schema @onSchema { + subscription: SubscriptionType +} diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.expected b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.expected new file mode 100644 index 0000000000000..6325a11085d25 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.expected @@ -0,0 +1,10 @@ +==================================== INPUT ==================================== +"""Description of the schema as per https://github.com/graphql/graphql-spec/pull/466""" +schema { + query: RootQueryType + mutation: RootMutationType +} +==================================== OUTPUT =================================== +[ + "\"\"\"Description of the schema as per https://github.com/graphql/graphql-spec/pull/466\"\"\"\nschema {\n query: RootQueryType\n mutation: RootMutationType\n}\n", +] diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.graphql b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.graphql new file mode 100644 index 
0000000000000..9fd7f90730fc3 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/schema_with_leading_comment.graphql @@ -0,0 +1,5 @@ +"""Description of the schema as per https://github.com/graphql/graphql-spec/pull/466""" +schema { + query: RootQueryType + mutation: RootMutationType +} diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.expected b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.expected new file mode 100644 index 0000000000000..42beaf41de3d2 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +type Basic + +type Photo { + url: String +} + +type User implements Actor & Node @strong(field_name: "id") { + id: ID! + name(language: Language): String! +} + +type OnlyInterfaces implements + & Actor + & Node +==================================== OUTPUT =================================== +[ + "type Basic\n\n", + "type Photo {\n url: String\n}\n\n", + "type User implements Actor & Node @strong(field_name: \"id\") {\n id: ID!\n name(language: Language): String!\n}\n\n", + "type OnlyInterfaces implements\n & Actor\n & Node\n", +] diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.graphql b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.graphql new file mode 100644 index 0000000000000..33a90bd515d9c --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document/fixtures/type_definition.graphql @@ -0,0 +1,14 @@ +type Basic + +type Photo { + url: String +} + +type User implements Actor & Node @strong(field_name: "id") { + id: ID! + name(language: Language): String! 
+} + +type OnlyInterfaces implements + & Actor + & Node diff --git a/compiler/crates/graphql-syntax/tests/advance_schema_document_test.rs b/compiler/crates/graphql-syntax/tests/advance_schema_document_test.rs new file mode 100644 index 0000000000000..78b5a370bf81b --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/advance_schema_document_test.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @generated SignedSource<<1b78966281c5964a162ff1c5662713ed>> + */ + +mod advance_schema_document; + +use advance_schema_document::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn schema_kitchen_sink() { + let input = include_str!("advance_schema_document/fixtures/schema_kitchen_sink.graphql"); + let expected = include_str!("advance_schema_document/fixtures/schema_kitchen_sink.expected"); + test_fixture(transform_fixture, file!(), "schema_kitchen_sink.graphql", "advance_schema_document/fixtures/schema_kitchen_sink.expected", input, expected).await; +} + +#[tokio::test] +async fn schema_with_leading_comment() { + let input = include_str!("advance_schema_document/fixtures/schema_with_leading_comment.graphql"); + let expected = include_str!("advance_schema_document/fixtures/schema_with_leading_comment.expected"); + test_fixture(transform_fixture, file!(), "schema_with_leading_comment.graphql", "advance_schema_document/fixtures/schema_with_leading_comment.expected", input, expected).await; +} + +#[tokio::test] +async fn type_definition() { + let input = include_str!("advance_schema_document/fixtures/type_definition.graphql"); + let expected = include_str!("advance_schema_document/fixtures/type_definition.expected"); + test_fixture(transform_fixture, file!(), "type_definition.graphql", "advance_schema_document/fixtures/type_definition.expected", input, expected).await; +} diff 
--git a/compiler/crates/graphql-syntax/tests/parse_document.rs b/compiler/crates/graphql-syntax/tests/parse_document.rs new file mode 100644 index 0000000000000..be879688b5fb9 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/parse_document.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_document; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + parse_document( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/graphql-syntax/tests/parse_document/fixtures/mixed.expected b/compiler/crates/graphql-syntax/tests/parse_document/fixtures/mixed.expected index cdf68e4cfecd4..5fbecd4c45d99 100644 --- a/compiler/crates/graphql-syntax/tests/parse_document/fixtures/mixed.expected +++ b/compiler/crates/graphql-syntax/tests/parse_document/fixtures/mixed.expected @@ -27,6 +27,7 @@ Document { value: "Foo", }, directives: [], + span: 0:12, }, ), ), @@ -84,6 +85,8 @@ Document { arguments: None, directives: [], description: None, + hack_source: None, + span: 58:66, }, ], end: Token { @@ -92,6 +95,7 @@ 
Document { }, }, ), + span: 44:69, }, ), ), diff --git a/compiler/crates/graphql-syntax/tests/parse_document/mod.rs b/compiler/crates/graphql-syntax/tests/parse_document/mod.rs deleted file mode 100644 index a8af7b246a087..0000000000000 --- a/compiler/crates/graphql-syntax/tests/parse_document/mod.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_document; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - parse_document( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .map(|x| format!("{:#?}", x)) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/graphql-syntax/tests/parse_document_test.rs b/compiler/crates/graphql-syntax/tests/parse_document_test.rs index 3422e8fac4bb1..2197fa36b7099 100644 --- a/compiler/crates/graphql-syntax/tests/parse_document_test.rs +++ b/compiler/crates/graphql-syntax/tests/parse_document_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod parse_document; @@ -12,16 +12,16 @@ mod parse_document; use parse_document::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn invalid_definition_invalid() { +#[tokio::test] +async fn invalid_definition_invalid() { let input = include_str!("parse_document/fixtures/invalid_definition.invalid.graphql"); let expected = include_str!("parse_document/fixtures/invalid_definition.invalid.expected"); - test_fixture(transform_fixture, "invalid_definition.invalid.graphql", "parse_document/fixtures/invalid_definition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid_definition.invalid.graphql", "parse_document/fixtures/invalid_definition.invalid.expected", input, expected).await; } -#[test] -fn mixed() { +#[tokio::test] +async fn mixed() { let input = include_str!("parse_document/fixtures/mixed.graphql"); let expected = include_str!("parse_document/fixtures/mixed.expected"); - test_fixture(transform_fixture, "mixed.graphql", "parse_document/fixtures/mixed.expected", input, expected); + test_fixture(transform_fixture, file!(), "mixed.graphql", "parse_document/fixtures/mixed.expected", input, expected).await; } diff --git a/compiler/crates/graphql-syntax/tests/parse_document_with_features.rs b/compiler/crates/graphql-syntax/tests/parse_document_with_features.rs new file mode 100644 index 0000000000000..9a747d03c8306 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/parse_document_with_features.rs @@ -0,0 +1,40 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_document_with_features; +use graphql_syntax::FragmentArgumentSyntaxKind; +use graphql_syntax::ParserFeatures; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + parse_document_with_features( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ParserFeatures { + fragment_argument_capability: + FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, + }, + ) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/graphql-syntax/tests/parse_document_with_features/mod.rs b/compiler/crates/graphql-syntax/tests/parse_document_with_features/mod.rs deleted file mode 100644 index 49e821efc548f..0000000000000 --- a/compiler/crates/graphql-syntax/tests/parse_document_with_features/mod.rs +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_document_with_features; -use graphql_syntax::FragmentArgumentSyntaxKind; -use graphql_syntax::ParserFeatures; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - parse_document_with_features( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ParserFeatures { - fragment_argument_capability: - FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, - }, - ) - .map(|x| format!("{:#?}", x)) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/graphql-syntax/tests/parse_document_with_features_test.rs b/compiler/crates/graphql-syntax/tests/parse_document_with_features_test.rs index 668324a7e68c3..f085af3988ae4 100644 --- a/compiler/crates/graphql-syntax/tests/parse_document_with_features_test.rs +++ b/compiler/crates/graphql-syntax/tests/parse_document_with_features_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<467adf22a47715eb35bf37efa6f69f7f>> + * @generated SignedSource<<9c844c4fef105f96cf2d9748be7fb136>> */ mod parse_document_with_features; @@ -12,30 +12,30 @@ mod parse_document_with_features; use parse_document_with_features::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_empty_vardefs_invalid() { +#[tokio::test] +async fn fragment_with_empty_vardefs_invalid() { let input = include_str!("parse_document_with_features/fixtures/fragment_with_empty_vardefs.invalid.graphql"); let expected = include_str!("parse_document_with_features/fixtures/fragment_with_empty_vardefs.invalid.expected"); - test_fixture(transform_fixture, "fragment_with_empty_vardefs.invalid.graphql", "parse_document_with_features/fixtures/fragment_with_empty_vardefs.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_empty_vardefs.invalid.graphql", "parse_document_with_features/fixtures/fragment_with_empty_vardefs.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_variable_defs() { +#[tokio::test] +async fn fragment_with_variable_defs() { let input = include_str!("parse_document_with_features/fixtures/fragment_with_variable_defs.graphql"); let expected = include_str!("parse_document_with_features/fixtures/fragment_with_variable_defs.expected"); - test_fixture(transform_fixture, "fragment_with_variable_defs.graphql", "parse_document_with_features/fixtures/fragment_with_variable_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_variable_defs.graphql", "parse_document_with_features/fixtures/fragment_with_variable_defs.expected", input, expected).await; } -#[test] -fn spread_with_arguments() { +#[tokio::test] +async fn spread_with_arguments() { let input = include_str!("parse_document_with_features/fixtures/spread_with_arguments.graphql"); let expected = include_str!("parse_document_with_features/fixtures/spread_with_arguments.expected"); - 
test_fixture(transform_fixture, "spread_with_arguments.graphql", "parse_document_with_features/fixtures/spread_with_arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "spread_with_arguments.graphql", "parse_document_with_features/fixtures/spread_with_arguments.expected", input, expected).await; } -#[test] -fn spread_with_empty_arguments_invalid() { +#[tokio::test] +async fn spread_with_empty_arguments_invalid() { let input = include_str!("parse_document_with_features/fixtures/spread_with_empty_arguments.invalid.graphql"); let expected = include_str!("parse_document_with_features/fixtures/spread_with_empty_arguments.invalid.expected"); - test_fixture(transform_fixture, "spread_with_empty_arguments.invalid.graphql", "parse_document_with_features/fixtures/spread_with_empty_arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "spread_with_empty_arguments.invalid.graphql", "parse_document_with_features/fixtures/spread_with_empty_arguments.invalid.expected", input, expected).await; } diff --git a/compiler/crates/graphql-syntax/tests/parse_executable_document.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document.rs new file mode 100644 index 0000000000000..f0883a6ccbe85 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/parse_executable_document.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_executable; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/graphql-syntax/tests/parse_executable_document/mod.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document/mod.rs deleted file mode 100644 index 8f431dafadf69..0000000000000 --- a/compiler/crates/graphql-syntax/tests/parse_executable_document/mod.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_executable; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .map(|x| format!("{:#?}", x)) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/graphql-syntax/tests/parse_executable_document_test.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document_test.rs index 0cf7bfdbb82bc..7071897bca421 100644 --- a/compiler/crates/graphql-syntax/tests/parse_executable_document_test.rs +++ b/compiler/crates/graphql-syntax/tests/parse_executable_document_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<4934dcac68b7399c330d6808663cc57f>> */ mod parse_executable_document; @@ -12,93 +12,93 @@ mod parse_executable_document; use parse_executable_document::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn block_string() { +#[tokio::test] +async fn block_string() { let input = include_str!("parse_executable_document/fixtures/block_string.graphql"); let expected = include_str!("parse_executable_document/fixtures/block_string.expected"); - test_fixture(transform_fixture, "block_string.graphql", "parse_executable_document/fixtures/block_string.expected", input, expected); + test_fixture(transform_fixture, file!(), "block_string.graphql", "parse_executable_document/fixtures/block_string.expected", input, expected).await; } -#[test] -fn fragment_with_variable_defs_invalid() { +#[tokio::test] +async fn fragment_with_variable_defs_invalid() { let input = include_str!("parse_executable_document/fixtures/fragment_with_variable_defs.invalid.graphql"); let expected = include_str!("parse_executable_document/fixtures/fragment_with_variable_defs.invalid.expected"); - test_fixture(transform_fixture, "fragment_with_variable_defs.invalid.graphql", "parse_executable_document/fixtures/fragment_with_variable_defs.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_with_variable_defs.invalid.graphql", "parse_executable_document/fixtures/fragment_with_variable_defs.invalid.expected", input, expected).await; } -#[test] -fn incomplete_field_alias() { +#[tokio::test] +async fn incomplete_field_alias() { let input = include_str!("parse_executable_document/fixtures/incomplete_field_alias.graphql"); let expected = include_str!("parse_executable_document/fixtures/incomplete_field_alias.expected"); - test_fixture(transform_fixture, "incomplete_field_alias.graphql", "parse_executable_document/fixtures/incomplete_field_alias.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "incomplete_field_alias.graphql", "parse_executable_document/fixtures/incomplete_field_alias.expected", input, expected).await; } -#[test] -fn incorrect_variable_name_invalid() { +#[tokio::test] +async fn incorrect_variable_name_invalid() { let input = include_str!("parse_executable_document/fixtures/incorrect_variable_name.invalid.graphql"); let expected = include_str!("parse_executable_document/fixtures/incorrect_variable_name.invalid.expected"); - test_fixture(transform_fixture, "incorrect_variable_name.invalid.graphql", "parse_executable_document/fixtures/incorrect_variable_name.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "incorrect_variable_name.invalid.graphql", "parse_executable_document/fixtures/incorrect_variable_name.invalid.expected", input, expected).await; } -#[test] -fn invalid_number() { +#[tokio::test] +async fn invalid_number() { let input = include_str!("parse_executable_document/fixtures/invalid_number.graphql"); let expected = include_str!("parse_executable_document/fixtures/invalid_number.expected"); - test_fixture(transform_fixture, "invalid_number.graphql", "parse_executable_document/fixtures/invalid_number.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid_number.graphql", "parse_executable_document/fixtures/invalid_number.expected", input, expected).await; } -#[test] -fn keyword_as_name() { +#[tokio::test] +async fn keyword_as_name() { let input = include_str!("parse_executable_document/fixtures/keyword_as_name.graphql"); let expected = include_str!("parse_executable_document/fixtures/keyword_as_name.expected"); - test_fixture(transform_fixture, "keyword_as_name.graphql", "parse_executable_document/fixtures/keyword_as_name.expected", input, expected); + test_fixture(transform_fixture, file!(), "keyword_as_name.graphql", "parse_executable_document/fixtures/keyword_as_name.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn 
kitchen_sink() { let input = include_str!("parse_executable_document/fixtures/kitchen-sink.graphql"); let expected = include_str!("parse_executable_document/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "parse_executable_document/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "parse_executable_document/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn list_of_enum() { +#[tokio::test] +async fn list_of_enum() { let input = include_str!("parse_executable_document/fixtures/list_of_enum.graphql"); let expected = include_str!("parse_executable_document/fixtures/list_of_enum.expected"); - test_fixture(transform_fixture, "list_of_enum.graphql", "parse_executable_document/fixtures/list_of_enum.expected", input, expected); + test_fixture(transform_fixture, file!(), "list_of_enum.graphql", "parse_executable_document/fixtures/list_of_enum.expected", input, expected).await; } -#[test] -fn missing_zero_on_float_invalid() { +#[tokio::test] +async fn missing_zero_on_float_invalid() { let input = include_str!("parse_executable_document/fixtures/missing_zero_on_float.invalid.graphql"); let expected = include_str!("parse_executable_document/fixtures/missing_zero_on_float.invalid.expected"); - test_fixture(transform_fixture, "missing_zero_on_float.invalid.graphql", "parse_executable_document/fixtures/missing_zero_on_float.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing_zero_on_float.invalid.graphql", "parse_executable_document/fixtures/missing_zero_on_float.invalid.expected", input, expected).await; } -#[test] -fn multiple_parse_errors_invalid() { +#[tokio::test] +async fn multiple_parse_errors_invalid() { let input = include_str!("parse_executable_document/fixtures/multiple_parse_errors.invalid.graphql"); let expected = 
include_str!("parse_executable_document/fixtures/multiple_parse_errors.invalid.expected"); - test_fixture(transform_fixture, "multiple_parse_errors.invalid.graphql", "parse_executable_document/fixtures/multiple_parse_errors.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple_parse_errors.invalid.graphql", "parse_executable_document/fixtures/multiple_parse_errors.invalid.expected", input, expected).await; } -#[test] -fn space_in_variable() { +#[tokio::test] +async fn space_in_variable() { let input = include_str!("parse_executable_document/fixtures/space_in_variable.graphql"); let expected = include_str!("parse_executable_document/fixtures/space_in_variable.expected"); - test_fixture(transform_fixture, "space_in_variable.graphql", "parse_executable_document/fixtures/space_in_variable.expected", input, expected); + test_fixture(transform_fixture, file!(), "space_in_variable.graphql", "parse_executable_document/fixtures/space_in_variable.expected", input, expected).await; } -#[test] -fn spread_with_arguments_invalid() { +#[tokio::test] +async fn spread_with_arguments_invalid() { let input = include_str!("parse_executable_document/fixtures/spread_with_arguments.invalid.graphql"); let expected = include_str!("parse_executable_document/fixtures/spread_with_arguments.invalid.expected"); - test_fixture(transform_fixture, "spread_with_arguments.invalid.graphql", "parse_executable_document/fixtures/spread_with_arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "spread_with_arguments.invalid.graphql", "parse_executable_document/fixtures/spread_with_arguments.invalid.expected", input, expected).await; } -#[test] -fn unterminated_string_invalid() { +#[tokio::test] +async fn unterminated_string_invalid() { let input = include_str!("parse_executable_document/fixtures/unterminated_string.invalid.graphql"); let expected = 
include_str!("parse_executable_document/fixtures/unterminated_string.invalid.expected"); - test_fixture(transform_fixture, "unterminated_string.invalid.graphql", "parse_executable_document/fixtures/unterminated_string.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unterminated_string.invalid.graphql", "parse_executable_document/fixtures/unterminated_string.invalid.expected", input, expected).await; } diff --git a/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery.rs new file mode 100644 index 0000000000000..d3908aefba919 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery.rs @@ -0,0 +1,37 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_executable_with_error_recovery; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let result = parse_executable_with_error_recovery( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ); + Ok(format!( + "{:#?}\nErrors:\n{}", + result.item, + diagnostics_to_sorted_string(fixture.content, &result.diagnostics) + )) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git 
a/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery/mod.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery/mod.rs deleted file mode 100644 index a0e47e79649f6..0000000000000 --- a/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery/mod.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_executable_with_error_recovery; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let result = parse_executable_with_error_recovery( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ); - Ok(format!( - "{:#?}\nErrors:\n{}", - result.item, - diagnostics_to_sorted_string(fixture.content, &result.diagnostics) - )) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery_test.rs b/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery_test.rs index 0484a66a2049c..9e1dd4c187ae0 100644 --- a/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery_test.rs +++ b/compiler/crates/graphql-syntax/tests/parse_executable_document_with_error_recovery_test.rs @@ -4,7 +4,7 @@ 
* This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<74f4b149daa4ad7e9564404219116b8d>> + * @generated SignedSource<<88d37ff7b1c29682fe64409a2914c1b1>> */ mod parse_executable_document_with_error_recovery; @@ -12,114 +12,114 @@ mod parse_executable_document_with_error_recovery; use parse_executable_document_with_error_recovery::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn argument_missing_identifier() { +#[tokio::test] +async fn argument_missing_identifier() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier.expected"); - test_fixture(transform_fixture, "argument-missing-identifier.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-missing-identifier.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier.expected", input, expected).await; } -#[test] -fn argument_missing_identifier_2() { +#[tokio::test] +async fn argument_missing_identifier_2() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier-2.grahql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier-2.expected"); - test_fixture(transform_fixture, "argument-missing-identifier-2.grahql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier-2.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-missing-identifier-2.grahql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-identifier-2.expected", input, expected).await; } -#[test] -fn 
argument_missing_value() { +#[tokio::test] +async fn argument_missing_value() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-value.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-value.expected"); - test_fixture(transform_fixture, "argument-missing-value.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-value.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-missing-value.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-value.expected", input, expected).await; } -#[test] -fn argument_missing_value_2() { +#[tokio::test] +async fn argument_missing_value_2() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-value-2.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-missing-value-2.expected"); - test_fixture(transform_fixture, "argument-missing-value-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-value-2.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-missing-value-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-missing-value-2.expected", input, expected).await; } -#[test] -fn argument_name_only() { +#[tokio::test] +async fn argument_name_only() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-name-only.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-name-only.expected"); - test_fixture(transform_fixture, "argument-name-only.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-name-only.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-name-only.graphql", 
"parse_executable_document_with_error_recovery/fixtures/argument-name-only.expected", input, expected).await; } -#[test] -fn argument_name_only_2() { +#[tokio::test] +async fn argument_name_only_2() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-name-only-2.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-name-only-2.expected"); - test_fixture(transform_fixture, "argument-name-only-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-name-only-2.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-name-only-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-name-only-2.expected", input, expected).await; } -#[test] -fn argument_value_only() { +#[tokio::test] +async fn argument_value_only() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only.expected"); - test_fixture(transform_fixture, "argument-value-only.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-value-only.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only.expected", input, expected).await; } -#[test] -fn argument_value_only_2() { +#[tokio::test] +async fn argument_value_only_2() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only-2.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only-2.expected"); - test_fixture(transform_fixture, "argument-value-only-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only-2.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "argument-value-only-2.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only-2.expected", input, expected).await; } -#[test] -fn argument_value_only_3() { +#[tokio::test] +async fn argument_value_only_3() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only-3.grahql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-value-only-3.expected"); - test_fixture(transform_fixture, "argument-value-only-3.grahql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only-3.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-value-only-3.grahql", "parse_executable_document_with_error_recovery/fixtures/argument-value-only-3.expected", input, expected).await; } -#[test] -fn argument_without_closing_paren() { +#[tokio::test] +async fn argument_without_closing_paren() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-without-closing-paren.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/argument-without-closing-paren.expected"); - test_fixture(transform_fixture, "argument-without-closing-paren.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-without-closing-paren.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-without-closing-paren.graphql", "parse_executable_document_with_error_recovery/fixtures/argument-without-closing-paren.expected", input, expected).await; } -#[test] -fn directive_without_name() { +#[tokio::test] +async fn directive_without_name() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/directive-without-name.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/directive-without-name.expected"); - test_fixture(transform_fixture, "directive-without-name.graphql", 
"parse_executable_document_with_error_recovery/fixtures/directive-without-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-without-name.graphql", "parse_executable_document_with_error_recovery/fixtures/directive-without-name.expected", input, expected).await; } -#[test] -fn empty_argument_list() { +#[tokio::test] +async fn empty_argument_list() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/empty-argument-list.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/empty-argument-list.expected"); - test_fixture(transform_fixture, "empty-argument-list.graphql", "parse_executable_document_with_error_recovery/fixtures/empty-argument-list.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty-argument-list.graphql", "parse_executable_document_with_error_recovery/fixtures/empty-argument-list.expected", input, expected).await; } -#[test] -fn empty_linked_field() { +#[tokio::test] +async fn empty_linked_field() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/empty-linked-field.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/empty-linked-field.expected"); - test_fixture(transform_fixture, "empty-linked-field.graphql", "parse_executable_document_with_error_recovery/fixtures/empty-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty-linked-field.graphql", "parse_executable_document_with_error_recovery/fixtures/empty-linked-field.expected", input, expected).await; } -#[test] -fn inline_fragment_without_selection() { +#[tokio::test] +async fn inline_fragment_without_selection() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/inline-fragment-without-selection.graphql"); let expected = 
include_str!("parse_executable_document_with_error_recovery/fixtures/inline-fragment-without-selection.expected"); - test_fixture(transform_fixture, "inline-fragment-without-selection.graphql", "parse_executable_document_with_error_recovery/fixtures/inline-fragment-without-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-without-selection.graphql", "parse_executable_document_with_error_recovery/fixtures/inline-fragment-without-selection.expected", input, expected).await; } -#[test] -fn type_in_argument_value() { +#[tokio::test] +async fn type_in_argument_value() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/type-in-argument-value.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/type-in-argument-value.expected"); - test_fixture(transform_fixture, "type-in-argument-value.graphql", "parse_executable_document_with_error_recovery/fixtures/type-in-argument-value.expected", input, expected); + test_fixture(transform_fixture, file!(), "type-in-argument-value.graphql", "parse_executable_document_with_error_recovery/fixtures/type-in-argument-value.expected", input, expected).await; } -#[test] -fn variable_definition_with_directive() { +#[tokio::test] +async fn variable_definition_with_directive() { let input = include_str!("parse_executable_document_with_error_recovery/fixtures/variable-definition-with-directive.graphql"); let expected = include_str!("parse_executable_document_with_error_recovery/fixtures/variable-definition-with-directive.expected"); - test_fixture(transform_fixture, "variable-definition-with-directive.graphql", "parse_executable_document_with_error_recovery/fixtures/variable-definition-with-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "variable-definition-with-directive.graphql", "parse_executable_document_with_error_recovery/fixtures/variable-definition-with-directive.expected", input, 
expected).await; } diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document.rs b/compiler/crates/graphql-syntax/tests/parse_schema_document.rs new file mode 100644 index 0000000000000..8580c31a38538 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_schema_document; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + parse_schema_document( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .map(|x| format!("{:#?}", x)) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/directive_description.expected b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/directive_description.expected index 7450859cb230f..d92bb07999ca0 100644 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/directive_description.expected +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/directive_description.expected @@ -31,6 +31,8 @@ SchemaDocument { value: "My Directive", }, ), + hack_source: None, + 
span: 21:54, }, ), ], diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/field_description.expected b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/field_description.expected index ee1272027c2b7..617cd14230b24 100644 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/field_description.expected +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/field_description.expected @@ -82,6 +82,8 @@ SchemaDocument { value: "Single line field description", }, ), + hack_source: None, + span: 13:62, }, FieldDefinition { name: Identifier { @@ -115,6 +117,8 @@ SchemaDocument { value: "Block field description", }, ), + hack_source: None, + span: 62:116, }, FieldDefinition { name: Identifier { @@ -148,6 +152,8 @@ SchemaDocument { value: "Multiline block field description which is so long\nthat it spans onto a second line.", }, ), + hack_source: None, + span: 116:241, }, ], end: Token { @@ -156,6 +162,7 @@ SchemaDocument { }, }, ), + span: 0:244, }, ), ObjectTypeExtension( @@ -210,6 +217,8 @@ SchemaDocument { value: "Single line extended field description", }, ), + hack_source: None, + span: 264:331, }, FieldDefinition { name: Identifier { @@ -243,6 +252,8 @@ SchemaDocument { value: "Block field description", }, ), + hack_source: None, + span: 331:394, }, FieldDefinition { name: Identifier { @@ -276,6 +287,8 @@ SchemaDocument { value: "Multiline block field description which is so long\nthat it spans onto a second line.", }, ), + hack_source: None, + span: 394:528, }, ], end: Token { @@ -284,6 +297,7 @@ SchemaDocument { }, }, ), + span: 256:530, }, ), ], diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_kitchen_sink.expected b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_kitchen_sink.expected index 7428119aa527d..95e1a6467a367 100644 --- 
a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_kitchen_sink.expected +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_kitchen_sink.expected @@ -181,6 +181,7 @@ SchemaDocument { }, value: "QueryType", }, + span: 205:224, }, OperationTypeDefinition { operation: Mutation, @@ -192,6 +193,7 @@ SchemaDocument { }, value: "MutationType", }, + span: 224:247, }, ], end: Token { @@ -199,6 +201,7 @@ SchemaDocument { kind: CloseBrace, }, }, + span: 194:250, }, ), ObjectTypeDefinition( @@ -278,6 +281,8 @@ SchemaDocument { value: "Description of the `one` field.", }, ), + hack_source: None, + span: 339:387, }, FieldDefinition { name: Identifier { @@ -340,6 +345,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 452:545, }, ], end: Token { @@ -358,6 +364,8 @@ SchemaDocument { value: "This is a description of the `two` field.", }, ), + hack_source: None, + span: 387:555, }, FieldDefinition { name: Identifier { @@ -411,6 +419,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 613:634, }, InputValueDefinition { name: Identifier { @@ -435,6 +444,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 634:647, }, ], end: Token { @@ -453,6 +463,8 @@ SchemaDocument { value: "This is a description of the `three` field.", }, ), + hack_source: None, + span: 555:656, }, FieldDefinition { name: Identifier { @@ -505,17 +517,25 @@ SchemaDocument { }, ), default_value: Some( - String( - StringNode { - token: Token { - span: 680:688, - kind: StringLiteral, - }, - value: "string", + DefaultValue { + span: 678:688, + equals: Token { + span: 678:679, + kind: Equals, }, - ), + value: String( + StringNode { + token: Token { + span: 680:688, + kind: StringLiteral, + }, + value: "string", + }, + ), + }, ), directives: [], + span: 661:688, }, ], end: Token { @@ -526,6 +546,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 656:700, }, FieldDefinition { 
name: Identifier { @@ -591,41 +613,49 @@ SchemaDocument { }, ), default_value: Some( - List( - List { - span: 726:746, - start: Token { - span: 726:727, - kind: OpenBracket, - }, - items: [ - String( - StringNode { - token: Token { - span: 727:735, - kind: StringLiteral, + DefaultValue { + span: 724:746, + equals: Token { + span: 724:725, + kind: Equals, + }, + value: List( + List { + span: 726:746, + start: Token { + span: 726:727, + kind: OpenBracket, + }, + items: [ + String( + StringNode { + token: Token { + span: 727:735, + kind: StringLiteral, + }, + value: "string", }, - value: "string", - }, - ), - String( - StringNode { - token: Token { - span: 737:745, - kind: StringLiteral, + ), + String( + StringNode { + token: Token { + span: 737:745, + kind: StringLiteral, + }, + value: "string", }, - value: "string", - }, - ), - ], - end: Token { - span: 745:746, - kind: CloseBracket, + ), + ], + end: Token { + span: 745:746, + kind: CloseBracket, + }, }, - }, - ), + ), + }, ), directives: [], + span: 705:746, }, ], end: Token { @@ -636,6 +666,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 700:758, }, FieldDefinition { name: Identifier { @@ -688,47 +720,55 @@ SchemaDocument { }, ), default_value: Some( - Object( - List { - span: 784:798, - start: Token { - span: 784:785, - kind: OpenBrace, - }, - items: [ - ConstantArgument { - span: 785:797, - name: Identifier { - span: 785:788, - token: Token { + DefaultValue { + span: 782:798, + equals: Token { + span: 782:783, + kind: Equals, + }, + value: Object( + List { + span: 784:798, + start: Token { + span: 784:785, + kind: OpenBrace, + }, + items: [ + ConstantArgument { + span: 785:797, + name: Identifier { span: 785:788, - kind: Identifier, - }, - value: "key", - }, - colon: Token { - span: 788:789, - kind: Colon, - }, - value: String( - StringNode { token: Token { - span: 790:797, - kind: StringLiteral, + span: 785:788, + kind: Identifier, }, - value: "value", + value: "key", }, 
- ), + colon: Token { + span: 788:789, + kind: Colon, + }, + value: String( + StringNode { + token: Token { + span: 790:797, + kind: StringLiteral, + }, + value: "value", + }, + ), + }, + ], + end: Token { + span: 797:798, + kind: CloseBrace, }, - ], - end: Token { - span: 797:798, - kind: CloseBrace, }, - }, - ), + ), + }, ), directives: [], + span: 762:798, }, ], end: Token { @@ -739,6 +779,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 758:808, }, FieldDefinition { name: Identifier { @@ -791,14 +833,22 @@ SchemaDocument { }, ), default_value: Some( - Null( - Token { - span: 830:834, - kind: Identifier, + DefaultValue { + span: 828:834, + equals: Token { + span: 828:829, + kind: Equals, }, - ), + value: Null( + Token { + span: 830:834, + kind: Identifier, + }, + ), + }, ), directives: [], + span: 814:834, }, ], end: Token { @@ -809,6 +859,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 808:842, }, ], end: Token { @@ -817,6 +869,7 @@ SchemaDocument { }, }, ), + span: 299:845, }, ), ObjectTypeDefinition( @@ -945,15 +998,22 @@ SchemaDocument { }, ), default_value: Some( - String( - StringNode { - token: Token { - span: 921:930, - kind: StringLiteral, - }, - value: "default", + DefaultValue { + span: 919:930, + equals: Token { + span: 919:920, + kind: Equals, }, - ), + value: String( + StringNode { + token: Token { + span: 921:930, + kind: StringLiteral, + }, + value: "default", + }, + ), + }, ), directives: [ ConstantDirective { @@ -973,6 +1033,7 @@ SchemaDocument { arguments: None, }, ], + span: 909:952, }, ], end: Token { @@ -1000,6 +1061,8 @@ SchemaDocument { }, ], description: None, + hack_source: None, + span: 894:969, }, ], end: Token { @@ -1008,6 +1071,7 @@ SchemaDocument { }, }, ), + span: 845:972, }, ), ObjectTypeDefinition( @@ -1023,6 +1087,7 @@ SchemaDocument { interfaces: [], directives: [], fields: None, + span: 972:992, }, ), ObjectTypeExtension( @@ -1110,6 +1175,7 @@ 
SchemaDocument { ), default_value: None, directives: [], + span: 1018:1036, }, ], end: Token { @@ -1120,6 +1186,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 1012:1044, }, ], end: Token { @@ -1128,6 +1196,7 @@ SchemaDocument { }, }, ), + span: 1004:1047, }, ), ObjectTypeExtension( @@ -1160,6 +1229,7 @@ SchemaDocument { }, ], fields: None, + span: 1059:1072, }, ), InterfaceTypeDefinition( @@ -1206,6 +1276,8 @@ SchemaDocument { arguments: None, directives: [], description: None, + hack_source: None, + span: 1090:1102, }, FieldDefinition { name: Identifier { @@ -1258,17 +1330,25 @@ SchemaDocument { }, ), default_value: Some( - String( - StringNode { - token: Token { - span: 1126:1134, - kind: StringLiteral, - }, - value: "string", + DefaultValue { + span: 1124:1134, + equals: Token { + span: 1124:1125, + kind: Equals, }, - ), + value: String( + StringNode { + token: Token { + span: 1126:1134, + kind: StringLiteral, + }, + value: "string", + }, + ), + }, ), directives: [], + span: 1107:1134, }, ], end: Token { @@ -1279,6 +1359,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 1102:1144, }, ], end: Token { @@ -1287,6 +1369,7 @@ SchemaDocument { }, }, ), + span: 1072:1147, }, ), InterfaceTypeDefinition( @@ -1395,6 +1478,7 @@ SchemaDocument { arguments: None, }, ], + span: 1208:1239, }, ], end: Token { @@ -1422,6 +1506,8 @@ SchemaDocument { }, ], description: None, + hack_source: None, + span: 1193:1256, }, ], end: Token { @@ -1430,6 +1516,7 @@ SchemaDocument { }, }, ), + span: 1147:1259, }, ), InterfaceTypeDefinition( @@ -1445,6 +1532,7 @@ SchemaDocument { interfaces: [], directives: [], fields: None, + span: 1259:1289, }, ), InterfaceTypeExtension( @@ -1537,6 +1625,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 1333:1353, }, ], end: Token { @@ -1547,6 +1636,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 1329:1361, }, ], end: 
Token { @@ -1555,6 +1646,7 @@ SchemaDocument { }, }, ), + span: 1306:1364, }, ), InterfaceTypeExtension( @@ -1587,6 +1679,7 @@ SchemaDocument { }, ], fields: None, + span: 1381:1399, }, ), InterfaceTypeDefinition( @@ -1650,6 +1743,8 @@ SchemaDocument { arguments: None, directives: [], description: None, + hack_source: None, + span: 1438:1450, }, FieldDefinition { name: Identifier { @@ -1712,6 +1807,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 1454:1474, }, ], end: Token { @@ -1722,6 +1818,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 1450:1484, }, FieldDefinition { name: Identifier { @@ -1774,17 +1872,25 @@ SchemaDocument { }, ), default_value: Some( - String( - StringNode { - token: Token { - span: 1508:1516, - kind: StringLiteral, - }, - value: "string", + DefaultValue { + span: 1506:1516, + equals: Token { + span: 1506:1507, + kind: Equals, }, - ), + value: String( + StringNode { + token: Token { + span: 1508:1516, + kind: StringLiteral, + }, + value: "string", + }, + ), + }, ), directives: [], + span: 1489:1516, }, ], end: Token { @@ -1795,6 +1901,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 1484:1526, }, ], end: Token { @@ -1803,6 +1911,7 @@ SchemaDocument { }, }, ), + span: 1399:1529, }, ), UnionTypeDefinition( @@ -1842,6 +1951,7 @@ SchemaDocument { value: "Advert", }, ], + span: 1529:1576, }, ), UnionTypeDefinition( @@ -1890,6 +2000,7 @@ SchemaDocument { value: "B", }, ], + span: 1576:1615, }, ), UnionTypeDefinition( @@ -1938,6 +2049,7 @@ SchemaDocument { value: "B", }, ], + span: 1615:1659, }, ), UnionTypeDefinition( @@ -1952,6 +2064,7 @@ SchemaDocument { }, directives: [], members: [], + span: 1659:1681, }, ), UnionTypeExtension( @@ -1983,6 +2096,7 @@ SchemaDocument { value: "Video", }, ], + span: 1694:1716, }, ), UnionTypeExtension( @@ -2014,6 +2128,7 @@ SchemaDocument { }, ], members: [], + span: 1729:1744, }, ), ScalarTypeDefinition( @@ 
-2027,6 +2142,7 @@ SchemaDocument { value: "CustomScalar", }, directives: [], + span: 1744:1765, }, ), ScalarTypeDefinition( @@ -2057,6 +2173,7 @@ SchemaDocument { arguments: None, }, ], + span: 1765:1799, }, ), ScalarTypeExtension( @@ -2087,6 +2204,7 @@ SchemaDocument { arguments: None, }, ], + span: 1813:1837, }, ), EnumTypeDefinition( @@ -2118,6 +2236,7 @@ SchemaDocument { value: "DESKTOP", }, directives: [], + span: 1851:1921, }, EnumValueDefinition { name: Identifier { @@ -2129,6 +2248,7 @@ SchemaDocument { value: "MOBILE", }, directives: [], + span: 1921:1983, }, EnumValueDefinition { name: Identifier { @@ -2140,6 +2260,7 @@ SchemaDocument { value: "WEB", }, directives: [], + span: 1983:2032, }, ], end: Token { @@ -2148,6 +2269,7 @@ SchemaDocument { }, }, ), + span: 1837:2035, }, ), EnumTypeDefinition( @@ -2213,6 +2335,7 @@ SchemaDocument { arguments: None, }, ], + span: 2066:2097, }, EnumValueDefinition { name: Identifier { @@ -2224,6 +2347,7 @@ SchemaDocument { value: "OTHER_VALUE", }, directives: [], + span: 2097:2109, }, ], end: Token { @@ -2232,6 +2356,7 @@ SchemaDocument { }, }, ), + span: 2035:2112, }, ), EnumTypeDefinition( @@ -2246,6 +2371,7 @@ SchemaDocument { }, directives: [], values: None, + span: 2112:2132, }, ), EnumTypeExtension( @@ -2277,6 +2403,7 @@ SchemaDocument { value: "VR", }, directives: [], + span: 2153:2156, }, ], end: Token { @@ -2285,6 +2412,7 @@ SchemaDocument { }, }, ), + span: 2144:2159, }, ), EnumTypeExtension( @@ -2316,6 +2444,7 @@ SchemaDocument { }, ], values: None, + span: 2171:2185, }, ), InputObjectTypeDefinition( @@ -2369,6 +2498,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 2205:2220, }, InputValueDefinition { name: Identifier { @@ -2392,17 +2522,25 @@ SchemaDocument { }, ), default_value: Some( - Int( - IntNode { - token: Token { - span: 2234:2236, - kind: IntegerLiteral, + DefaultValue { + span: 2232:2236, + equals: Token { + span: 2232:2233, + kind: Equals, + }, + value: Int( + IntNode { + 
token: Token { + span: 2234:2236, + kind: IntegerLiteral, + }, + value: 42, }, - value: 42, - }, - ), + ), + }, ), directives: [], + span: 2220:2237, }, ], end: Token { @@ -2411,6 +2549,7 @@ SchemaDocument { }, }, ), + span: 2185:2240, }, ), InputObjectTypeDefinition( @@ -2489,6 +2628,7 @@ SchemaDocument { arguments: None, }, ], + span: 2280:2325, }, ], end: Token { @@ -2497,6 +2637,7 @@ SchemaDocument { }, }, ), + span: 2240:2328, }, ), InputObjectTypeDefinition( @@ -2511,6 +2652,7 @@ SchemaDocument { }, directives: [], fields: None, + span: 2328:2350, }, ), InputObjectTypeExtension( @@ -2554,16 +2696,23 @@ SchemaDocument { }, ), default_value: Some( - Float( - FloatNode { - token: Token { - span: 2392:2398, - kind: FloatLiteral, + DefaultValue { + span: 2390:2398, + equals: Token { + span: 2390:2391, + kind: Equals, + }, + value: Float( + FloatNode { + token: Token { + span: 2392:2398, + kind: FloatLiteral, + }, + value: 12300, + source_value: "1.23e4", }, - value: 12300, - source_value: "1.23e4", - }, - ), + ), + }, ), directives: [ ConstantDirective { @@ -2583,6 +2732,7 @@ SchemaDocument { arguments: None, }, ], + span: 2377:2423, }, ], end: Token { @@ -2591,6 +2741,7 @@ SchemaDocument { }, }, ), + span: 2363:2426, }, ), InputObjectTypeExtension( @@ -2622,6 +2773,7 @@ SchemaDocument { }, ], fields: None, + span: 2439:2465, }, ), DirectiveDefinition( @@ -2691,6 +2843,7 @@ SchemaDocument { arguments: None, }, ], + span: 2539:2625, }, ], end: Token { @@ -2714,6 +2867,8 @@ SchemaDocument { value: "This is a description of the `@skip` directive", }, ), + hack_source: None, + span: 2520:2673, }, ), DirectiveDefinition( @@ -2766,6 +2921,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 2692:2704, }, ], end: Token { @@ -2781,6 +2937,8 @@ SchemaDocument { InlineFragment, ], description: None, + hack_source: None, + span: 2673:2760, }, ), DirectiveDefinition( @@ -2833,6 +2991,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 
2780:2792, }, ], end: Token { @@ -2848,6 +3007,8 @@ SchemaDocument { InlineFragment, ], description: None, + hack_source: None, + span: 2760:2848, }, ), DirectiveDefinition( @@ -2900,6 +3061,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 2875:2888, }, ], end: Token { @@ -2914,6 +3076,8 @@ SchemaDocument { Interface, ], description: None, + hack_source: None, + span: 2848:2930, }, ), SchemaExtension( @@ -2937,6 +3101,7 @@ SchemaDocument { }, ], operation_types: None, + span: 2944:2955, }, ), SchemaExtension( @@ -2977,6 +3142,7 @@ SchemaDocument { }, value: "SubscriptionType", }, + span: 2983:3014, }, ], end: Token { @@ -2985,6 +3151,7 @@ SchemaDocument { }, }, ), + span: 2969:3016, }, ), ], diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_with_leading_comment.expected b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_with_leading_comment.expected index 4e37c284eca96..ee2dc1eae5b04 100644 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_with_leading_comment.expected +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/schema_with_leading_comment.expected @@ -28,6 +28,7 @@ SchemaDocument { }, value: "RootQueryType", }, + span: 99:122, }, OperationTypeDefinition { operation: Mutation, @@ -39,6 +40,7 @@ SchemaDocument { }, value: "RootMutationType", }, + span: 122:149, }, ], end: Token { @@ -46,6 +48,7 @@ SchemaDocument { kind: CloseBrace, }, }, + span: 88:151, }, ), ], diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/type_definition.expected b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/type_definition.expected index cee96673b8409..748381187471b 100644 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/type_definition.expected +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document/fixtures/type_definition.expected @@ -30,6 +30,7 @@ 
SchemaDocument { interfaces: [], directives: [], fields: None, + span: 0:12, }, ), ObjectTypeDefinition( @@ -76,6 +77,8 @@ SchemaDocument { arguments: None, directives: [], description: None, + hack_source: None, + span: 27:39, }, ], end: Token { @@ -84,6 +87,7 @@ SchemaDocument { }, }, ), + span: 12:42, }, ), ObjectTypeDefinition( @@ -211,6 +215,8 @@ SchemaDocument { arguments: None, directives: [], description: None, + hack_source: None, + span: 106:116, }, FieldDefinition { name: Identifier { @@ -273,6 +279,7 @@ SchemaDocument { ), default_value: None, directives: [], + span: 121:139, }, ], end: Token { @@ -283,6 +290,8 @@ SchemaDocument { ), directives: [], description: None, + hack_source: None, + span: 116:150, }, ], end: Token { @@ -291,6 +300,7 @@ SchemaDocument { }, }, ), + span: 42:153, }, ), ObjectTypeDefinition( @@ -323,6 +333,7 @@ SchemaDocument { ], directives: [], fields: None, + span: 153:203, }, ), ], diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document/mod.rs b/compiler/crates/graphql-syntax/tests/parse_schema_document/mod.rs deleted file mode 100644 index b141ddac8b7cc..0000000000000 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document/mod.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_schema_document; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - parse_schema_document( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .map(|x| format!("{:#?}", x)) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/graphql-syntax/tests/parse_schema_document_test.rs b/compiler/crates/graphql-syntax/tests/parse_schema_document_test.rs index 439f572fb4b43..3d2409114e9b7 100644 --- a/compiler/crates/graphql-syntax/tests/parse_schema_document_test.rs +++ b/compiler/crates/graphql-syntax/tests/parse_schema_document_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<5807697a03122f4474b041010ef2dc34>> + * @generated SignedSource<> */ mod parse_schema_document; @@ -12,37 +12,37 @@ mod parse_schema_document; use parse_schema_document::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn directive_description() { +#[tokio::test] +async fn directive_description() { let input = include_str!("parse_schema_document/fixtures/directive_description.graphql"); let expected = include_str!("parse_schema_document/fixtures/directive_description.expected"); - test_fixture(transform_fixture, "directive_description.graphql", "parse_schema_document/fixtures/directive_description.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive_description.graphql", "parse_schema_document/fixtures/directive_description.expected", input, expected).await; } -#[test] -fn field_description() { +#[tokio::test] +async fn field_description() { let input = include_str!("parse_schema_document/fixtures/field_description.graphql"); let expected = include_str!("parse_schema_document/fixtures/field_description.expected"); - test_fixture(transform_fixture, "field_description.graphql", "parse_schema_document/fixtures/field_description.expected", input, expected); + test_fixture(transform_fixture, file!(), "field_description.graphql", "parse_schema_document/fixtures/field_description.expected", input, expected).await; } -#[test] -fn schema_kitchen_sink() { +#[tokio::test] +async fn schema_kitchen_sink() { let input = include_str!("parse_schema_document/fixtures/schema_kitchen_sink.graphql"); let expected = include_str!("parse_schema_document/fixtures/schema_kitchen_sink.expected"); - test_fixture(transform_fixture, "schema_kitchen_sink.graphql", "parse_schema_document/fixtures/schema_kitchen_sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "schema_kitchen_sink.graphql", "parse_schema_document/fixtures/schema_kitchen_sink.expected", input, expected).await; } -#[test] -fn 
schema_with_leading_comment() { +#[tokio::test] +async fn schema_with_leading_comment() { let input = include_str!("parse_schema_document/fixtures/schema_with_leading_comment.graphql"); let expected = include_str!("parse_schema_document/fixtures/schema_with_leading_comment.expected"); - test_fixture(transform_fixture, "schema_with_leading_comment.graphql", "parse_schema_document/fixtures/schema_with_leading_comment.expected", input, expected); + test_fixture(transform_fixture, file!(), "schema_with_leading_comment.graphql", "parse_schema_document/fixtures/schema_with_leading_comment.expected", input, expected).await; } -#[test] -fn type_definition() { +#[tokio::test] +async fn type_definition() { let input = include_str!("parse_schema_document/fixtures/type_definition.graphql"); let expected = include_str!("parse_schema_document/fixtures/type_definition.expected"); - test_fixture(transform_fixture, "type_definition.graphql", "parse_schema_document/fixtures/type_definition.expected", input, expected); + test_fixture(transform_fixture, file!(), "type_definition.graphql", "parse_schema_document/fixtures/type_definition.expected", input, expected).await; } diff --git a/compiler/crates/graphql-syntax/tests/print.rs b/compiler/crates/graphql-syntax/tests/print.rs new file mode 100644 index 0000000000000..744aff6067660 --- /dev/null +++ b/compiler/crates/graphql-syntax/tests/print.rs @@ -0,0 +1,25 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_syntax::parse_schema_document; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let type_system_definitions = + parse_schema_document(fixture.content, SourceLocationKey::generated()) + .expect("Failed to parse definitions") + .definitions; + + let result = type_system_definitions + .iter() + .map(|node| format!("{}", node)) + .collect::>() + .join("\n"); + + Ok(result) +} diff --git a/compiler/crates/graphql-syntax/tests/print/fixtures/schema.expected b/compiler/crates/graphql-syntax/tests/print/fixtures/schema.expected index 7fceb2037a537..263448661b0a5 100644 --- a/compiler/crates/graphql-syntax/tests/print/fixtures/schema.expected +++ b/compiler/crates/graphql-syntax/tests/print/fixtures/schema.expected @@ -21,11 +21,19 @@ type Mutation { success: boolean } -interface XIGHuman @source(schema: "instagram", name: "Human") { +interface HasName { name: String } -extend interface XIGHuman { +interface HasNickname { + nickname: String +} + +interface XIGHuman implements Animal @source(schema: "instagram", name: "Human") { + name: String +} + +extend interface XIGHuman implements HasNickname { nickname: String } @@ -104,11 +112,19 @@ type Mutation { success: boolean } -interface XIGHuman @source(schema: "instagram", name: "Human") { +interface HasName { + name: String +} + +interface HasNickname { + nickname: String +} + +interface XIGHuman implements Animal @source(schema: "instagram", name: "Human") { name: String } -extend interface XIGHuman { +extend interface XIGHuman implements HasNickname { nickname: String } diff --git a/compiler/crates/graphql-syntax/tests/print/fixtures/schema.graphql b/compiler/crates/graphql-syntax/tests/print/fixtures/schema.graphql index e677c87ae264d..1a7e20ac2943a 100644 --- a/compiler/crates/graphql-syntax/tests/print/fixtures/schema.graphql +++ b/compiler/crates/graphql-syntax/tests/print/fixtures/schema.graphql @@ -20,11 +20,19 @@ 
type Mutation { success: boolean } -interface XIGHuman @source(schema: "instagram", name: "Human") { +interface HasName { name: String } -extend interface XIGHuman { +interface HasNickname { + nickname: String +} + +interface XIGHuman implements Animal @source(schema: "instagram", name: "Human") { + name: String +} + +extend interface XIGHuman implements HasNickname { nickname: String } diff --git a/compiler/crates/graphql-syntax/tests/print/mod.rs b/compiler/crates/graphql-syntax/tests/print/mod.rs deleted file mode 100644 index ade8205103fc4..0000000000000 --- a/compiler/crates/graphql-syntax/tests/print/mod.rs +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_syntax::parse_schema_document; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let type_system_definitions = - parse_schema_document(fixture.content, SourceLocationKey::generated()) - .expect("Failed to parse definitions") - .definitions; - - let result = type_system_definitions - .iter() - .map(|node| format!("{}", node)) - .collect::>() - .join("\n"); - - Ok(result) -} diff --git a/compiler/crates/graphql-syntax/tests/print_test.rs b/compiler/crates/graphql-syntax/tests/print_test.rs index dbba2061d08f7..31be41d709aaa 100644 --- a/compiler/crates/graphql-syntax/tests/print_test.rs +++ b/compiler/crates/graphql-syntax/tests/print_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod print; @@ -12,9 +12,9 @@ mod print; use print::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn schema() { +#[tokio::test] +async fn schema() { let input = include_str!("print/fixtures/schema.graphql"); let expected = include_str!("print/fixtures/schema.expected"); - test_fixture(transform_fixture, "schema.graphql", "print/fixtures/schema.expected", input, expected); + test_fixture(transform_fixture, file!(), "schema.graphql", "print/fixtures/schema.expected", input, expected).await; } diff --git a/compiler/crates/graphql-test-helpers/Cargo.toml b/compiler/crates/graphql-test-helpers/Cargo.toml index ebd123c6cf344..b6f482b324340 100644 --- a/compiler/crates/graphql-test-helpers/Cargo.toml +++ b/compiler/crates/graphql-test-helpers/Cargo.toml @@ -1,16 +1,20 @@ # @generated by autocargo from //relay/oss/crates/graphql-test-helpers:graphql-test-helpers + [package] name = "graphql-test-helpers" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] common = { path = "../common" } fixture-tests = { path = "../fixture-tests" } +fnv = "1.0" graphql-cli = { path = "../graphql-cli" } graphql-ir = { path = "../graphql-ir" } graphql-syntax = { path = "../graphql-syntax" } graphql-text-printer = { path = "../graphql-text-printer" } relay-test-schema = { path = "../relay-test-schema" } +walkdir = "2.3" diff --git a/compiler/crates/graphql-test-helpers/src/lib.rs b/compiler/crates/graphql-test-helpers/src/lib.rs index 3b4c6082e7d59..be2a75c25d0b1 100644 --- a/compiler/crates/graphql-test-helpers/src/lib.rs +++ b/compiler/crates/graphql-test-helpers/src/lib.rs @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +mod project_fixture; +mod temp_dir; use std::collections::HashMap; use std::sync::Arc; @@ -24,8 +26,10 @@ use graphql_syntax::parse_executable; use graphql_text_printer::print_fragment; use graphql_text_printer::print_operation; use graphql_text_printer::PrinterOptions; +pub use project_fixture::ProjectFixture; use relay_test_schema::get_test_schema; use relay_test_schema::get_test_schema_with_located_extensions; +pub use temp_dir::TestDir; pub fn apply_transform_for_test(fixture: &Fixture<'_>, transform: T) -> Result where @@ -43,7 +47,7 @@ where let extension_location = SourceLocationKey::embedded(fixture.file_name, 1); sources_map.insert(extension_location, extensions_text.to_string()); - get_test_schema_with_located_extensions(*extensions_text, extension_location) + get_test_schema_with_located_extensions(extensions_text, extension_location) } else { get_test_schema() }; @@ -56,6 +60,7 @@ where fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, relay_mode: Some(RelayMode), default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility }, ); let ir = ir_result.map_err(|diagnostics| { diff --git a/compiler/crates/graphql-test-helpers/src/project_fixture.rs b/compiler/crates/graphql-test-helpers/src/project_fixture.rs new file mode 100644 index 0000000000000..fb0e227733ab0 --- /dev/null +++ b/compiler/crates/graphql-test-helpers/src/project_fixture.rs @@ -0,0 +1,122 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::fs; +use std::path::Path; +use std::path::PathBuf; +use std::path::MAIN_SEPARATOR; + +use fnv::FnvHashMap; +use walkdir::WalkDir; + +/// A file format that supports encoding multiple files in a directory as a +/// single file. Useful for encoding a multi-file setup as input into a +/// compiler test. 
+/// +/// Inspired by rust-analyzer's fixture format: +/// https://github.com/rust-lang/rust-analyzer/blob/d3cc3bc00e310ff49268ce0c593eaa6bf4724bbd/crates/test-utils/src/fixture.rs +pub struct ProjectFixture { + // Relative path to file contents + files: FnvHashMap, +} + +impl ProjectFixture { + /// Parse a fixture file. Useful for parsing an existing fixture test. + pub fn deserialize(input: &str) -> Self { + let mut files: FnvHashMap = Default::default(); + let mut file_name: Option = None; + let mut content: Vec = Vec::new(); + for line in input.lines() { + if line.starts_with("//- ") { + if let Some(prev_file_name) = file_name { + files.insert(prev_file_name, content.join("\n")); + content = Vec::new(); + } + file_name = Some(PathBuf::from(line.trim_start_matches("//- ").trim())); + } else { + content.push(line.to_string()) + } + } + if let Some(prev_file_name) = file_name { + files.insert(prev_file_name, content.join("\n")); + } + + Self { files } + } + + /// Serialize ProjectFixture as a fixture file string. + /// Useful for encoding the results of a compiler integration test as + /// a single output file. + pub fn serialize(&self) -> String { + let mut sorted: Vec<_> = self.files.clone().into_iter().collect(); + sorted.sort_by(|x, y| x.0.cmp(&y.0)); + + let mut output: String = Default::default(); + + for (file_name, content) in sorted { + output.push_str(&format!("//- {}\n", format_normalized_path(&file_name))); + output.push_str(&content); + output.push('\n'); + } + + output + } + + /// Write the files contained in this ProjectFixture to a directory. + /// Useful for writing a fixture file to a temp directory before running an + /// integration test. 
+ pub fn write_to_dir(&self, dir: &Path) { + fs::create_dir_all(dir).expect("Expected to create temp dir"); + + for (file_name, content) in &self.files { + let file_path = dir.join(file_name); + fs::create_dir_all(file_path.clone().parent().unwrap()) + .expect("Expected to create dir"); + fs::write(file_path, content).expect("Expected to write file"); + } + } + + /// Construct a ProjectFixture from an existing directory on disk. + /// Useful for collecting the output of an integration test which + /// has resulted in files being written to disk. + pub fn read_from_dir(dir: &Path) -> Self { + let mut files: FnvHashMap = Default::default(); + for entry in WalkDir::new(dir).into_iter() { + let dir_entry = entry.expect("To get entry"); + if dir_entry.metadata().expect("foo").is_file() { + let relative_path = dir_entry + .path() + .strip_prefix(dir) + .expect("Paths should be relative."); + let content = fs::read_to_string(dir_entry.path()).expect("To read file"); + files.insert(relative_path.into(), content); + } + } + + Self { files } + } + + /// Remove files in another ProjectFixture from this ProjectFixture. + /// Useful for removing pre-existing files from an output ProjectFixture. + pub fn remove_files(&mut self, other: Self) { + for other_file in other.files.keys() { + self.files.remove(other_file); + } + } + + /// Return files map + pub fn files(&self) -> &FnvHashMap { + &self.files + } +} + +// Stringify a path such that it's stable across operating systems. +fn format_normalized_path(path: &Path) -> String { + path.to_string_lossy() + .to_string() + .replace(MAIN_SEPARATOR, "/") +} diff --git a/compiler/crates/graphql-test-helpers/src/temp_dir.rs b/compiler/crates/graphql-test-helpers/src/temp_dir.rs new file mode 100644 index 0000000000000..91de3df1c710b --- /dev/null +++ b/compiler/crates/graphql-test-helpers/src/temp_dir.rs @@ -0,0 +1,72 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::fs; +use std::io; +use std::path::Path; +use std::path::PathBuf; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; + +// Borrowed from Rust Analyzer +// https://github.com/matklad/rust-analyzer/blob/15c4b3fa7f9d97029d64a7e13a12aa1ee42316d7/crates/rust-analyzer/tests/slow-tests/testdir.rs +pub struct TestDir { + path: PathBuf, + keep: bool, +} + +impl TestDir { + pub fn new() -> TestDir { + let base = std::env::temp_dir().join("testdir"); + let pid = std::process::id(); + + static CNT: AtomicUsize = AtomicUsize::new(0); + for _ in 0..100 { + let cnt = CNT.fetch_add(1, Ordering::Relaxed); + let path = base.join(format!("{}_{}", pid, cnt)); + if path.is_dir() { + continue; + } + fs::create_dir_all(&path).unwrap(); + return TestDir { path, keep: false }; + } + panic!("Failed to create a temporary directory") + } + #[allow(unused)] + pub fn keep(mut self) -> TestDir { + self.keep = true; + self + } + pub fn path(&self) -> &Path { + &self.path + } +} + +impl Drop for TestDir { + fn drop(&mut self) { + if self.keep { + return; + } + remove_dir_all(&self.path).unwrap() + } +} + +#[cfg(not(windows))] +fn remove_dir_all(path: &Path) -> io::Result<()> { + fs::remove_dir_all(path) +} + +#[cfg(windows)] +fn remove_dir_all(path: &Path) -> io::Result<()> { + for _ in 0..99 { + if fs::remove_dir_all(path).is_ok() { + return Ok(()); + } + std::thread::sleep(std::time::Duration::from_millis(10)) + } + fs::remove_dir_all(path) +} diff --git a/compiler/crates/graphql-text-printer/Cargo.toml b/compiler/crates/graphql-text-printer/Cargo.toml index 436602dc9882f..ee1e2d867c662 100644 --- a/compiler/crates/graphql-text-printer/Cargo.toml +++ b/compiler/crates/graphql-text-printer/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from 
//relay/oss/crates/graphql-text-printer:[graphql-text-compact_test,graphql-text-printer,graphql-text-printer-operation-printer_test,graphql-text-printer_test,print_ast_test] + [package] name = "graphql-text-printer" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -30,3 +32,4 @@ schema = { path = "../schema" } fixture-tests = { path = "../fixture-tests" } relay-test-schema = { path = "../relay-test-schema" } relay-transforms = { path = "../relay-transforms" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/graphql-text-printer/src/print_full_operation.rs b/compiler/crates/graphql-text-printer/src/print_full_operation.rs index f22a0ec2f9227..c9f059503e55c 100644 --- a/compiler/crates/graphql-text-printer/src/print_full_operation.rs +++ b/compiler/crates/graphql-text-printer/src/print_full_operation.rs @@ -72,7 +72,7 @@ impl<'s> OperationPrinter<'s> { } } -impl<'s, 'ir> Visitor for OperationPrinter<'s> { +impl<'s> Visitor for OperationPrinter<'s> { const NAME: &'static str = "OperationPrinter"; const VISIT_ARGUMENTS: bool = false; const VISIT_DIRECTIVES: bool = false; diff --git a/compiler/crates/graphql-text-printer/src/print_to_text.rs b/compiler/crates/graphql-text-printer/src/print_to_text.rs index 931a158b61b1f..dd7b2c4bdba0b 100644 --- a/compiler/crates/graphql-text-printer/src/print_to_text.rs +++ b/compiler/crates/graphql-text-printer/src/print_to_text.rs @@ -143,7 +143,7 @@ pub fn write_selections( mut result: &mut impl Write, ) -> FmtResult { let mut printer = Printer::new(schema, &mut result, PrinterOptions::default()); - printer.print_selections(selections) + printer.print_selections(selections, "unknown".intern()) } pub fn write_selection( @@ -222,7 +222,7 @@ impl<'schema, 'writer, W: Write> Printer<'schema, 'writer, W> { write!(self.writer, "{} {}", operation.kind, operation.name.item)?; 
self.print_variable_definitions(&operation.variable_definitions)?; self.print_directives(&operation.directives, None, None)?; - self.print_selections(&operation.selections) + self.print_selections(&operation.selections, operation.name.item.0) } fn print_fragment(mut self, fragment: &FragmentDefinition) -> FmtResult { @@ -243,10 +243,10 @@ impl<'schema, 'writer, W: Write> Printer<'schema, 'writer, W> { None, Some(&fragment.variable_definitions), )?; - self.print_selections(&fragment.selections) + self.print_selections(&fragment.selections, fragment_name.0) } - fn print_selections(&mut self, selections: &[Selection]) -> FmtResult { + fn print_selections(&mut self, selections: &[Selection], name: StringKey) -> FmtResult { let len = selections.len(); if len > 0 { self.print_optional_space()?; @@ -266,7 +266,8 @@ impl<'schema, 'writer, W: Write> Printer<'schema, 'writer, W> { write!(self.writer, "}}")?; } else { panic!( - "Cannot print empty selections. Please, check transforms that may produce invalid selections." + "Cannot print empty selections for {}. Please, check transforms that may produce invalid selections.", + name ); } Ok(()) @@ -306,7 +307,7 @@ impl<'schema, 'writer, W: Write> Printer<'schema, 'writer, W> { self.print_alias_and_name(&field.alias, schema_field.name.item)?; self.print_arguments(&field.arguments)?; self.print_directives(&field.directives, conditions, None)?; - self.print_selections(&field.selections)?; + self.print_selections(&field.selections, schema_field.name.item)?; Ok(()) } @@ -342,7 +343,17 @@ impl<'schema, 'writer, W: Write> Printer<'schema, 'writer, W> { )?; }; self.print_directives(&field.directives, conditions, None)?; - self.print_selections(&field.selections) + + let name = if let Some(type_condition) = field.type_condition { + format!( + "... 
on {}", + self.schema.get_type_name(type_condition).lookup() + ) + .intern() + } else { + "...".intern() + }; + self.print_selections(&field.selections, name) } fn print_condition( diff --git a/compiler/crates/graphql-text-printer/tests/compact.rs b/compiler/crates/graphql-text-printer/tests/compact.rs new file mode 100644 index 0000000000000..263dfd3c617bf --- /dev/null +++ b/compiler/crates/graphql-text-printer/tests/compact.rs @@ -0,0 +1,52 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::node_identifier::LocationAgnosticPartialEq; +use graphql_ir::ExecutableDefinition; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_full_operation; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::RelayLocationAgnosticBehavior; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let initial_ast = parse_executable(fixture.content, source_location).unwrap(); + let initial_ir = build(&TEST_SCHEMA, &initial_ast.definitions).unwrap(); + let initial_ir_copy = initial_ir.clone(); + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), initial_ir.clone()); + let options = PrinterOptions { + compact: true, + ..Default::default() + }; + + // Print the IR into a GraphQL string for the fixture + let output = initial_ir + .into_iter() + .filter_map(|definition| match definition { + ExecutableDefinition::Operation(operation) => Some(operation), + _ => None, + }) + .map(|operation| print_full_operation(&program, &operation, options)) + .collect::>() + .join("\n\n"); + + // Roundtrip the output back into an IR + let 
roundtrip_ast = parse_executable(output.as_str(), SourceLocationKey::Generated).unwrap(); + let roundtrip_ir = build(&TEST_SCHEMA, &roundtrip_ast.definitions).unwrap(); + + // Check the roundtripped IR matches the initial IR to ensure we printed a valid schema + assert!(roundtrip_ir.location_agnostic_eq::(&initial_ir_copy)); + + Ok(output) +} diff --git a/compiler/crates/graphql-text-printer/tests/compact/mod.rs b/compiler/crates/graphql-text-printer/tests/compact/mod.rs deleted file mode 100644 index 68869747f158c..0000000000000 --- a/compiler/crates/graphql-text-printer/tests/compact/mod.rs +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::node_identifier::LocationAgnosticPartialEq; -use graphql_ir::ExecutableDefinition; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_full_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::RelayLocationAgnosticBehavior; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let initial_ast = parse_executable(fixture.content, source_location).unwrap(); - let initial_ir = build(&TEST_SCHEMA, &initial_ast.definitions).unwrap(); - let initial_ir_copy = initial_ir.clone(); - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), initial_ir.clone()); - let options = PrinterOptions { - compact: true, - ..Default::default() - }; - - // Print the IR into a GraphQL string for the fixture - let output = initial_ir - .into_iter() - .filter_map(|definition| match definition { - ExecutableDefinition::Operation(operation) => Some(operation), 
- _ => None, - }) - .map(|operation| print_full_operation(&program, &operation, options)) - .collect::>() - .join("\n\n"); - - // Roundtrip the output back into an IR - let roundtrip_ast = parse_executable(output.as_str(), SourceLocationKey::Generated).unwrap(); - let roundtrip_ir = build(&TEST_SCHEMA, &roundtrip_ast.definitions).unwrap(); - - // Check the roundtripped IR matches the initial IR to ensure we printed a valid schema - assert!(roundtrip_ir.location_agnostic_eq::(&initial_ir_copy)); - - Ok(output) -} diff --git a/compiler/crates/graphql-text-printer/tests/compact_test.rs b/compiler/crates/graphql-text-printer/tests/compact_test.rs index 490f1db1e6ed6..a0005f40dd80a 100644 --- a/compiler/crates/graphql-text-printer/tests/compact_test.rs +++ b/compiler/crates/graphql-text-printer/tests/compact_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<6d1db34f88e5cb87dff2002fd51965c4>> + * @generated SignedSource<<481acea758e645152bb6410ebadfb27c>> */ mod compact; @@ -12,51 +12,51 @@ mod compact; use compact::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn basic_directives() { +#[tokio::test] +async fn basic_directives() { let input = include_str!("compact/fixtures/basic_directives.graphql"); let expected = include_str!("compact/fixtures/basic_directives.expected"); - test_fixture(transform_fixture, "basic_directives.graphql", "compact/fixtures/basic_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_directives.graphql", "compact/fixtures/basic_directives.expected", input, expected).await; } -#[test] -fn basic_query() { +#[tokio::test] +async fn basic_query() { let input = include_str!("compact/fixtures/basic_query.graphql"); let expected = include_str!("compact/fixtures/basic_query.expected"); - test_fixture(transform_fixture, "basic_query.graphql", 
"compact/fixtures/basic_query.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_query.graphql", "compact/fixtures/basic_query.expected", input, expected).await; } -#[test] -fn basic_var_defs() { +#[tokio::test] +async fn basic_var_defs() { let input = include_str!("compact/fixtures/basic_var_defs.graphql"); let expected = include_str!("compact/fixtures/basic_var_defs.expected"); - test_fixture(transform_fixture, "basic_var_defs.graphql", "compact/fixtures/basic_var_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_var_defs.graphql", "compact/fixtures/basic_var_defs.expected", input, expected).await; } -#[test] -fn compact_test() { +#[tokio::test] +async fn compact_test() { let input = include_str!("compact/fixtures/compact_test.graphql"); let expected = include_str!("compact/fixtures/compact_test.expected"); - test_fixture(transform_fixture, "compact_test.graphql", "compact/fixtures/compact_test.expected", input, expected); + test_fixture(transform_fixture, file!(), "compact_test.graphql", "compact/fixtures/compact_test.expected", input, expected).await; } -#[test] -fn empty_args() { +#[tokio::test] +async fn empty_args() { let input = include_str!("compact/fixtures/empty_args.graphql"); let expected = include_str!("compact/fixtures/empty_args.expected"); - test_fixture(transform_fixture, "empty_args.graphql", "compact/fixtures/empty_args.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty_args.graphql", "compact/fixtures/empty_args.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("compact/fixtures/kitchen-sink.graphql"); let expected = include_str!("compact/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "compact/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", 
"compact/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn single_value_array_of_objects() { +#[tokio::test] +async fn single_value_array_of_objects() { let input = include_str!("compact/fixtures/single-value-array-of-objects.graphql"); let expected = include_str!("compact/fixtures/single-value-array-of-objects.expected"); - test_fixture(transform_fixture, "single-value-array-of-objects.graphql", "compact/fixtures/single-value-array-of-objects.expected", input, expected); + test_fixture(transform_fixture, file!(), "single-value-array-of-objects.graphql", "compact/fixtures/single-value-array-of-objects.expected", input, expected).await; } diff --git a/compiler/crates/graphql-text-printer/tests/operation_printer.rs b/compiler/crates/graphql-text-printer/tests/operation_printer.rs new file mode 100644 index 0000000000000..29eaee6ab44e4 --- /dev/null +++ b/compiler/crates/graphql-text-printer/tests/operation_printer.rs @@ -0,0 +1,50 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::ExecutableDefinition; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_full_operation; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&TEST_SCHEMA, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| { + definitions + .into_iter() + .filter_map(|definition| { + if let ExecutableDefinition::Operation(operation) = definition { + Some(print_full_operation( + &program, + &operation, + Default::default(), + )) + } else { + None + } + }) + .collect::>() + .join("\n\n\n\n") + }) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/graphql-text-printer/tests/operation_printer/mod.rs b/compiler/crates/graphql-text-printer/tests/operation_printer/mod.rs deleted file mode 100644 index 88ae4bfbc91b6..0000000000000 --- a/compiler/crates/graphql-text-printer/tests/operation_printer/mod.rs +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::ExecutableDefinition; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_full_operation; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&TEST_SCHEMA, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - - build(&TEST_SCHEMA, &ast.definitions) - .map(|definitions| { - definitions - .into_iter() - .filter_map(|definition| { - if let ExecutableDefinition::Operation(operation) = definition { - Some(print_full_operation( - &program, - &operation, - Default::default(), - )) - } else { - None - } - }) - .collect::>() - .join("\n\n\n\n") - }) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/graphql-text-printer/tests/operation_printer_test.rs b/compiler/crates/graphql-text-printer/tests/operation_printer_test.rs index aa5c86d284a54..7be9da9803bff 100644 --- a/compiler/crates/graphql-text-printer/tests/operation_printer_test.rs +++ b/compiler/crates/graphql-text-printer/tests/operation_printer_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<7cba6c2fe0d54fa5abd9ec9b9a70bb05>> + * @generated SignedSource<<984b72363f1562fb67037c5a7b76f244>> */ mod operation_printer; @@ -12,37 +12,37 @@ mod operation_printer; use operation_printer::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn field_arguments() { +#[tokio::test] +async fn field_arguments() { let input = include_str!("operation_printer/fixtures/field-arguments.graphql"); let expected = include_str!("operation_printer/fixtures/field-arguments.expected"); - test_fixture(transform_fixture, "field-arguments.graphql", "operation_printer/fixtures/field-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "field-arguments.graphql", "operation_printer/fixtures/field-arguments.expected", input, expected).await; } -#[test] -fn multiple_queries_with_same_fragment() { +#[tokio::test] +async fn multiple_queries_with_same_fragment() { let input = include_str!("operation_printer/fixtures/multiple-queries-with-same-fragment.graphql"); let expected = include_str!("operation_printer/fixtures/multiple-queries-with-same-fragment.expected"); - test_fixture(transform_fixture, "multiple-queries-with-same-fragment.graphql", "operation_printer/fixtures/multiple-queries-with-same-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-queries-with-same-fragment.graphql", "operation_printer/fixtures/multiple-queries-with-same-fragment.expected", input, expected).await; } -#[test] -fn query_variables() { +#[tokio::test] +async fn query_variables() { let input = include_str!("operation_printer/fixtures/query-variables.graphql"); let expected = include_str!("operation_printer/fixtures/query-variables.expected"); - test_fixture(transform_fixture, "query-variables.graphql", "operation_printer/fixtures/query-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-variables.graphql", "operation_printer/fixtures/query-variables.expected", input, 
expected).await; } -#[test] -fn query_with_fragment_spreads() { +#[tokio::test] +async fn query_with_fragment_spreads() { let input = include_str!("operation_printer/fixtures/query-with-fragment-spreads.graphql"); let expected = include_str!("operation_printer/fixtures/query-with-fragment-spreads.expected"); - test_fixture(transform_fixture, "query-with-fragment-spreads.graphql", "operation_printer/fixtures/query-with-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-fragment-spreads.graphql", "operation_printer/fixtures/query-with-fragment-spreads.expected", input, expected).await; } -#[test] -fn query_with_nested_fragment_srpeads() { +#[tokio::test] +async fn query_with_nested_fragment_srpeads() { let input = include_str!("operation_printer/fixtures/query-with-nested-fragment-srpeads.graphql"); let expected = include_str!("operation_printer/fixtures/query-with-nested-fragment-srpeads.expected"); - test_fixture(transform_fixture, "query-with-nested-fragment-srpeads.graphql", "operation_printer/fixtures/query-with-nested-fragment-srpeads.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-nested-fragment-srpeads.graphql", "operation_printer/fixtures/query-with-nested-fragment-srpeads.expected", input, expected).await; } diff --git a/compiler/crates/graphql-text-printer/tests/print.rs b/compiler/crates/graphql-text-printer/tests/print.rs new file mode 100644 index 0000000000000..e0a7c80e74f6c --- /dev/null +++ b/compiler/crates/graphql-text-printer/tests/print.rs @@ -0,0 +1,37 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_syntax::parse_executable_with_features; +use graphql_syntax::FragmentArgumentSyntaxKind; +use graphql_syntax::ParserFeatures; +use graphql_text_printer::print_ir; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable_with_features( + fixture.content, + source_location, + ParserFeatures { + fragment_argument_capability: + FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, + }, + ) + .unwrap(); + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| print_ir(&TEST_SCHEMA, &definitions).join("\n\n")) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/graphql-text-printer/tests/print/mod.rs b/compiler/crates/graphql-text-printer/tests/print/mod.rs deleted file mode 100644 index bebb8d5b59ffd..0000000000000 --- a/compiler/crates/graphql-text-printer/tests/print/mod.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_syntax::parse_executable_with_features; -use graphql_syntax::FragmentArgumentSyntaxKind; -use graphql_syntax::ParserFeatures; -use graphql_text_printer::print_ir; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable_with_features( - fixture.content, - source_location, - ParserFeatures { - fragment_argument_capability: - FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions, - }, - ) - .unwrap(); - build(&TEST_SCHEMA, &ast.definitions) - .map(|definitions| print_ir(&TEST_SCHEMA, &definitions).join("\n\n")) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/graphql-text-printer/tests/print_ast.rs b/compiler/crates/graphql-text-printer/tests/print_ast.rs new file mode 100644 index 0000000000000..54b89240a7efe --- /dev/null +++ b/compiler/crates/graphql-text-printer/tests/print_ast.rs @@ -0,0 +1,23 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_executable_definition_ast; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(fixture.content, source_location).unwrap(); + + Ok(ast + .definitions + .iter() + .map(print_executable_definition_ast) + .collect::>() + .join("\n")) +} diff --git a/compiler/crates/graphql-text-printer/tests/print_ast/mod.rs b/compiler/crates/graphql-text-printer/tests/print_ast/mod.rs deleted file mode 100644 index 8ca4799cc133e..0000000000000 --- a/compiler/crates/graphql-text-printer/tests/print_ast/mod.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_executable_definition_ast; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(fixture.content, source_location).unwrap(); - - Ok(ast - .definitions - .iter() - .map(|definition| print_executable_definition_ast(definition)) - .collect::>() - .join("\n")) -} diff --git a/compiler/crates/graphql-text-printer/tests/print_ast_test.rs b/compiler/crates/graphql-text-printer/tests/print_ast_test.rs index 4add494434769..5b1c140f3d5ac 100644 --- a/compiler/crates/graphql-text-printer/tests/print_ast_test.rs +++ b/compiler/crates/graphql-text-printer/tests/print_ast_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<708e29f86e0dd17944c1f5367368da39>> */ mod print_ast; @@ -12,142 +12,142 @@ mod print_ast; use print_ast::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn basic_arg_defs() { +#[tokio::test] +async fn basic_arg_defs() { let input = include_str!("print_ast/fixtures/basic_arg_defs.graphql"); let expected = include_str!("print_ast/fixtures/basic_arg_defs.expected"); - test_fixture(transform_fixture, "basic_arg_defs.graphql", "print_ast/fixtures/basic_arg_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_arg_defs.graphql", "print_ast/fixtures/basic_arg_defs.expected", input, expected).await; } -#[test] -fn basic_arg_defs_type() { +#[tokio::test] +async fn basic_arg_defs_type() { let input = include_str!("print_ast/fixtures/basic_arg_defs_type.graphql"); let expected = include_str!("print_ast/fixtures/basic_arg_defs_type.expected"); - test_fixture(transform_fixture, "basic_arg_defs_type.graphql", "print_ast/fixtures/basic_arg_defs_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_arg_defs_type.graphql", "print_ast/fixtures/basic_arg_defs_type.expected", input, expected).await; } -#[test] -fn basic_directives() { +#[tokio::test] +async fn basic_directives() { let input = include_str!("print_ast/fixtures/basic_directives.graphql"); let expected = include_str!("print_ast/fixtures/basic_directives.expected"); - test_fixture(transform_fixture, "basic_directives.graphql", "print_ast/fixtures/basic_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_directives.graphql", "print_ast/fixtures/basic_directives.expected", input, expected).await; } -#[test] -fn basic_fragment() { +#[tokio::test] +async fn basic_fragment() { let input = include_str!("print_ast/fixtures/basic_fragment.graphql"); let expected = include_str!("print_ast/fixtures/basic_fragment.expected"); - test_fixture(transform_fixture, 
"basic_fragment.graphql", "print_ast/fixtures/basic_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_fragment.graphql", "print_ast/fixtures/basic_fragment.expected", input, expected).await; } -#[test] -fn basic_inline_fragments() { +#[tokio::test] +async fn basic_inline_fragments() { let input = include_str!("print_ast/fixtures/basic_inline_fragments.graphql"); let expected = include_str!("print_ast/fixtures/basic_inline_fragments.expected"); - test_fixture(transform_fixture, "basic_inline_fragments.graphql", "print_ast/fixtures/basic_inline_fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_inline_fragments.graphql", "print_ast/fixtures/basic_inline_fragments.expected", input, expected).await; } -#[test] -fn basic_list_object_values() { +#[tokio::test] +async fn basic_list_object_values() { let input = include_str!("print_ast/fixtures/basic_list_object_values.graphql"); let expected = include_str!("print_ast/fixtures/basic_list_object_values.expected"); - test_fixture(transform_fixture, "basic_list_object_values.graphql", "print_ast/fixtures/basic_list_object_values.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_list_object_values.graphql", "print_ast/fixtures/basic_list_object_values.expected", input, expected).await; } -#[test] -fn basic_query() { +#[tokio::test] +async fn basic_query() { let input = include_str!("print_ast/fixtures/basic_query.graphql"); let expected = include_str!("print_ast/fixtures/basic_query.expected"); - test_fixture(transform_fixture, "basic_query.graphql", "print_ast/fixtures/basic_query.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_query.graphql", "print_ast/fixtures/basic_query.expected", input, expected).await; } -#[test] -fn basic_query_with_float() { +#[tokio::test] +async fn basic_query_with_float() { let input = include_str!("print_ast/fixtures/basic_query_with_float.graphql"); let expected 
= include_str!("print_ast/fixtures/basic_query_with_float.expected"); - test_fixture(transform_fixture, "basic_query_with_float.graphql", "print_ast/fixtures/basic_query_with_float.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_query_with_float.graphql", "print_ast/fixtures/basic_query_with_float.expected", input, expected).await; } -#[test] -fn basic_var_defs() { +#[tokio::test] +async fn basic_var_defs() { let input = include_str!("print_ast/fixtures/basic_var_defs.graphql"); let expected = include_str!("print_ast/fixtures/basic_var_defs.expected"); - test_fixture(transform_fixture, "basic_var_defs.graphql", "print_ast/fixtures/basic_var_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_var_defs.graphql", "print_ast/fixtures/basic_var_defs.expected", input, expected).await; } -#[test] -fn basic_var_defs_with_directives() { +#[tokio::test] +async fn basic_var_defs_with_directives() { let input = include_str!("print_ast/fixtures/basic_var_defs_with_directives.graphql"); let expected = include_str!("print_ast/fixtures/basic_var_defs_with_directives.expected"); - test_fixture(transform_fixture, "basic_var_defs_with_directives.graphql", "print_ast/fixtures/basic_var_defs_with_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_var_defs_with_directives.graphql", "print_ast/fixtures/basic_var_defs_with_directives.expected", input, expected).await; } -#[test] -fn conditions() { +#[tokio::test] +async fn conditions() { let input = include_str!("print_ast/fixtures/conditions.graphql"); let expected = include_str!("print_ast/fixtures/conditions.expected"); - test_fixture(transform_fixture, "conditions.graphql", "print_ast/fixtures/conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "conditions.graphql", "print_ast/fixtures/conditions.expected", input, expected).await; } -#[test] -fn empty_args() { +#[tokio::test] +async fn empty_args() { let 
input = include_str!("print_ast/fixtures/empty_args.graphql"); let expected = include_str!("print_ast/fixtures/empty_args.expected"); - test_fixture(transform_fixture, "empty_args.graphql", "print_ast/fixtures/empty_args.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty_args.graphql", "print_ast/fixtures/empty_args.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("print_ast/fixtures/kitchen-sink.graphql"); let expected = include_str!("print_ast/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "print_ast/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "print_ast/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn lowercase_enum_fragment_arg() { +#[tokio::test] +async fn lowercase_enum_fragment_arg() { let input = include_str!("print_ast/fixtures/lowercase-enum-fragment-arg.graphql"); let expected = include_str!("print_ast/fixtures/lowercase-enum-fragment-arg.expected"); - test_fixture(transform_fixture, "lowercase-enum-fragment-arg.graphql", "print_ast/fixtures/lowercase-enum-fragment-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "lowercase-enum-fragment-arg.graphql", "print_ast/fixtures/lowercase-enum-fragment-arg.expected", input, expected).await; } -#[test] -fn nested_conditions() { +#[tokio::test] +async fn nested_conditions() { let input = include_str!("print_ast/fixtures/nested_conditions.graphql"); let expected = include_str!("print_ast/fixtures/nested_conditions.expected"); - test_fixture(transform_fixture, "nested_conditions.graphql", "print_ast/fixtures/nested_conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested_conditions.graphql", "print_ast/fixtures/nested_conditions.expected", input, expected).await; } -#[test] -fn 
single_value_array_of_objects() { +#[tokio::test] +async fn single_value_array_of_objects() { let input = include_str!("print_ast/fixtures/single-value-array-of-objects.graphql"); let expected = include_str!("print_ast/fixtures/single-value-array-of-objects.expected"); - test_fixture(transform_fixture, "single-value-array-of-objects.graphql", "print_ast/fixtures/single-value-array-of-objects.expected", input, expected); + test_fixture(transform_fixture, file!(), "single-value-array-of-objects.graphql", "print_ast/fixtures/single-value-array-of-objects.expected", input, expected).await; } -#[test] -fn string_enum_arg_invalid() { +#[tokio::test] +async fn string_enum_arg_invalid() { let input = include_str!("print_ast/fixtures/string-enum-arg.invalid.graphql"); let expected = include_str!("print_ast/fixtures/string-enum-arg.invalid.expected"); - test_fixture(transform_fixture, "string-enum-arg.invalid.graphql", "print_ast/fixtures/string-enum-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-arg.invalid.graphql", "print_ast/fixtures/string-enum-arg.invalid.expected", input, expected).await; } -#[test] -fn string_enum_fragment_arg() { +#[tokio::test] +async fn string_enum_fragment_arg() { let input = include_str!("print_ast/fixtures/string-enum-fragment-arg.graphql"); let expected = include_str!("print_ast/fixtures/string-enum-fragment-arg.expected"); - test_fixture(transform_fixture, "string-enum-fragment-arg.graphql", "print_ast/fixtures/string-enum-fragment-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-fragment-arg.graphql", "print_ast/fixtures/string-enum-fragment-arg.expected", input, expected).await; } -#[test] -fn string_enum_fragment_arg_with_complex_input() { +#[tokio::test] +async fn string_enum_fragment_arg_with_complex_input() { let input = include_str!("print_ast/fixtures/string-enum-fragment-arg-with-complex-input.graphql"); let expected = 
include_str!("print_ast/fixtures/string-enum-fragment-arg-with-complex-input.expected"); - test_fixture(transform_fixture, "string-enum-fragment-arg-with-complex-input.graphql", "print_ast/fixtures/string-enum-fragment-arg-with-complex-input.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-fragment-arg-with-complex-input.graphql", "print_ast/fixtures/string-enum-fragment-arg-with-complex-input.expected", input, expected).await; } -#[test] -fn unknown_enum_arg_invalid() { +#[tokio::test] +async fn unknown_enum_arg_invalid() { let input = include_str!("print_ast/fixtures/unknown-enum-arg.invalid.graphql"); let expected = include_str!("print_ast/fixtures/unknown-enum-arg.invalid.expected"); - test_fixture(transform_fixture, "unknown-enum-arg.invalid.graphql", "print_ast/fixtures/unknown-enum-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unknown-enum-arg.invalid.graphql", "print_ast/fixtures/unknown-enum-arg.invalid.expected", input, expected).await; } diff --git a/compiler/crates/graphql-text-printer/tests/print_test.rs b/compiler/crates/graphql-text-printer/tests/print_test.rs index 85e7f6f5dda39..28e81a24f61d3 100644 --- a/compiler/crates/graphql-text-printer/tests/print_test.rs +++ b/compiler/crates/graphql-text-printer/tests/print_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod print; @@ -12,121 +12,121 @@ mod print; use print::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn basic_arg_defs() { +#[tokio::test] +async fn basic_arg_defs() { let input = include_str!("print/fixtures/basic_arg_defs.graphql"); let expected = include_str!("print/fixtures/basic_arg_defs.expected"); - test_fixture(transform_fixture, "basic_arg_defs.graphql", "print/fixtures/basic_arg_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_arg_defs.graphql", "print/fixtures/basic_arg_defs.expected", input, expected).await; } -#[test] -fn basic_directives() { +#[tokio::test] +async fn basic_directives() { let input = include_str!("print/fixtures/basic_directives.graphql"); let expected = include_str!("print/fixtures/basic_directives.expected"); - test_fixture(transform_fixture, "basic_directives.graphql", "print/fixtures/basic_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_directives.graphql", "print/fixtures/basic_directives.expected", input, expected).await; } -#[test] -fn basic_fragment() { +#[tokio::test] +async fn basic_fragment() { let input = include_str!("print/fixtures/basic_fragment.graphql"); let expected = include_str!("print/fixtures/basic_fragment.expected"); - test_fixture(transform_fixture, "basic_fragment.graphql", "print/fixtures/basic_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_fragment.graphql", "print/fixtures/basic_fragment.expected", input, expected).await; } -#[test] -fn basic_inline_fragments() { +#[tokio::test] +async fn basic_inline_fragments() { let input = include_str!("print/fixtures/basic_inline_fragments.graphql"); let expected = include_str!("print/fixtures/basic_inline_fragments.expected"); - test_fixture(transform_fixture, "basic_inline_fragments.graphql", "print/fixtures/basic_inline_fragments.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "basic_inline_fragments.graphql", "print/fixtures/basic_inline_fragments.expected", input, expected).await; } -#[test] -fn basic_list_object_values() { +#[tokio::test] +async fn basic_list_object_values() { let input = include_str!("print/fixtures/basic_list_object_values.graphql"); let expected = include_str!("print/fixtures/basic_list_object_values.expected"); - test_fixture(transform_fixture, "basic_list_object_values.graphql", "print/fixtures/basic_list_object_values.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_list_object_values.graphql", "print/fixtures/basic_list_object_values.expected", input, expected).await; } -#[test] -fn basic_query() { +#[tokio::test] +async fn basic_query() { let input = include_str!("print/fixtures/basic_query.graphql"); let expected = include_str!("print/fixtures/basic_query.expected"); - test_fixture(transform_fixture, "basic_query.graphql", "print/fixtures/basic_query.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_query.graphql", "print/fixtures/basic_query.expected", input, expected).await; } -#[test] -fn basic_var_defs() { +#[tokio::test] +async fn basic_var_defs() { let input = include_str!("print/fixtures/basic_var_defs.graphql"); let expected = include_str!("print/fixtures/basic_var_defs.expected"); - test_fixture(transform_fixture, "basic_var_defs.graphql", "print/fixtures/basic_var_defs.expected", input, expected); + test_fixture(transform_fixture, file!(), "basic_var_defs.graphql", "print/fixtures/basic_var_defs.expected", input, expected).await; } -#[test] -fn conditions() { +#[tokio::test] +async fn conditions() { let input = include_str!("print/fixtures/conditions.graphql"); let expected = include_str!("print/fixtures/conditions.expected"); - test_fixture(transform_fixture, "conditions.graphql", "print/fixtures/conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "conditions.graphql", 
"print/fixtures/conditions.expected", input, expected).await; } -#[test] -fn empty_args() { +#[tokio::test] +async fn empty_args() { let input = include_str!("print/fixtures/empty_args.graphql"); let expected = include_str!("print/fixtures/empty_args.expected"); - test_fixture(transform_fixture, "empty_args.graphql", "print/fixtures/empty_args.expected", input, expected); + test_fixture(transform_fixture, file!(), "empty_args.graphql", "print/fixtures/empty_args.expected", input, expected).await; } -#[test] -fn fragment_variables() { +#[tokio::test] +async fn fragment_variables() { let input = include_str!("print/fixtures/fragment_variables.graphql"); let expected = include_str!("print/fixtures/fragment_variables.expected"); - test_fixture(transform_fixture, "fragment_variables.graphql", "print/fixtures/fragment_variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_variables.graphql", "print/fixtures/fragment_variables.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("print/fixtures/kitchen-sink.graphql"); let expected = include_str!("print/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "print/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "print/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn nested_conditions() { +#[tokio::test] +async fn nested_conditions() { let input = include_str!("print/fixtures/nested_conditions.graphql"); let expected = include_str!("print/fixtures/nested_conditions.expected"); - test_fixture(transform_fixture, "nested_conditions.graphql", "print/fixtures/nested_conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested_conditions.graphql", "print/fixtures/nested_conditions.expected", input, expected).await; } -#[test] -fn 
single_value_array_of_objects() { +#[tokio::test] +async fn single_value_array_of_objects() { let input = include_str!("print/fixtures/single-value-array-of-objects.graphql"); let expected = include_str!("print/fixtures/single-value-array-of-objects.expected"); - test_fixture(transform_fixture, "single-value-array-of-objects.graphql", "print/fixtures/single-value-array-of-objects.expected", input, expected); + test_fixture(transform_fixture, file!(), "single-value-array-of-objects.graphql", "print/fixtures/single-value-array-of-objects.expected", input, expected).await; } -#[test] -fn string_enum_arg_invalid() { +#[tokio::test] +async fn string_enum_arg_invalid() { let input = include_str!("print/fixtures/string-enum-arg.invalid.graphql"); let expected = include_str!("print/fixtures/string-enum-arg.invalid.expected"); - test_fixture(transform_fixture, "string-enum-arg.invalid.graphql", "print/fixtures/string-enum-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-arg.invalid.graphql", "print/fixtures/string-enum-arg.invalid.expected", input, expected).await; } -#[test] -fn string_enum_fragment_arg() { +#[tokio::test] +async fn string_enum_fragment_arg() { let input = include_str!("print/fixtures/string-enum-fragment-arg.graphql"); let expected = include_str!("print/fixtures/string-enum-fragment-arg.expected"); - test_fixture(transform_fixture, "string-enum-fragment-arg.graphql", "print/fixtures/string-enum-fragment-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-fragment-arg.graphql", "print/fixtures/string-enum-fragment-arg.expected", input, expected).await; } -#[test] -fn string_enum_fragment_arg_with_complex_input() { +#[tokio::test] +async fn string_enum_fragment_arg_with_complex_input() { let input = include_str!("print/fixtures/string-enum-fragment-arg-with-complex-input.graphql"); let expected = 
include_str!("print/fixtures/string-enum-fragment-arg-with-complex-input.expected"); - test_fixture(transform_fixture, "string-enum-fragment-arg-with-complex-input.graphql", "print/fixtures/string-enum-fragment-arg-with-complex-input.expected", input, expected); + test_fixture(transform_fixture, file!(), "string-enum-fragment-arg-with-complex-input.graphql", "print/fixtures/string-enum-fragment-arg-with-complex-input.expected", input, expected).await; } -#[test] -fn unknown_enum_arg_invalid() { +#[tokio::test] +async fn unknown_enum_arg_invalid() { let input = include_str!("print/fixtures/unknown-enum-arg.invalid.graphql"); let expected = include_str!("print/fixtures/unknown-enum-arg.invalid.expected"); - test_fixture(transform_fixture, "unknown-enum-arg.invalid.graphql", "print/fixtures/unknown-enum-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unknown-enum-arg.invalid.graphql", "print/fixtures/unknown-enum-arg.invalid.expected", input, expected).await; } diff --git a/compiler/crates/graphql-watchman/Cargo.toml b/compiler/crates/graphql-watchman/Cargo.toml index a3af6c6ebacd3..78446fcdfb5e0 100644 --- a/compiler/crates/graphql-watchman/Cargo.toml +++ b/compiler/crates/graphql-watchman/Cargo.toml @@ -1,13 +1,15 @@ # @generated by autocargo from //relay/oss/crates/graphql-watchman:graphql-watchman + [package] name = "graphql-watchman" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] } -serde = { version = "1.0.136", features = ["derive", "rc"] } +serde = { version = "1.0.185", features = ["derive", "rc"] } serde_bser = "0.3" watchman_client = "0.8.0" diff --git a/compiler/crates/graphql-watchman/src/subscription.rs b/compiler/crates/graphql-watchman/src/subscription.rs index 1b5feb877c639..434c22728601e 100644 --- 
a/compiler/crates/graphql-watchman/src/subscription.rs +++ b/compiler/crates/graphql-watchman/src/subscription.rs @@ -118,8 +118,8 @@ impl WatchmanFileSourceSubscription { /// TODO: Make this dynamic on the default branch name. fn get_base_hg_revision(commit_hash: Option) -> Option { let output = Command::new("hg") - .arg("log".to_string()) - .arg("-r".to_string()) + .arg("log") + .arg("-r") .arg(format!( "ancestor(master, {})", commit_hash.unwrap_or_else(|| ".".to_string()) diff --git a/compiler/crates/intern/Cargo.toml b/compiler/crates/intern/Cargo.toml index 13def7a85df02..8120caea8e793 100644 --- a/compiler/crates/intern/Cargo.toml +++ b/compiler/crates/intern/Cargo.toml @@ -1,24 +1,26 @@ # @generated by autocargo from //relay/oss/crates/intern:intern + [package] name = "intern" version = "0.1.0" authors = ["Facebook"] edition = "2021" description = "Intern data into a 32-bit id" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] fnv = "1.0" -hashbrown = { version = "0.12.3", features = ["raw", "serde"] } -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +hashbrown = { version = "0.14.3", features = ["raw", "serde"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } once_cell = "1.12" -parking_lot = { version = "0.11.2", features = ["send_guard"] } -serde = { version = "1.0.136", features = ["derive", "rc"] } +parking_lot = { version = "0.12.1", features = ["send_guard"] } +serde = { version = "1.0.185", features = ["derive", "rc"] } serde_bytes = "0.11" -serde_derive = "1.0" +serde_derive = "1.0.185" smallvec = { version = "1.6.1", features = ["serde", "union"] } [dev-dependencies] bincode = "1.3.3" rand = { version = "0.8", features = ["small_rng"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } diff --git 
a/compiler/crates/intern/src/atomic_arena.rs b/compiler/crates/intern/src/atomic_arena.rs index f82bb336bf393..55f5b9f69c1fa 100644 --- a/compiler/crates/intern/src/atomic_arena.rs +++ b/compiler/crates/intern/src/atomic_arena.rs @@ -121,7 +121,7 @@ pub struct AtomicArena<'a, T> { next_biased_index: AtomicU32, /// buckets in reverse order, starting from the back and working /// forwards. Capacity for bucket i is bucket_capacity(i). - buckets: [AtomicPtr>; NUM_SIZES as usize], + buckets: [AtomicPtr>; NUM_SIZES], bucket_alloc_mutex: Mutex<()>, } @@ -264,7 +264,7 @@ impl<'a, T> AtomicArena<'a, T> { /// it calls `slice_for_slot_slow` to allocate it. #[inline] fn slice_for_slot(&self, a: usize) -> NonNull> { - if let Some(curr) = NonNull::new(self.buckets[a as usize].load(Ordering::Acquire)) { + if let Some(curr) = NonNull::new(self.buckets[a].load(Ordering::Acquire)) { curr } else { self.slice_for_slot_slow(a) @@ -281,10 +281,10 @@ impl<'a, T> AtomicArena<'a, T> { // as an `Acquire / Release` pair. let lock = self.bucket_alloc_mutex.lock(); // Relaxed load because we know we're competing with prior lock holders now. - if let Some(curr) = NonNull::new(self.buckets[a as usize].load(Ordering::Relaxed)) { + if let Some(curr) = NonNull::new(self.buckets[a].load(Ordering::Relaxed)) { return curr; } - let cap = bucket_capacity(a) as usize; + let cap = bucket_capacity(a); // Allocate bucket as vector, then prise it apart since we // only care about capacity and pointer. 
We use MaybeUninit // because we are tracking slot validity across all buckets @@ -300,7 +300,7 @@ impl<'a, T> AtomicArena<'a, T> { memory_consistency_assert!(acap == cap || std::mem::size_of::() == 0); memory_consistency_assert_eq!(len, 0); if let Some(nn_ptr) = NonNull::new(ptr) { - self.buckets[a as usize].store(ptr, Ordering::Release); + self.buckets[a].store(ptr, Ordering::Release); drop(lock); nn_ptr } else { @@ -344,11 +344,11 @@ impl<'a, T> AtomicArena<'a, T> { // the current (uninitialized) contents of this bucket // entry before writing the new contents. This can yield // a hard-to-debug segfault in the internals of malloc. - let e_ptr: *mut MaybeUninit = unsafe { e_ptr.add(b as usize) }; + let e_ptr: *mut MaybeUninit = unsafe { e_ptr.add(b) }; unsafe { *e_ptr = MaybeUninit::new(element); } - let e: &T = unsafe { &*(&*e_ptr).as_ptr() }; + let e: &T = unsafe { &*(*e_ptr).as_ptr() }; ( Ref { phantom: PhantomData, @@ -383,15 +383,13 @@ impl<'a, T> AtomicArena<'a, T> { // Get bucket address, but do *not* allocate a bucket. // Ordering::Relaxed is OK because we got a Ref in a // thread-safe way in get(..). - self.buckets - .get_unchecked(a as usize) - .load(Ordering::Relaxed) + self.buckets.get_unchecked(a).load(Ordering::Relaxed) }; // Sanity check bucket. Again, won't catch all unsafe accesses. memory_consistency_assert!(!e_ptr.is_null()); unsafe { // Read the added element, and strip the MaybeUnit wrapper to yield a &T. - let r: &MaybeUninit = &*e_ptr.add(b as usize); + let r: &MaybeUninit = &*e_ptr.add(b); &*r.as_ptr() } } @@ -443,7 +441,7 @@ impl<'a, T> Drop for AtomicArena<'a, T> { // efficient as simply calling Drop on individual // elements, and is vastly simpler than calling the // mem apis directly. 
- Vec::from_raw_parts(b_ptr, sz as usize, cap as usize) + Vec::from_raw_parts(b_ptr, sz, cap) }; drop(iv); bucket.store(ptr::null_mut(), Ordering::Relaxed) @@ -672,8 +670,8 @@ mod tests { use super::*; - static mut ZERO: Zero<&str> = Zero::new("zero"); - static STRING_ARENA: AtomicArena<'static, &str> = AtomicArena::with_zero(unsafe { &ZERO }); + static ZERO: Zero<&str> = Zero::new("zero"); + static STRING_ARENA: AtomicArena<'static, &str> = AtomicArena::with_zero(&ZERO); /// For internal testing purposes we permit the unsafe synthesis of Refs. fn mk_ref<'a, T>(index: u32) -> Ref<'a, T> { @@ -711,7 +709,7 @@ mod tests { assert_eq!(b, bucket_capacity(0) - 1); // Check thresholds for s in (MIN_SHIFT + 1)..(U32_BITS as u32) { - let i = 1 << (s as u32); + let i = 1 << s; let (a0, b0) = index(i - 1); let (a1, b1) = index(i); assert_eq!(a0, U32_BITS - s as usize); @@ -867,7 +865,7 @@ mod tests { let mut avail: Arc> = Arc::new(Vec::with_capacity(N as usize)); Arc::get_mut(&mut avail) .unwrap() - .resize_with(N as usize, || AtomicU32::new(10 * N as u32)); + .resize_with(N as usize, || AtomicU32::new(10 * N)); // Make sure we don't just run the producer or all the // consumers without interleaving them. 
let progress = Arc::new((Mutex::new(0u32), Condvar::new())); @@ -883,11 +881,8 @@ mod tests { let n = i * WRITERS + k; let id = arena.add(n as usize); assert!(id.index() < N); - assert_eq!( - avail[id.index() as usize].load(Ordering::Acquire), - 10 * N as u32 - ); - avail[id.index() as usize].store(n as u32, Ordering::Release); + assert_eq!(avail[id.index() as usize].load(Ordering::Acquire), 10 * N); + avail[id.index() as usize].store(n, Ordering::Release); if k == 0 && i == next_poke { let (lock, cvar) = &*progress; *lock.lock() = i; @@ -939,7 +934,7 @@ mod tests { for i in 0..N { let a = avail[i as usize].load(Ordering::Relaxed); if a >= N { - fail.push((a, i as u32)); + fail.push((a, i)); } } assert!(fail.is_empty(), "{:?}", fail); diff --git a/compiler/crates/intern/src/intern.rs b/compiler/crates/intern/src/intern.rs index c09e404c4f5cc..d11e3847f61ec 100644 --- a/compiler/crates/intern/src/intern.rs +++ b/compiler/crates/intern/src/intern.rs @@ -292,7 +292,7 @@ impl InternTable { /// Usually you can rely on `deref` to do this implicitly. #[inline] fn get(&'static self, r: Id) -> &Id::Intern { - &*self.arena.get(r.unwrap()) + self.arena.get(r.unwrap()) } /// Getter that checks for the need to allocate. @@ -699,6 +699,21 @@ macro_rules! intern_struct { }; } +pub trait Lookup { + fn lookup(self) -> &'static str; +} + +#[macro_export] +macro_rules! impl_lookup { + ($named:ident) => { + impl Lookup for $named { + fn lookup(self) -> &'static str { + self.0.lookup() + } + } + }; +} + #[cfg(test)] mod tests { use serde_derive::Deserialize; @@ -846,18 +861,3 @@ mod tests { assert_eq!(deserialized, val); } } - -pub trait Lookup { - fn lookup(self) -> &'static str; -} - -#[macro_export] -macro_rules! 
impl_lookup { - ($named:ident) => { - impl Lookup for $named { - fn lookup(self) -> &'static str { - self.0.lookup() - } - } - }; -} diff --git a/compiler/crates/intern/src/lib.rs b/compiler/crates/intern/src/lib.rs index fb44de17e276d..14fb5b0585fae 100644 --- a/compiler/crates/intern/src/lib.rs +++ b/compiler/crates/intern/src/lib.rs @@ -45,7 +45,8 @@ //! //! Simply import `intern::string` and go to town: //! ``` -//! use intern::string::{self, StringId}; +//! use intern::string; +//! use intern::string::StringId; //! //! let a: StringId = string::intern("a"); //! let b = string::intern("b"); @@ -62,27 +63,33 @@ //! static reference to the interned object referred to by `my_id`. //! ``` //! # #[macro_use] -//! use intern::{InternId, InternSerdes, intern_struct}; -//! use serde_derive::{Deserialize, Serialize}; +//! use intern::intern_struct; +//! use intern::InternId; +//! use intern::InternSerdes; +//! use serde_derive::Deserialize; +//! use serde_derive::Serialize; //! //! #[derive(Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] -//! struct MyType{ v: i64 } +//! struct MyType { +//! v: i64, +//! } //! //! intern_struct! { //! struct MyId = Intern { serdes("InternSerdes"); } //! } //! //! # fn main() { -//! let m1 = MyType{ v: 1 }; -//! let m2 = MyType{ v: 1 }; -//! let m3 = MyType{ v: -57 }; +//! let m1 = MyType { v: 1 }; +//! let m2 = MyType { v: 1 }; +//! let m3 = MyType { v: -57 }; //! let i1 = MyId::intern(m1); //! let i2 = MyId::intern(m2); //! let i3 = MyId::intern(m3); //! assert_eq!(i1, i2); //! assert_eq!(i1.get().v, 1); //! assert!(i1 != i3); -//! assert_eq!(i3.v, -57); // Uses Deref +//! // Uses Deref +//! assert_eq!(i3.v, -57); //! # } //! ``` //! 
diff --git a/compiler/crates/intern/src/sharded_set.rs b/compiler/crates/intern/src/sharded_set.rs index 794c21f4d8d6d..f5d4b97cd8d2c 100644 --- a/compiler/crates/intern/src/sharded_set.rs +++ b/compiler/crates/intern/src/sharded_set.rs @@ -10,7 +10,6 @@ use std::collections::hash_map::RandomState; use std::fmt; use std::hash::BuildHasher; use std::hash::Hash; -use std::hash::Hasher; use hashbrown::raw::RawTable; use parking_lot::RwLock; @@ -111,9 +110,7 @@ impl ShardedSet { } fn hash_one(build_hasher: &B, x: T) -> u64 { - let mut hasher = build_hasher.build_hasher(); - x.hash(&mut hasher); - hasher.finish() + build_hasher.hash_one(&x) } pub struct InsertLock<'a, T, S = RandomState> { diff --git a/compiler/crates/intern/src/small_bytes.rs b/compiler/crates/intern/src/small_bytes.rs index 1efa26ebf915a..93685571dc8e5 100644 --- a/compiler/crates/intern/src/small_bytes.rs +++ b/compiler/crates/intern/src/small_bytes.rs @@ -57,7 +57,7 @@ impl SmallBytes { impl Debug for SmallBytes { fn fmt(&self, f: &mut Formatter<'_>) -> Result { let s: String = - String::from_utf8(self.iter().map(|b| escape_default(*b)).flatten().collect()).unwrap(); + String::from_utf8(self.iter().flat_map(|b| escape_default(*b)).collect()).unwrap(); match self { Small { len, .. } => write!(f, "Small{{len:{},bytes:b\"{}\"}}", *len, s), Large(_) => write!(f, "Large(b\"{}\")", s), @@ -110,7 +110,7 @@ impl From<&[u8]> for SmallBytes { impl From> for SmallBytes { fn from(u: Box<[u8]>) -> SmallBytes { - if let Some(r) = make_small(&*u) { + if let Some(r) = make_small(&u) { r } else { Large(u) @@ -120,7 +120,7 @@ impl From> for SmallBytes { impl From> for SmallBytes { fn from(u: Vec) -> SmallBytes { - if let Some(r) = make_small(&*u) { + if let Some(r) = make_small(&u) { r } else { Large(u.into()) @@ -210,7 +210,7 @@ mod tests { assert_eq!(hash(&l), hash(e)); let v: Vec = Vec::new(); let ll = SmallBytes::from(v); // Consumes v. 
- let ls: &[u8] = &*ll; + let ls: &[u8] = ≪ assert_eq!(ll, l); assert_eq!(ls, b""); let vs: &[u8] = &Vec::new(); diff --git a/compiler/crates/intern/src/string.rs b/compiler/crates/intern/src/string.rs index 8de9ee46b562b..b4cf020abe1c2 100644 --- a/compiler/crates/intern/src/string.rs +++ b/compiler/crates/intern/src/string.rs @@ -43,7 +43,7 @@ impl BytesId { // Safe because BytesId can only be generated // by a call to intern, which returns the result // of id_to_bytes.push. - &*self.get() + self.get() } } @@ -95,7 +95,7 @@ impl StringId { /// Intern index for the underlying bytes. pub fn index(self) -> u32 { - (self.0).0.index() as u32 + (self.0).0.index() } pub fn from_index_checked(index: u32) -> Option { @@ -391,10 +391,10 @@ mod tests { const WRITERS: usize = 100; const MAX: usize = N / WRITERS; // Array to track index issued to each string. - let mut avail: Arc> = Arc::new(Vec::with_capacity(MAX as usize)); + let mut avail: Arc> = Arc::new(Vec::with_capacity(MAX)); Arc::get_mut(&mut avail) .unwrap() - .resize_with(N as usize, || AtomicU32::new(u32::MAX)); + .resize_with(N, || AtomicU32::new(u32::MAX)); let mut workers = Vec::new(); for k in 0..WRITERS { let avail = avail.clone(); diff --git a/compiler/crates/interner/Cargo.toml b/compiler/crates/interner/Cargo.toml index 8d3ae892189d2..f5e510d01beda 100644 --- a/compiler/crates/interner/Cargo.toml +++ b/compiler/crates/interner/Cargo.toml @@ -1,14 +1,16 @@ # @generated by autocargo from //relay/oss/crates/interner:interner + [package] name = "interner" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] fnv = "1.0" lazy_static = "1.4" once_cell = "1.12" -parking_lot = { version = "0.11.2", features = ["send_guard"] } -serde = { version = "1.0.136", features = ["derive", "rc"] } +parking_lot = { version = "0.12.1", features = ["send_guard"] } +serde = { version = "1.0.185", features = ["derive", "rc"] } diff --git 
a/compiler/crates/interner/src/macros.rs b/compiler/crates/interner/src/macros.rs index c9ff6c4484951..278502db192dc 100644 --- a/compiler/crates/interner/src/macros.rs +++ b/compiler/crates/interner/src/macros.rs @@ -44,16 +44,14 @@ macro_rules! intern { /// let user: User = User { name }; /// let user_key: UserKey = user.intern(); /// ``` -/// #[macro_export] macro_rules! make_intern { ($name:ident as $alias:ident) => { use lazy_static::lazy_static; - - use crate::Intern; - use crate::InternKey; - use crate::InternTable; - use crate::RawInternKey; + use $crate::Intern; + use $crate::InternKey; + use $crate::InternTable; + use $crate::RawInternKey; lazy_static! { /// Global interning table for this type diff --git a/compiler/crates/js-config-loader/Cargo.toml b/compiler/crates/js-config-loader/Cargo.toml index c8d7a9e1df29e..d4c7b4316566f 100644 --- a/compiler/crates/js-config-loader/Cargo.toml +++ b/compiler/crates/js-config-loader/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/js-config-loader:[js-config-loader,tests] + [package] name = "js-config-loader" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -11,9 +13,9 @@ name = "tests" path = "tests/lib.rs" [dependencies] -serde = { version = "1.0.136", features = ["derive", "rc"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } +thiserror = "1.0.49" [dev-dependencies] -tempfile = "3.3" +tempfile = "3.8" diff --git a/compiler/crates/js-config-loader/src/loader.rs b/compiler/crates/js-config-loader/src/loader.rs index d0aa44f69d1bc..86c482fe701fd 100644 --- a/compiler/crates/js-config-loader/src/loader.rs +++ b/compiler/crates/js-config-loader/src/loader.rs @@ -24,7 +24,7 @@ pub struct 
PackageJsonLoader<'a> { } impl<'a, T: for<'de> Deserialize<'de>> Loader for PackageJsonLoader<'a> { fn load(&self, path: &Path) -> Result, ErrorCode> { - let file = File::open(&path).unwrap(); + let file = File::open(path).unwrap(); let reader = BufReader::new(file); let mut package_json: Value = serde_json::from_reader(reader) .map_err(|error| ErrorCode::PackageJsonParseError { error })?; @@ -52,7 +52,7 @@ impl Loader for YamlLoader { pub struct JsonLoader; impl Deserialize<'de> + 'static> Loader for JsonLoader { fn load(&self, path: &Path) -> Result, ErrorCode> { - let file = File::open(&path).unwrap(); + let file = File::open(path).unwrap(); let reader = BufReader::new(file); let config = serde_json::from_reader(reader)?; Ok(Some(config)) @@ -65,7 +65,7 @@ impl Deserialize<'de> + 'static> Loader for JsLoader { let output = Command::new("node") .arg("-e") .arg(r#"process.stdout.write(JSON.stringify(require(process.argv[1])))"#) - .arg(&path) + .arg(path) .output() .expect("failed to execute process. 
Make sure you have Node installed."); diff --git a/compiler/crates/persist-query/Cargo.toml b/compiler/crates/persist-query/Cargo.toml index d0fc420887624..9e07fe5358852 100644 --- a/compiler/crates/persist-query/Cargo.toml +++ b/compiler/crates/persist-query/Cargo.toml @@ -1,17 +1,19 @@ # @generated by autocargo from //relay/oss/crates/persist-query:persist-query + [package] name = "persist-query" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] -hyper = { version = "0.14.7", features = ["client", "http1", "http2"] } +hyper = { version = "0.14.26", features = ["client", "http1", "http2", "stream"] } hyper-tls = "0.5" -serde = { version = "1.0.136", features = ["derive", "rc"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } +thiserror = "1.0.49" url = "2.2.2" [features] diff --git a/compiler/crates/persist-query/src/errors.rs b/compiler/crates/persist-query/src/errors.rs index 59ad560348013..71e4330c8b861 100644 --- a/compiler/crates/persist-query/src/errors.rs +++ b/compiler/crates/persist-query/src/errors.rs @@ -34,4 +34,10 @@ pub enum PersistError { #[from] source: std::io::Error, }, + + #[error("Failed parsing response: {source}. 
Raw response: {raw_response}")] + DetailedResponseParseError { + source: serde_json::Error, + raw_response: String, + }, } diff --git a/compiler/crates/persist-query/src/lib.rs b/compiler/crates/persist-query/src/lib.rs index 44d442476ec53..2719e6c9923cf 100644 --- a/compiler/crates/persist-query/src/lib.rs +++ b/compiler/crates/persist-query/src/lib.rs @@ -72,7 +72,11 @@ pub async fn persist( let client = Client::builder().build(https); let res = client.request(req).await?; let bytes = hyper::body::to_bytes(res.into_body()).await?; - let result: Response = serde_json::from_slice(&bytes)?; + let result: Response = + serde_json::from_slice(&bytes).map_err(|err| PersistError::DetailedResponseParseError { + source: err, + raw_response: String::from_utf8_lossy(&bytes).to_string(), + })?; match result { Response::Success { id } => Ok(id), diff --git a/compiler/crates/relay-bin/Cargo.toml b/compiler/crates/relay-bin/Cargo.toml index bb2269045accc..29b57f7ce60bb 100644 --- a/compiler/crates/relay-bin/Cargo.toml +++ b/compiler/crates/relay-bin/Cargo.toml @@ -1,13 +1,15 @@ # @generated by autocargo from //relay/oss/crates/relay-bin:relay + [package] name = "relay" -version = "15.0.0" +version = "17.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] -clap = { version = "3.2.23", features = ["derive", "env", "regex", "unicode", "wrap_help"] } +clap = { version = "3.2.25", features = ["derive", "env", "regex", "unicode", "wrap_help"] } common = { path = "../common" } intern = { path = "../intern" } log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] } @@ -16,5 +18,5 @@ relay-lsp = { path = "../relay-lsp" } schema = { path = "../schema" } schema-documentation = { path = "../schema-documentation" } simplelog = "0.10.0" -thiserror = "1.0.36" -tokio = { version = "1.25.0", features = ["full", "test-util", "tracing"] } +thiserror = "1.0.49" +tokio = { version = "1.37.0", features = 
["full", "test-util", "tracing"] } diff --git a/compiler/crates/relay-bin/src/main.rs b/compiler/crates/relay-bin/src/main.rs index 7539110c76876..654c89916c9b5 100644 --- a/compiler/crates/relay-bin/src/main.rs +++ b/compiler/crates/relay-bin/src/main.rs @@ -15,10 +15,10 @@ use clap::ArgEnum; use clap::Parser; use common::ConsoleLogger; use intern::string_key::Intern; -use intern::Lookup; use log::error; use log::info; use relay_compiler::build_project::artifact_writer::ArtifactValidationWriter; +use relay_compiler::build_project::generate_extra_artifacts::default_generate_extra_artifacts_fn; use relay_compiler::compiler::Compiler; use relay_compiler::config::Config; use relay_compiler::errors::Error as CompilerError; @@ -26,9 +26,13 @@ use relay_compiler::FileSourceKind; use relay_compiler::LocalPersister; use relay_compiler::OperationPersister; use relay_compiler::PersistConfig; +use relay_compiler::ProjectName; use relay_compiler::RemotePersister; use relay_lsp::start_language_server; use relay_lsp::DummyExtraDataProvider; +use relay_lsp::FieldDefinitionSourceInfo; +use relay_lsp::FieldSchemaInfo; +use relay_lsp::LSPExtraDataProvider; use schema::SDLSchema; use schema_documentation::SchemaDocumentationLoader; use simplelog::ColorChoice; @@ -108,6 +112,11 @@ struct LspCommand { /// Verbosity level #[clap(long, arg_enum, default_value = "quiet-with-errors")] output: OutputKind, + + /// Script to be called to lookup the actual definition of a GraphQL entity for + /// implementation-first GraphQL schemas. 
+ #[clap(long)] + locate_command: Option, } #[derive(clap::Subcommand)] @@ -217,7 +226,7 @@ fn set_project_flag(config: &mut Config, projects: Vec) -> Result<(), Er project_config.enabled = false; } for selected_project in projects { - let selected_project = selected_project.intern(); + let selected_project = ProjectName::from(selected_project.intern()); if let Some(project_config) = config.projects.get_mut(&selected_project) { project_config.enabled = true; @@ -229,7 +238,7 @@ fn set_project_flag(config: &mut Config, projects: Vec) -> Result<(), Er config .projects .keys() - .map(|name| name.lookup()) + .map(|name| name.to_string()) .collect::>() .join(", ") ), @@ -237,7 +246,7 @@ fn set_project_flag(config: &mut Config, projects: Vec) -> Result<(), Er } } - return Ok(()); + Ok(()) } async fn handle_compiler_command(command: CompileCommand) -> Result<(), Error> { @@ -257,7 +266,7 @@ async fn handle_compiler_command(command: CompileCommand) -> Result<(), Error> { set_project_flag(&mut config, command.projects)?; if command.validate { - config.artifact_writer = Box::new(ArtifactValidationWriter::default()); + config.artifact_writer = Box::::default(); } config.create_operation_persister = Some(Box::new(|project_config| { @@ -288,6 +297,8 @@ async fn handle_compiler_command(command: CompileCommand) -> Result<(), Error> { ); } + config.generate_extra_artifacts = Some(Box::new(default_generate_extra_artifacts_fn)); + let compiler = Compiler::new(Arc::new(config), Arc::new(ConsoleLogger)); if command.watch { @@ -306,22 +317,78 @@ async fn handle_compiler_command(command: CompileCommand) -> Result<(), Error> { Ok(()) } +struct ExtraDataProvider { + locate_command: String, +} + +impl ExtraDataProvider { + pub fn new(locate_command: String) -> ExtraDataProvider { + ExtraDataProvider { locate_command } + } +} + +impl LSPExtraDataProvider for ExtraDataProvider { + fn fetch_query_stats(&self, _search_token: &str) -> Vec { + vec![] + } + + fn resolve_field_definition( + &self, 
+ project_name: String, + parent_type: String, + field_info: Option, + ) -> Result, String> { + let entity_name = match field_info { + Some(field_info) => format!("{}.{}", parent_type, field_info.name), + None => parent_type, + }; + let result = Command::new(&self.locate_command) + .arg(project_name) + .arg(entity_name) + .output() + .map_err(|e| format!("Failed to run locate command: {}", e))?; + + let result = String::from_utf8(result.stdout).expect("Failed to parse output"); + + // Parse file_path:line_number:column_number + let result_trimmed = result.trim(); + let result = result_trimmed.split(':').collect::>(); + if result.len() != 3 { + return Err(format!( + "Result '{}' did not match expected format. Please return 'file_path:line_number:column_number'", + result_trimmed + )); + } + let file_path = result[0]; + let line_number = result[1].parse::().unwrap() - 1; + + Ok(Some(FieldDefinitionSourceInfo { + file_path: file_path.to_string(), + line_number, + is_local: true, + })) + } +} + async fn handle_lsp_command(command: LspCommand) -> Result<(), Error> { configure_logger(command.output, TerminalMode::Stderr); let config = get_config(command.config)?; + let extra_data_provider: Box = + match command.locate_command { + Some(locate_command) => Box::new(ExtraDataProvider::new(locate_command)), + None => Box::new(DummyExtraDataProvider::new()), + }; + let perf_logger = Arc::new(ConsoleLogger); - let extra_data_provider = Box::new(DummyExtraDataProvider::new()); let schema_documentation_loader: Option>> = None; - let js_language_server = None; start_language_server( config, perf_logger, extra_data_provider, schema_documentation_loader, - js_language_server, ) .await .map_err(|err| Error::LSPError { diff --git a/compiler/crates/relay-codegen/Cargo.toml b/compiler/crates/relay-codegen/Cargo.toml index 5bd98fd5fd80c..1503607bef15e 100644 --- a/compiler/crates/relay-codegen/Cargo.toml +++ b/compiler/crates/relay-codegen/Cargo.toml @@ -1,9 +1,11 @@ -# @generated by 
autocargo from //relay/oss/crates/relay-codegen:[aliased_fragments_test,react_flight_codegen_test,relay-codegen,relay-codegen-client-edges,relay-codegen-client-extensions,relay-codegen-client-extensions-abstract-types,relay-codegen-connections,relay-codegen-deduped_json_codegen_test,relay-codegen-defer-stream,relay-codegen-json_codegen_test,relay_actor_change_test,request_metadata_test,required_directive_codegen_test,skip_printing_nulls_test] +# @generated by autocargo from //relay/oss/crates/relay-codegen:[aliased_fragments_test,catch_directive_codegen_test,relay-codegen,relay-codegen-client-edges,relay-codegen-client-extensions,relay-codegen-client-extensions-abstract-types,relay-codegen-connections,relay-codegen-deduped_json_codegen_test,relay-codegen-defer-stream,relay-codegen-json_codegen_test,relay-codegen-throw_on_field_error_directive_codegen_test,relay_actor_change_test,request_metadata_test,required_directive_codegen_test,skip_printing_nulls_test] + [package] name = "relay-codegen" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -34,13 +36,18 @@ path = "tests/defer_stream_test.rs" name = "relay_codegen_json_codegen_test" path = "tests/json_codegen_test.rs" +[[test]] +name = "relay_codegen_throw_on_field_error_directive_codegen_test" +path = "tests/throw_on_field_error_directive_codegen_test.rs" + [dependencies] common = { path = "../common" } +docblock-shared = { path = "../docblock-shared" } fnv = "1.0" graphql-ir = { path = "../graphql-ir" } graphql-syntax = { path = "../graphql-syntax" } hex = "0.4.3" -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } lazy_static = "1.4" md-5 = "0.10" @@ -53,3 +60,4 @@ path-slash = "0.2.1" fixture-tests = { path = "../fixture-tests" } graphql-test-helpers = { path = "../graphql-test-helpers" } 
relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/relay-codegen/src/ast.rs b/compiler/crates/relay-codegen/src/ast.rs index da3486aacbdb4..f0ec76f38939d 100644 --- a/compiler/crates/relay-codegen/src/ast.rs +++ b/compiler/crates/relay-codegen/src/ast.rs @@ -19,12 +19,11 @@ pub struct ObjectEntry { pub value: Primitive, } -/// A helper for creating Vec +/// A helper for creating `Vec` /// For now, field names are defined in `CODEGEN_CONSTANTS #[macro_export] macro_rules! object { { $ ( $(:$func: expr,)* $key:ident: $value:expr,)* } => ({ - use crate::constants::CODEGEN_CONSTANTS; vec![ $( $( @@ -77,6 +76,18 @@ pub struct JSModuleDependency { pub import_name: ModuleImportName, } +#[derive(Eq, PartialEq, Hash, PartialOrd, Ord, Debug, Clone)] +pub struct ResolverModuleReference { + pub field_type: StringKey, + pub resolver_function_name: ModuleImportName, +} + +#[derive(Eq, PartialEq, Hash, Debug)] +pub enum JSModule { + Reference(ResolverModuleReference), + Dependency(JSModuleDependency), +} + #[derive(Eq, PartialEq, Hash, PartialOrd, Ord, Debug, Clone)] pub enum GraphQLModuleDependency { Name(ExecutableDefinitionName), @@ -99,6 +110,7 @@ pub enum Primitive { RawString(String), GraphQLModuleDependency(GraphQLModuleDependency), JSModuleDependency(JSModuleDependency), + ResolverModuleReference(ResolverModuleReference), // Don't include the value in the output when // skip_printing_nulls is enabled @@ -113,12 +125,6 @@ pub enum Primitive { js_module: JSModuleDependency, injected_field_name_details: Option<(StringKey, bool)>, }, - RelayResolverWeakObjectWrapper { - resolver: Box, - key: StringKey, - plural: bool, - live: bool, - }, } impl Primitive { diff --git a/compiler/crates/relay-codegen/src/build_ast.rs b/compiler/crates/relay-codegen/src/build_ast.rs index ce1e4e43876c9..93d034684426e 100644 --- a/compiler/crates/relay-codegen/src/build_ast.rs +++ 
b/compiler/crates/relay-codegen/src/build_ast.rs @@ -5,8 +5,13 @@ * LICENSE file in the root directory of this source tree. */ +use std::path::PathBuf; + +use common::DirectiveName; use common::NamedItem; +use common::ObjectName; use common::WithLocation; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; use graphql_ir::Argument; use graphql_ir::Condition; use graphql_ir::ConditionValue; @@ -14,6 +19,7 @@ use graphql_ir::ConstantValue; use graphql_ir::Directive; use graphql_ir::ExecutableDefinitionName; use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; use graphql_ir::FragmentSpread; use graphql_ir::InlineFragment; use graphql_ir::LinkedField; @@ -28,6 +34,7 @@ use graphql_syntax::OperationKind; use intern::string_key::Intern; use intern::string_key::StringKey; use intern::Lookup; +use lazy_static::lazy_static; use md5::Digest; use md5::Md5; use relay_config::JsModuleFormat; @@ -37,9 +44,16 @@ use relay_transforms::extract_handle_field_directives; use relay_transforms::extract_values_from_handle_field_directive; use relay_transforms::generate_abstract_type_refinement_key; use relay_transforms::get_fragment_filename; +use relay_transforms::get_normalization_operation_name; +use relay_transforms::get_resolver_fragment_dependency_name; +use relay_transforms::relay_resolvers::get_resolver_info; +use relay_transforms::relay_resolvers::resolver_import_alias; +use relay_transforms::relay_resolvers::ResolverInfo; use relay_transforms::remove_directive; +use relay_transforms::CatchMetadataDirective; use relay_transforms::ClientEdgeMetadata; use relay_transforms::ClientEdgeMetadataDirective; +use relay_transforms::ClientEdgeModelResolver; use relay_transforms::ClientExtensionAbstractTypeMetadataDirective; use relay_transforms::ConnectionConstants; use relay_transforms::ConnectionMetadata; @@ -56,17 +70,16 @@ use relay_transforms::RequiredMetadataDirective; use relay_transforms::ResolverOutputTypeInfo; use relay_transforms::StreamDirective; 
use relay_transforms::CLIENT_EXTENSION_DIRECTIVE_NAME; -use relay_transforms::DEFER_STREAM_CONSTANTS; use relay_transforms::DIRECTIVE_SPLIT_OPERATION; use relay_transforms::INLINE_DIRECTIVE_NAME; use relay_transforms::INTERNAL_METADATA_DIRECTIVE; -use relay_transforms::REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY; use relay_transforms::RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN; -use relay_transforms::RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME; -use relay_transforms::RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME; +use relay_transforms::RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE; use relay_transforms::TYPE_DISCRIMINATOR_DIRECTIVE_NAME; +use schema::Field; use schema::SDLSchema; use schema::Schema; +use schema::Type; use crate::ast::Ast; use crate::ast::AstBuilder; @@ -78,16 +91,20 @@ use crate::ast::ObjectEntry; use crate::ast::Primitive; use crate::ast::QueryID; use crate::ast::RequestParameters; +use crate::ast::ResolverModuleReference; use crate::constants::CODEGEN_CONSTANTS; use crate::object; -use crate::top_level_statements::TopLevelStatements; + +lazy_static! 
{ + pub static ref THROW_ON_FIELD_ERROR_DIRECTIVE_NAME: DirectiveName = + DirectiveName("throwOnFieldError".intern()); +} pub fn build_request_params_ast_key( schema: &SDLSchema, request_parameters: RequestParameters<'_>, ast_builder: &mut AstBuilder, operation: &OperationDefinition, - top_level_statements: &TopLevelStatements, definition_source_location: WithLocation, project_config: &ProjectConfig, ) -> AstKey { @@ -98,7 +115,7 @@ pub fn build_request_params_ast_key( project_config, definition_source_location, ); - operation_builder.build_request_parameters(operation, request_parameters, top_level_statements) + operation_builder.build_request_parameters(operation, request_parameters) } pub fn build_provided_variables( @@ -135,7 +152,7 @@ pub fn build_request( project_config, definition_source_location, ); - let operation = Primitive::Key(operation_builder.build_operation(operation)); + let operation_primitive = Primitive::Key(operation_builder.build_operation(operation)); let mut fragment_builder = CodegenBuilder::new( schema, CodegenVariant::Reader, @@ -148,7 +165,17 @@ pub fn build_request( ast_builder.intern(Ast::Object(object! { fragment: fragment, kind: Primitive::String(CODEGEN_CONSTANTS.request), - operation: operation, + operation: operation_primitive, + params: Primitive::Key(request_parameters), + })) +} + +pub fn build_preloadable_request( + ast_builder: &mut AstBuilder, + request_parameters: AstKey, +) -> AstKey { + ast_builder.intern(Ast::Object(object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.preloadable_concrete_request), params: Primitive::Key(request_parameters), })) } @@ -196,6 +223,101 @@ pub fn build_fragment( builder.build_fragment(fragment, false) } +pub fn build_resolvers_schema( + ast_builder: &mut AstBuilder, + schema: &SDLSchema, + project_config: &ProjectConfig, +) -> AstKey { + let artifact_path = &project_config + .resolvers_schema_module + .as_ref() + .unwrap() + .path; + + let mut map = vec![]; + for object in schema.get_objects() { + let mut fields = vec![]; + for field in object.fields.iter().map(|field_id| schema.field(*field_id)) { + if let Some(Ok(ResolverInfo { + import_path, + import_name: Some(import_name), + .. + })) = get_resolver_info(schema, field, field.name.location) + { + if field + .directives + .named(*RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE) + .is_some() + { + continue; + } + fields.push(ObjectEntry { + key: field.name.item, + value: Primitive::Key(build_resolver_info( + ast_builder, + project_config, + artifact_path, + field, + import_path, + ModuleImportName::Named { + name: import_name, + import_as: Some(resolver_import_alias( + object.name.item.0, + field.name.item, + )), + }, + )), + }); + } + } + if !fields.is_empty() { + fields.sort_by_key(|field| field.key); + map.push(ObjectEntry { + key: object.name.item.0, + value: Primitive::Key(ast_builder.intern(Ast::Object(fields))), + }) + } + } + map.sort_by_key(|field| field.key); + + ast_builder.intern(Ast::Object(map)) +} + +fn build_resolver_info( + ast_builder: &mut AstBuilder, + project_config: &ProjectConfig, + artifact_path: &PathBuf, + field: &Field, + import_path: StringKey, + import_name: ModuleImportName, +) -> AstKey { + ast_builder.intern(Ast::Object(object! 
{ + resolver_function: Primitive::JSModuleDependency(JSModuleDependency { + path: project_config.js_module_import_identifier( + artifact_path, + &PathBuf::from(import_path.lookup()), + ), + import_name, + }), + root_fragment: match get_resolver_fragment_dependency_name(field) { + Some(name) => { + let definition_name = WithLocation::new( + field.name.location, + get_normalization_operation_name(name.0).intern(), + ); + Primitive::JSModuleDependency(JSModuleDependency { + path: project_config.js_module_import_identifier( + artifact_path, + &project_config.artifact_path_for_definition(definition_name), + ), + import_name: ModuleImportName::Default(definition_name.item), + }) + } + None => Primitive::SkippableNull, + }, + })) +} + pub struct CodegenBuilder<'schema, 'builder, 'config> { connection_constants: ConnectionConstants, schema: &'schema SDLSchema, @@ -317,7 +439,7 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { pub(crate) fn build_fragment( &mut self, fragment: &FragmentDefinition, - skip_metadata: bool, + skip_connection_metadata: bool, ) -> AstKey { let mut context = ContextualMetadata::default(); if fragment.directives.named(*INLINE_DIRECTIVE_NAME).is_some() { @@ -330,11 +452,7 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { &fragment.variable_definitions, &fragment.used_global_variables), kind: Primitive::String(CODEGEN_CONSTANTS.fragment_value), - metadata: if skip_metadata && !context.has_client_edges { - Primitive::SkippableNull - } else { - self.build_fragment_metadata(context, fragment) - }, + metadata: self.build_fragment_metadata(context, fragment, skip_connection_metadata), name: Primitive::String(fragment.name.item.0), selections: selections, type_: Primitive::String(self.schema.get_type_name(fragment.type_condition)), @@ -355,6 +473,7 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { // NOTE: an owned value here ensures that the caller must construct 
the context prior to building the metadata object context: ContextualMetadata, fragment: &FragmentDefinition, + skip_connection_metadata: bool, ) -> Primitive { let connection_metadata = extract_connection_metadata_from_directive(&fragment.directives); @@ -366,8 +485,10 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { }; let mut metadata = vec![]; - if let Some(connection_metadata) = &connection_metadata { - metadata.push(self.build_connection_metadata(connection_metadata)) + if !skip_connection_metadata { + if let Some(connection_metadata) = &connection_metadata { + metadata.push(self.build_connection_metadata(connection_metadata)) + } } if unmask { metadata.push(ObjectEntry { @@ -387,6 +508,16 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { value: Primitive::Bool(true), }) } + if fragment + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE_NAME) + .is_some() + { + metadata.push(ObjectEntry { + key: CODEGEN_CONSTANTS.throw_on_field_error, + value: Primitive::Bool(true), + }) + } if let Some(refetch_metadata) = RefetchableMetadata::find(&fragment.directives) { let refetch_connection = if let Some(connection_metadata) = connection_metadata { let metadata = &connection_metadata[0]; // Validated in `transform_refetchable` @@ -438,10 +569,13 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { ), operation: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Name(refetch_metadata.operation_name.into())), }; - if let Some(identifier_field) = refetch_metadata.identifier_field { + if let Some(identifier_info) = &refetch_metadata.identifier_info { refetch_object.push(ObjectEntry { - key: CODEGEN_CONSTANTS.identifier_field, - value: Primitive::String(identifier_field), + key: CODEGEN_CONSTANTS.identifier_info, + value: Primitive::Key(self.object(object! 
{ + identifier_field: Primitive::String(identifier_info.identifier_field), + identifier_query_variable_name: Primitive::String(identifier_info.identifier_query_variable_name), + })), }); } @@ -486,7 +620,12 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { }; if metadata.is_stream_connection { object.push(ObjectEntry { - key: DEFER_STREAM_CONSTANTS.stream_name.0, + key: self + .project_config + .schema_config + .defer_stream_interface + .stream_name + .0, value: Primitive::Bool(true), }) } @@ -532,9 +671,12 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { vec![self.build_fragment_spread(frag_spread)] } Selection::InlineFragment(inline_fragment) => { - let defer = inline_fragment - .directives - .named(DEFER_STREAM_CONSTANTS.defer_name); + let defer = inline_fragment.directives.named( + self.project_config + .schema_config + .defer_stream_interface + .defer_name, + ); if let Some(defer) = defer { vec![self.build_defer(context, inline_fragment, defer)] } else if let Some(inline_data_directive) = @@ -581,7 +723,12 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { } } Selection::LinkedField(field) => { - let stream = field.directives.named(DEFER_STREAM_CONSTANTS.stream_name); + let stream = field.directives.named( + self.project_config + .schema_config + .defer_stream_interface + .stream_name, + ); match stream { Some(stream) => vec![self.build_stream(context, field, stream)], @@ -625,6 +772,8 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { }; if let Some(required_metadata) = RequiredMetadataDirective::find(&field.directives) { self.build_required_field(required_metadata, resolver_primitive) + } else if let Some(catch_metadata) = CatchMetadataDirective::find(&field.directives) { + self.build_catch_field(catch_metadata, resolver_primitive) } else { resolver_primitive } @@ -638,6 +787,32 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 
'builder, 'config> { &mut self, resolver_metadata: &RelayResolverMetadata, inline_fragment: Option, + ) -> Primitive { + if self + .project_config + .resolvers_schema_module + .as_ref() + .is_some_and(|config| config.apply_to_normalization_ast) + { + self.build_normalization_relay_resolver_execution_time_for_worker(resolver_metadata) + } else if self + .project_config + .feature_flags + .enable_resolver_normalization_ast + { + self.build_normalization_relay_resolver_execution_time(resolver_metadata) + } else { + self.build_normalization_relay_resolver_read_time(resolver_metadata, inline_fragment) + } + } + + // For read time execution time Relay Resolvers in the normalization AST, + // we do not need to include resolver modules since those modules will be + // evaluated at read time. + fn build_normalization_relay_resolver_read_time( + &mut self, + resolver_metadata: &RelayResolverMetadata, + inline_fragment: Option, ) -> Primitive { let field_name = resolver_metadata.field_name(self.schema); let field_arguments = &resolver_metadata.field_arguments; @@ -670,8 +845,132 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { })) } + // For execution time Relay Resolvers in the normalization AST, we need to + // also include enough information for resolver function backing each field, + // so that normalization AST have full information on how to resolve client + // edges and fields. That means we need to include the resolver module. 
Note + // that we don't support inline fragment as we did for read time resolvers + fn build_normalization_relay_resolver_execution_time( + &mut self, + resolver_metadata: &RelayResolverMetadata, + ) -> Primitive { + let field_name = resolver_metadata.field_name(self.schema); + let field_arguments = &resolver_metadata.field_arguments; + let args = self.build_arguments(field_arguments); + let is_output_type = resolver_metadata + .output_type_info + .normalization_ast_should_have_is_output_type_true(); + + let variable_name = resolver_metadata.generate_local_resolver_name(self.schema); + let artifact_path = &self + .project_config + .artifact_path_for_definition(self.definition_source_location); + let kind = if resolver_metadata.live { + CODEGEN_CONSTANTS.relay_live_resolver + } else { + CODEGEN_CONSTANTS.relay_resolver + }; + let resolver_info = build_resolver_info( + self.ast_builder, + self.project_config, + artifact_path, + self.schema.field(resolver_metadata.field_id), + resolver_metadata.import_path, + match resolver_metadata.import_name { + Some(name) => ModuleImportName::Named { + name, + import_as: Some(variable_name), + }, + None => ModuleImportName::Default(variable_name), + }, + ); + + Primitive::Key(self.object(object! 
{ + name: Primitive::String(field_name), + args: match args { + None => Primitive::SkippableNull, + Some(key) => Primitive::Key(key), + }, + kind: Primitive::String(kind), + storage_key: match args { + None => Primitive::SkippableNull, + Some(key) => { + if is_static_storage_key_available(&resolver_metadata.field_arguments) { + Primitive::StorageKey(field_name, key) + } else { + Primitive::SkippableNull + } + } + }, + is_output_type: Primitive::Bool(is_output_type), + resolver_info: Primitive::Key(resolver_info), + })) + } + + fn build_normalization_relay_resolver_execution_time_for_worker( + &mut self, + resolver_metadata: &RelayResolverMetadata, + ) -> Primitive { + let field_name = resolver_metadata.field_name(self.schema); + let field_arguments = &resolver_metadata.field_arguments; + let args = self.build_arguments(field_arguments); + let is_output_type = resolver_metadata + .output_type_info + .normalization_ast_should_have_is_output_type_true(); + + let field_type = match resolver_metadata.field(self.schema).parent_type.unwrap() { + Type::Interface(interface_id) => self.schema.interface(interface_id).name.item.0, + Type::Object(object_id) => self.schema.object(object_id).name.item.0, + _ => panic!("Unexpected parent type for resolver."), + }; + + let variable_name = resolver_metadata.generate_local_resolver_name(self.schema); + let kind = if resolver_metadata.live { + CODEGEN_CONSTANTS.relay_live_resolver + } else { + CODEGEN_CONSTANTS.relay_resolver + }; + Primitive::Key(self.object(object! 
{ + name: Primitive::String(field_name), + args: match args { + None => Primitive::SkippableNull, + Some(key) => Primitive::Key(key), + }, + kind: Primitive::String(kind), + storage_key: match args { + None => Primitive::SkippableNull, + Some(key) => { + if is_static_storage_key_available(&resolver_metadata.field_arguments) { + Primitive::StorageKey(field_name, key) + } else { + Primitive::SkippableNull + } + } + }, + is_output_type: Primitive::Bool(is_output_type), + resolver_reference: Primitive::ResolverModuleReference(ResolverModuleReference { + field_type, + resolver_function_name: match resolver_metadata.import_name { + Some(name) => ModuleImportName::Named { + name, + import_as: Some(variable_name), + }, + None => ModuleImportName::Default(variable_name), + }, + }), + })) + } + fn build_scalar_field_and_handles(&mut self, field: &ScalarField) -> Vec { if let Some(resolver_metadata) = RelayResolverMetadata::find(&field.directives) { + if self.variant == CodegenVariant::Reader + && self + .project_config + .feature_flags + .disable_resolver_reader_ast + { + return vec![self.build_scalar_field(field)]; + } return vec![self.build_scalar_backed_resolver_field(field, resolver_metadata)]; } match self.variant { @@ -697,25 +996,31 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { })) } + fn build_catch_field( + &mut self, + catch_metadata: &CatchMetadataDirective, + primitive: Primitive, + ) -> Primitive { + Primitive::Key(self.object(object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.catch_field), + field: primitive, + to: Primitive::String(catch_metadata.to.into()), + path: Primitive::String(catch_metadata.path), + })) + } + fn build_scalar_field(&mut self, field: &ScalarField) -> Primitive { let schema_field = self.schema.field(field.definition.item); let (name, alias) = self.build_field_name_and_alias(schema_field.name.item, field.alias, &field.directives); let args = self.build_arguments(&field.arguments); - let kind = match field - .directives - .named(*REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY) - { - Some(_flight_directive) => Primitive::String(CODEGEN_CONSTANTS.flight_field), - None => Primitive::String(CODEGEN_CONSTANTS.scalar_field), - }; let primitive = Primitive::Key(self.object(object! { :build_alias(alias, name), args: match args { None => Primitive::SkippableNull, Some(key) => Primitive::Key(key), }, - kind: kind, + kind: Primitive::String(CODEGEN_CONSTANTS.scalar_field), name: Primitive::String(name), storage_key: match args { None => Primitive::SkippableNull, @@ -731,6 +1036,8 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { if let Some(required_metadata) = RequiredMetadataDirective::find(&field.directives) { self.build_required_field(required_metadata, primitive) + } else if let Some(catch_metadata) = CatchMetadataDirective::find(&field.directives) { + self.build_catch_field(catch_metadata, primitive) } else { primitive } @@ -829,6 +1136,8 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { if let Some(required_metadata) = RequiredMetadataDirective::find(&field.directives) { self.build_required_field(required_metadata, primitive) + } else if let Some(catch_metadata) = CatchMetadataDirective::find(&field.directives) { + self.build_catch_field(catch_metadata, primitive) } else { primitive } @@ -916,27 +1225,19 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { { let 
fragment_source_location_key = no_inline_metadata.location; - let path_for_artifact = self.project_config.create_path_for_artifact( - fragment_source_location_key, - frag_spread.fragment.item.0.lookup().to_string(), - ); - - let normalization_import_path = self.project_config.js_module_import_path( - self.definition_source_location, - path_for_artifact.to_str().unwrap().intern(), + let normalization_import_path = self.project_config.js_module_import_identifier( + &self + .project_config + .artifact_path_for_definition(self.definition_source_location), + &self.project_config.create_path_for_artifact( + fragment_source_location_key, + frag_spread.fragment.item.0.lookup().to_string(), + ), ); return self .build_normalization_fragment_spread(frag_spread, normalization_import_path); } - if self.variant == CodegenVariant::Normalization - && frag_spread - .directives - .named(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) - .is_some() - { - return self.build_relay_client_component_fragment_spread(frag_spread); - } let args = self.build_arguments(&frag_spread.arguments); let primitive = Primitive::Key(self.object(object! 
{ args: match args { @@ -964,12 +1265,34 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { t, )) }), - })) + })) } else if let Some(resolver_metadata) = RelayResolverMetadata::find(&frag_spread.directives) { let resolver_primitive = match self.variant { CodegenVariant::Reader => { - self.build_reader_relay_resolver(resolver_metadata, Some(primitive)) + if self + .project_config + .feature_flags + .disable_resolver_reader_ast + { + let scalar_field = ScalarField { + alias: resolver_metadata.field_alias.map(WithLocation::generated), + definition: WithLocation::generated(resolver_metadata.field_id), + arguments: resolver_metadata.field_arguments.clone(), + directives: frag_spread + .directives + .iter() + .filter(|directive| { + directive.name.item + != RequiredMetadataDirective::directive_name() + }) + .cloned() + .collect(), + }; + self.build_scalar_field(&scalar_field) + } else { + self.build_reader_relay_resolver(resolver_metadata, Some(primitive)) + } } // We expect all RelayResolver fragment spreads to be inlined into inline fragment spreads when generating Normalization ASTs. 
CodegenVariant::Normalization => panic!( @@ -981,6 +1304,10 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { RequiredMetadataDirective::find(&frag_spread.directives) { self.build_required_field(required_metadata, resolver_primitive) + } else if let Some(catch_metadata) = + CatchMetadataDirective::find(&frag_spread.directives) + { + self.build_catch_field(catch_metadata, resolver_primitive) } else { resolver_primitive } @@ -989,14 +1316,102 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { } } + fn build_client_edge_model_resolvers( + &mut self, + model_resolvers: &[ClientEdgeModelResolver], + relay_resolver_metadata: &RelayResolverMetadata, + ) -> Vec { + model_resolvers + .iter() + .map(|model_resolver| { + let type_name = model_resolver.type_name.item.0; + ObjectEntry { + key: type_name, + value: self.build_client_edge_model_resolver( + model_resolver.type_name, + model_resolver.is_live, + relay_resolver_metadata, + ), + } + }) + .collect() + } + + fn build_client_edge_model_resolver( + &mut self, + type_name: WithLocation, + is_live: bool, + relay_resolver_metadata: &RelayResolverMetadata, + ) -> Primitive { + let id_fragment_artifact_name = self + .project_config + .name + .generate_name_for_object_and_field(type_name.item.0, CODEGEN_CONSTANTS.id); + let path = format!( + "{}.{}", + relay_resolver_metadata.field_path, *RELAY_RESOLVER_MODEL_INSTANCE_FIELD + ) + .intern(); + let model_resolver_metadata = RelayResolverMetadata { + field_id: relay_resolver_metadata.field_id, + import_path: type_name.location.source_location().path().intern(), + import_name: Some(type_name.item.0), + field_alias: None, + field_path: path, + field_arguments: vec![], // The model resolver field does not take GraphQL arguments. 
+ live: is_live, + output_type_info: relay_resolver_metadata.output_type_info.clone(), + fragment_data_injection_mode: Some(( + WithLocation::new( + type_name.location, + FragmentDefinitionName(id_fragment_artifact_name.clone().intern()), + ), + FragmentDataInjectionMode::Field { + name: CODEGEN_CONSTANTS.id, + is_required: true, + }, + )), + }; + let fragment_primitive = Primitive::Key(self.object(object! { + args: Primitive::SkippableNull, + kind: Primitive::String(CODEGEN_CONSTANTS.fragment_spread), + name: Primitive::String(id_fragment_artifact_name.clone().intern()), + })); + self.build_reader_relay_resolver(&model_resolver_metadata, Some(fragment_primitive)) + } + + fn build_reader_relay_resolver_args( + &mut self, + relay_resolver_metadata: &RelayResolverMetadata, + ) -> Primitive { + let field = relay_resolver_metadata.field(self.schema); + // Check field.arguments here instead of relay_resolver_metadata.field_arguments. field.arguments is partitioned into + // field_arguments and fragment_arguments during the relay resolvers transform. If the resolver field is only passed a + // fragment argument, we should fall back to the else case where we will return an empty array primitive as the resolver + // field arguments instead of returning null. + if field.arguments.is_empty() { + Primitive::SkippableNull + } else { + self.build_arguments(&relay_resolver_metadata.field_arguments) + .map_or_else( + || { + // Passing an empty array here, rather than `null`, allows the runtime + // to know that it should still create an arguments object to pass to + // the resolver, even though no arguments were provided at the callsite, + // since all arguments are optional. 
+ Primitive::Key(self.array(vec![])) + }, + Primitive::Key, + ) + } + } + fn build_reader_relay_resolver( &mut self, relay_resolver_metadata: &RelayResolverMetadata, fragment_primitive: Option, ) -> Primitive { - let module = relay_resolver_metadata.import_path; let field = relay_resolver_metadata.field(self.schema); - let field_arguments = &relay_resolver_metadata.field_arguments; let field_alias = relay_resolver_metadata.field_alias; let field_name = field.name.item; let path = relay_resolver_metadata.field_path; @@ -1007,11 +1422,14 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { CODEGEN_CONSTANTS.relay_resolver }; - let import_path = self - .project_config - .js_module_import_path(self.definition_source_location, module); + let import_path = self.project_config.js_module_import_identifier( + &self + .project_config + .artifact_path_for_definition(self.definition_source_location), + &PathBuf::from(relay_resolver_metadata.import_path.lookup()), + ); - let args = self.build_arguments(field_arguments); + let args = self.build_reader_relay_resolver_args(relay_resolver_metadata); let variable_name = relay_resolver_metadata.generate_local_resolver_name(self.schema); let resolver_js_module = JSModuleDependency { @@ -1028,14 +1446,14 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { let resolver_module = if let Some((fragment_name, injection_mode)) = relay_resolver_metadata.fragment_data_injection_mode { - let path_for_artifact = self.project_config.create_path_for_artifact( - fragment_name.location.source_location(), - fragment_name.item.to_string(), - ); - - let fragment_import_path = self.project_config.js_module_import_path( - self.definition_source_location, - path_for_artifact.to_str().unwrap().intern(), + let fragment_import_path = self.project_config.js_module_import_identifier( + &self + .project_config + .artifact_path_for_definition(self.definition_source_location), + 
&self.project_config.create_path_for_artifact( + fragment_name.location.source_location(), + fragment_name.item.to_string(), + ), ); Primitive::RelayResolverModel { @@ -1052,36 +1470,13 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { Primitive::JSModuleDependency(resolver_js_module) }; - let resolver_module = if let Some((field_id, plural)) = - match &relay_resolver_metadata.output_type_info { - ResolverOutputTypeInfo::ScalarField => None, - ResolverOutputTypeInfo::Composite(info) => info - .weak_object_instance_field - .map(|field_name| (field_name, info.plural)), - ResolverOutputTypeInfo::EdgeTo => None, - ResolverOutputTypeInfo::Legacy => None, - } { - let key = self.schema.field(field_id).name.item; - Primitive::RelayResolverWeakObjectWrapper { - resolver: Box::new(resolver_module), - key, - plural, - live: relay_resolver_metadata.live, - } - } else { - resolver_module - }; - // For Relay Resolvers in the Reader AST, we need enough // information to _read_ the resolver. Specifically, enough data // to construct a fragment key, and an import of the resolver // module itself. let mut object_props = object! 
{ :build_alias(field_alias, field_name), - args: match args { - None => Primitive::SkippableNull, - Some(key) => Primitive::Key(key), - }, + args: args, fragment: match fragment_primitive { None => Primitive::SkippableNull, Some(fragment_primitive) => fragment_primitive, @@ -1100,14 +1495,14 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { .location .source_location(); - let path_for_artifact = self.project_config.create_path_for_artifact( - normalization_artifact_source_location, - normalization_info.normalization_operation.item.to_string(), - ); - - let normalization_import_path = self.project_config.js_module_import_path( - self.definition_source_location, - path_for_artifact.to_str().unwrap().intern(), + let normalization_import_path = self.project_config.js_module_import_identifier( + &self + .project_config + .artifact_path_for_definition(self.definition_source_location), + &self.project_config.create_path_for_artifact( + normalization_artifact_source_location, + normalization_info.normalization_operation.item.to_string(), + ), ); let concrete_type = if normalization_info.inner_type.is_abstract_type() { Primitive::Null @@ -1115,13 +1510,22 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { Primitive::String(self.schema.get_type_name(normalization_info.inner_type)) }; - let normalization_info = object! { - concrete_type: concrete_type, - plural: Primitive::Bool(normalization_info.plural), - normalization_node: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Path { - path: normalization_import_path, - name: normalization_info.normalization_operation.item.into() - }), + let normalization_info = if normalization_info.weak_object_instance_field.is_some() { + object! { + kind: Primitive::String(CODEGEN_CONSTANTS.weak_model), + concrete_type: concrete_type, + plural: Primitive::Bool(normalization_info.plural), + } + } else { + object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.output_type), + concrete_type: concrete_type, + plural: Primitive::Bool(normalization_info.plural), + normalization_node: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Path { + path: normalization_import_path, + name: normalization_info.normalization_operation.item.into(), + }), + } }; object_props.push(ObjectEntry { @@ -1150,42 +1554,11 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { name: frag_spread.fragment.item.into(), }), kind: Primitive::String( - if frag_spread - .directives - .named(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) - .is_some() - { - CODEGEN_CONSTANTS.client_component - } else { CODEGEN_CONSTANTS.fragment_spread - }, ), })) } - fn build_relay_client_component_fragment_spread( - &mut self, - frag_spread: &FragmentSpread, - ) -> Primitive { - let normalization_name = frag_spread - .directives - .named(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) - .unwrap() - .arguments - .named(*RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME) - .unwrap() - .value - .item - .expect_string_literal() - .to_string() - .trim_end_matches(".graphql") - .intern(); - Primitive::Key(self.object(object! 
{ - fragment: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Name(ExecutableDefinitionName::OperationDefinitionName(OperationDefinitionName(normalization_name)))), - kind: Primitive::String(CODEGEN_CONSTANTS.client_component), - })) - } - fn build_defer( &mut self, context: &mut ContextualMetadata, @@ -1226,7 +1599,10 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { defer: &Directive, ) -> Primitive { let next_selections = self.build_selections(context, inline_fragment.selections.iter()); - let DeferDirective { if_arg, label_arg } = DeferDirective::from(defer); + let DeferDirective { if_arg, label_arg } = DeferDirective::from( + defer, + &self.project_config.schema_config.defer_stream_interface, + ); let if_variable_name = if_arg.and_then(|arg| match &arg.value.item { // `true` is the default, remove as the AST is typed just as a variable name string // `false` constant values should've been transformed away in skip_unreachable_node @@ -1255,7 +1631,10 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { &LinkedField { directives: remove_directive( &linked_field.directives, - DEFER_STREAM_CONSTANTS.stream_name, + self.project_config + .schema_config + .defer_stream_interface + .stream_name, ), ..linked_field.to_owned() }, @@ -1272,7 +1651,10 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { label_arg, use_customized_batch_arg: _, initial_count_arg: _, - } = StreamDirective::from(stream); + } = StreamDirective::from( + stream, + &self.project_config.schema_config.defer_stream_interface, + ); let if_variable_name = if_arg.and_then(|arg| match &arg.value.item { // `true` is the default, remove as the AST is typed just as a variable name string // `false` constant values should've been transformed away in skip_unreachable_node @@ -1282,15 +1664,38 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { }); let label_name = 
label_arg.unwrap().value.item.expect_string_literal(); self.object(object! { - if_: Primitive::string_or_null(if_variable_name.map(|variable_name| variable_name.0)), - kind: Primitive::String(CODEGEN_CONSTANTS.stream), - label: Primitive::String(label_name), - selections: next_selections, - }) + if_: Primitive::string_or_null(if_variable_name.map(|variable_name| variable_name.0)), + kind: Primitive::String(CODEGEN_CONSTANTS.stream), + label: Primitive::String(label_name), + selections: next_selections, + }) } }) } + fn build_client_edge_with_enabled_resolver_normalization_ast( + &mut self, + context: &mut ContextualMetadata, + client_edge_metadata: ClientEdgeMetadata<'_>, + ) -> Primitive { + let backing_field_primitives = + self.build_selections_from_selection(context, &client_edge_metadata.backing_field); + + if backing_field_primitives.len() != 1 { + panic!( + "Expected client edge backing field to be transformed into exactly one primitive." + ) + } + let backing_field = backing_field_primitives.into_iter().next().unwrap(); + + let selections_item = self.build_linked_field(context, client_edge_metadata.linked_field); + Primitive::Key(self.object(object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.client_edge_to_client_object), + client_edge_backing_field_key: backing_field, + client_edge_selections_key: selections_item, + })) + } + fn build_normalization_client_edge( &mut self, context: &mut ContextualMetadata, @@ -1340,8 +1745,9 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { fn build_reader_client_edge( &mut self, context: &mut ContextualMetadata, - client_edge_metadata: ClientEdgeMetadata<'_>, + client_edge_metadata: &ClientEdgeMetadata<'_>, required_metadata: Option, + catch_metadata: Option, ) -> Primitive { context.has_client_edges = true; let backing_field = match &client_edge_metadata.backing_field { @@ -1386,31 +1792,66 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { ) }; - let field = match client_edge_metadata.metadata_directive { - ClientEdgeMetadataDirective::ServerObject { query_name, .. } => { - Primitive::Key(self.object(object! { - kind: Primitive::String(CODEGEN_CONSTANTS.client_edge_to_server_object), - operation: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Name(query_name.into())), - client_edge_backing_field_key: backing_field, - client_edge_selections_key: selections_item, - })) - } - ClientEdgeMetadataDirective::ClientObject { type_name, .. } => { - let concrete_type = match type_name { - Some(type_name) => Primitive::String(type_name.0), - None => Primitive::Null, - }; - Primitive::Key(self.object(object! { - kind: Primitive::String(CODEGEN_CONSTANTS.client_edge_to_client_object), - concrete_type: concrete_type, - client_edge_backing_field_key: backing_field, - client_edge_selections_key: selections_item, - })) - } - }; + let field = match &client_edge_metadata.metadata_directive { + ClientEdgeMetadataDirective::ServerObject { query_name, .. } => { + Primitive::Key(self.object(object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.client_edge_to_server_object), + operation: Primitive::GraphQLModuleDependency(GraphQLModuleDependency::Name(ExecutableDefinitionName::OperationDefinitionName(OperationDefinitionName(query_name.0)))), + client_edge_backing_field_key: backing_field, + client_edge_selections_key: selections_item, + })) + } + + ClientEdgeMetadataDirective::ClientObject { type_name, model_resolvers, .. } => { + if self.project_config.feature_flags.disable_resolver_reader_ast { + selections_item + } else { + let concrete_type = type_name.map_or(Primitive::Null, |type_name| Primitive::String(type_name.0)); + let field_directives = match &client_edge_metadata.backing_field { + Selection::ScalarField(field) => Some(&field.directives), + Selection::FragmentSpread(frag_spread) => Some(&frag_spread.directives), + _ => panic!( + "Expected Client Edge backing field to be a Relay Resolver. {:?}", + client_edge_metadata.backing_field + ), + }; + let model_resolver_field = field_directives.and_then(|field_directives| { + let resolver_metadata = RelayResolverMetadata::find(field_directives).unwrap(); + let is_weak_resolver = matches!(resolver_metadata.output_type_info, ResolverOutputTypeInfo::Composite(_)); + if !is_weak_resolver { + let model_resolver_primitives = self.build_client_edge_model_resolvers( + model_resolvers, + resolver_metadata, + ); + if model_resolver_primitives.is_empty() { + None + } else { + Some(self.object(model_resolver_primitives)) + } + } else { + None + } + }); + let client_edge_model_resolvers = if let Some(model_resolver_field) = model_resolver_field { + Primitive::Key(model_resolver_field) + } else { + Primitive::Null + }; + Primitive::Key(self.object(object! 
{ + kind: Primitive::String(CODEGEN_CONSTANTS.client_edge_to_client_object), + concrete_type: concrete_type, + client_edge_model_resolvers: client_edge_model_resolvers, + client_edge_backing_field_key: backing_field, + client_edge_selections_key: selections_item, + })) + } + } + }; if let Some(required_metadata) = required_metadata { self.build_required_field(&required_metadata, field) + } else if let Some(catch_metadata) = catch_metadata { + self.build_catch_field(&catch_metadata, field) } else { field } @@ -1428,14 +1869,28 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { CodegenVariant::Reader => { let required_metadata = RequiredMetadataDirective::find(&inline_frag.directives).cloned(); + let catch_metadata = + CatchMetadataDirective::find(&inline_frag.directives).cloned(); self.build_reader_client_edge( context, - client_edge_metadata, + &client_edge_metadata, required_metadata, + catch_metadata, ) } CodegenVariant::Normalization => { - self.build_normalization_client_edge(context, client_edge_metadata) + if self + .project_config + .feature_flags + .enable_resolver_normalization_ast + { + self.build_client_edge_with_enabled_resolver_normalization_ast( + context, + client_edge_metadata, + ) + } else { + self.build_normalization_client_edge(context, client_edge_metadata) + } } } } else if @@ -1448,6 +1903,21 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { kind: Primitive::String(CODEGEN_CONSTANTS.client_extension), selections: selections, })) + } else if let Some(fragment_alias_metadata) = + FragmentAliasMetadata::find(&inline_frag.directives) + { + let selections = self.build_selections(context, inline_frag.selections.iter()); + let primitive = Primitive::Key(self.object(object! { + kind: Primitive::String(CODEGEN_CONSTANTS.inline_fragment), + selections: selections, + type_: Primitive::SkippableNull, + abstract_key: Primitive::SkippableNull, + })); + Primitive::Key(self.object(object! 
{ + fragment: primitive, + kind: Primitive::String(CODEGEN_CONSTANTS.aliased_inline_fragment_spread), + name: Primitive::String(fragment_alias_metadata.alias.item), + })) } else { // TODO(T63559346): Handle anonymous inline fragments with no directives panic!( @@ -1855,17 +2325,22 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { provider.module_name } else { // This will build a path from the operation artifact to the provider module - self.project_config.js_module_import_path( - operation.name.map(|name| name.0), - provider.module_path().to_str().unwrap().intern(), + self.project_config.js_module_import_identifier( + &self + .project_config + .artifact_path_for_definition(operation.name), + &provider.module_path(), ) }; + let variable_name = + (provider.original_variable_name.to_string() + "_provider").intern(); + Some(ObjectEntry { key: def.name.item.0, value: Primitive::JSModuleDependency(JSModuleDependency { path: provider_module, - import_name: ModuleImportName::Default(provider.module_name), + import_name: ModuleImportName::Default(variable_name), }), }) }) @@ -1882,7 +2357,6 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { &mut self, operation: &OperationDefinition, request_parameters: RequestParameters<'_>, - top_level_statements: &TopLevelStatements, ) -> AstKey { let mut metadata_items: Vec = operation .directives @@ -1919,24 +2393,21 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { metadata_items.sort_unstable_by_key(|entry| entry.key); // Construct metadata object - let metadata_prop = ObjectEntry { - key: CODEGEN_CONSTANTS.metadata, - value: Primitive::Key(self.object(metadata_items)), - }; - let name_prop = ObjectEntry { - key: CODEGEN_CONSTANTS.name, - value: Primitive::String(request_parameters.name), - }; - let operation_kind_prop = ObjectEntry { - key: CODEGEN_CONSTANTS.operation_kind, - value: Primitive::String(match request_parameters.operation_kind { 
- OperationKind::Query => CODEGEN_CONSTANTS.query, - OperationKind::Mutation => CODEGEN_CONSTANTS.mutation, - OperationKind::Subscription => CODEGEN_CONSTANTS.subscription, - }), - }; + let mut params_object = vec![]; - let id_prop = ObjectEntry { + if let Some(ref text) = &request_parameters.text { + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.cache_id, + value: Primitive::RawString(md5(text)), + }); + } else if request_parameters.id.is_none() { + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.cache_id, + value: Primitive::RawString(md5(operation.name.item.0.lookup())), + }); + } + + params_object.push(ObjectEntry { key: CODEGEN_CONSTANTS.id, value: match request_parameters.id { Some(QueryID::Persisted { id, .. }) => Primitive::RawString(id.clone()), @@ -1948,65 +2419,36 @@ impl<'schema, 'builder, 'config> CodegenBuilder<'schema, 'builder, 'config> { } None => Primitive::Null, }, - }; + }); + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.metadata, + value: Primitive::Key(self.object(metadata_items)), + }); + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.name, + value: Primitive::String(request_parameters.name), + }); + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.operation_kind, + value: Primitive::String(match request_parameters.operation_kind { + OperationKind::Query => CODEGEN_CONSTANTS.query, + OperationKind::Mutation => CODEGEN_CONSTANTS.mutation, + OperationKind::Subscription => CODEGEN_CONSTANTS.subscription, + }), + }); - let mut params_object = if let Some(text) = request_parameters.text { - vec![ - ObjectEntry { - key: CODEGEN_CONSTANTS.cache_id, - value: Primitive::RawString(md5(&text)), - }, - id_prop, - metadata_prop, - name_prop, - operation_kind_prop, - ObjectEntry { - key: CODEGEN_CONSTANTS.text, - value: Primitive::RawString(text), - }, - ] - } else if request_parameters.id.is_some() { - vec![ - id_prop, - metadata_prop, - name_prop, - operation_kind_prop, - ObjectEntry { - key: 
CODEGEN_CONSTANTS.text, - value: Primitive::Null, - }, - ] - } else { - vec![ - ObjectEntry { - key: CODEGEN_CONSTANTS.cache_id, - value: Primitive::RawString(md5(operation.name.item.0.lookup())), - }, - id_prop, - metadata_prop, - name_prop, - operation_kind_prop, - ObjectEntry { - key: CODEGEN_CONSTANTS.text, - value: Primitive::Null, - }, - ] - }; + params_object.push(ObjectEntry { + key: CODEGEN_CONSTANTS.text, + value: match request_parameters.text { + Some(text) => Primitive::RawString(text), + None => Primitive::Null, + }, + }); - let provided_variables = if top_level_statements - .contains(CODEGEN_CONSTANTS.provided_variables_definition.lookup()) - { - Some(Primitive::Variable( - CODEGEN_CONSTANTS.provided_variables_definition, - )) - } else { - self.build_operation_provided_variables(operation) - .map(Primitive::Key) - }; - if let Some(value) = provided_variables { + if let Some(provided_variables) = self.build_operation_provided_variables(operation) { params_object.push(ObjectEntry { key: CODEGEN_CONSTANTS.provided_variables, - value, + value: Primitive::Key(provided_variables), }); } diff --git a/compiler/crates/relay-codegen/src/constants.rs b/compiler/crates/relay-codegen/src/constants.rs index 706a508d2987c..1a6ff1782c456 100644 --- a/compiler/crates/relay-codegen/src/constants.rs +++ b/compiler/crates/relay-codegen/src/constants.rs @@ -20,9 +20,10 @@ pub struct CodegenConstants { pub argument_definitions: StringKey, pub backward: StringKey, pub cache_id: StringKey, + pub catch_field: StringKey, pub client_abstract_types: StringKey, - pub client_component: StringKey, pub client_edge_backing_field_key: StringKey, + pub client_edge_model_resolvers: StringKey, pub client_edge_selections_key: StringKey, pub client_edge_to_client_object: StringKey, pub client_edge_to_server_object: StringKey, @@ -42,10 +43,10 @@ pub struct CodegenConstants { pub document_name: StringKey, pub dynamic_key_argument: StringKey, pub dynamic_key: StringKey, + pub 
throw_on_field_error: StringKey, pub field: StringKey, pub fields: StringKey, pub filters: StringKey, - pub flight_field: StringKey, pub forward: StringKey, pub fragment_name: StringKey, pub fragment_path_in_result: StringKey, @@ -60,6 +61,8 @@ pub struct CodegenConstants { pub has_client_edges: StringKey, pub id: StringKey, pub identifier_field: StringKey, + pub identifier_query_variable_name: StringKey, + pub identifier_info: StringKey, pub if_: StringKey, pub inline_data_fragment_spread: StringKey, pub inline_data_fragment: StringKey, @@ -86,11 +89,12 @@ pub struct CodegenConstants { pub operation_module_provider: StringKey, pub operation_value: StringKey, pub operation: StringKey, + pub output_type: StringKey, pub params: StringKey, pub passing_value: StringKey, pub path: StringKey, pub plural: StringKey, - pub provided_variables_definition: StringKey, + pub preloadable_concrete_request: StringKey, pub provided_variables: StringKey, pub provider: StringKey, pub query: StringKey, @@ -100,8 +104,12 @@ pub struct CodegenConstants { pub relay_resolver: StringKey, pub request: StringKey, pub required_field: StringKey, + pub resolver_function: StringKey, + pub resolver_info: StringKey, pub resolver_module: StringKey, + pub resolver_reference: StringKey, pub root_argument: StringKey, + pub root_fragment: StringKey, pub scalar_field: StringKey, pub scalar_handle: StringKey, pub selections: StringKey, @@ -110,12 +118,14 @@ pub struct CodegenConstants { pub stream: StringKey, pub subscription: StringKey, pub text: StringKey, + pub to: StringKey, pub type_: StringKey, pub type_discriminator: StringKey, pub updatable_query: StringKey, pub value: StringKey, pub variable_name: StringKey, pub variable: StringKey, + pub weak_model: StringKey, } lazy_static! { @@ -130,9 +140,10 @@ lazy_static! 
{ argument_definitions: "argumentDefinitions".intern(), backward: "backward".intern(), cache_id: "cacheID".intern(), + catch_field: "CatchField".intern(), client_abstract_types: "clientAbstractTypes".intern(), - client_component: "ClientComponent".intern(), client_edge_backing_field_key: "backingField".intern(), + client_edge_model_resolvers: "modelResolvers".intern(), client_edge_selections_key: "linkedField".intern(), client_edge_to_client_object: "ClientEdgeToClientObject".intern(), client_edge_to_server_object: "ClientEdgeToServerObject".intern(), @@ -152,10 +163,10 @@ lazy_static! { document_name: "documentName".intern(), dynamic_key_argument: "__dynamicKey".intern(), dynamic_key: "dynamicKey".intern(), + throw_on_field_error: "throwOnFieldError".intern(), field: "field".intern(), fields: "fields".intern(), filters: "filters".intern(), - flight_field: "FlightField".intern(), forward: "forward".intern(), fragment_name: "fragmentName".intern(), fragment_path_in_result: "fragmentPathInResult".intern(), @@ -170,6 +181,8 @@ lazy_static! { has_client_edges: "hasClientEdges".intern(), id: "id".intern(), identifier_field: "identifierField".intern(), + identifier_query_variable_name: "identifierQueryVariableName".intern(), + identifier_info: "identifierInfo".intern(), if_: "if".intern(), inline_data_fragment_spread: "InlineDataFragmentSpread".intern(), inline_data_fragment: "InlineDataFragment".intern(), @@ -196,11 +209,12 @@ lazy_static! 
{ operation_module_provider: "operationModuleProvider".intern(), operation_value: "Operation".intern(), operation: "operation".intern(), + output_type: "OutputType".intern(), params: "params".intern(), passing_value: "passingValue".intern(), path: "path".intern(), plural: "plural".intern(), - provided_variables_definition: "providedVariablesDefinition".intern(), + preloadable_concrete_request: "PreloadableConcreteRequest".intern(), provided_variables: "providedVariables".intern(), provider: "provider".intern(), query: "query".intern(), @@ -210,8 +224,12 @@ lazy_static! { relay_resolver: "RelayResolver".intern(), request: "Request".intern(), required_field: "RequiredField".intern(), + resolver_function: "resolverFunction".intern(), + resolver_info: "resolverInfo".intern(), resolver_module: "resolverModule".intern(), + resolver_reference: "resolverReference".intern(), root_argument: "RootArgument".intern(), + root_fragment: "rootFragment".intern(), scalar_field: "ScalarField".intern(), scalar_handle: "ScalarHandle".intern(), selections: "selections".intern(), @@ -220,11 +238,13 @@ lazy_static! 
{ stream: "Stream".intern(), subscription: "subscription".intern(), text: "text".intern(), + to: "to".intern(), type_: "type".intern(), type_discriminator: "TypeDiscriminator".intern(), updatable_query: "UpdatableQuery".intern(), value: "value".intern(), variable_name: "variableName".intern(), variable: "Variable".intern(), + weak_model: "WeakModel".intern(), }; } diff --git a/compiler/crates/relay-codegen/src/lib.rs b/compiler/crates/relay-codegen/src/lib.rs index ebf9c4ebfc487..be35f149133ba 100644 --- a/compiler/crates/relay-codegen/src/lib.rs +++ b/compiler/crates/relay-codegen/src/lib.rs @@ -13,11 +13,12 @@ mod ast; mod build_ast; mod constants; mod indentation; -mod printer; +pub mod printer; mod top_level_statements; mod utils; pub use ast::AstBuilder; +pub use ast::JSModule; pub use ast::Primitive; pub use ast::QueryID; pub use ast::RequestParameters; @@ -28,6 +29,7 @@ pub use build_ast::CodegenVariant; pub use constants::CODEGEN_CONSTANTS; pub use printer::print_fragment; pub use printer::print_operation; +pub use printer::print_provided_variables; pub use printer::print_request; pub use printer::print_request_params; pub use printer::JSONPrinter; diff --git a/compiler/crates/relay-codegen/src/printer.rs b/compiler/crates/relay-codegen/src/printer.rs index 832937813fee5..40d112eb3f956 100644 --- a/compiler/crates/relay-codegen/src/printer.rs +++ b/compiler/crates/relay-codegen/src/printer.rs @@ -35,12 +35,15 @@ use crate::ast::ObjectEntry; use crate::ast::Primitive; use crate::ast::QueryID; use crate::ast::RequestParameters; +use crate::ast::ResolverModuleReference; use crate::build_ast::build_fragment; use crate::build_ast::build_operation; +use crate::build_ast::build_preloadable_request; use crate::build_ast::build_provided_variables; use crate::build_ast::build_request; use crate::build_ast::build_request_params; use crate::build_ast::build_request_params_ast_key; +use crate::build_ast::build_resolvers_schema; use crate::constants::CODEGEN_CONSTANTS; 
use crate::indentation::print_indentation; use crate::object; @@ -95,14 +98,12 @@ pub fn print_request_params( ) -> String { let mut request_parameters = build_request_params(operation); request_parameters.id = query_id; - - let mut builder = AstBuilder::default(); + let mut builder: AstBuilder = AstBuilder::default(); let request_parameters_ast_key = build_request_params_ast_key( schema, request_parameters, &mut builder, operation, - top_level_statements, operation.name.map(|x| x.0), project_config, ); @@ -110,6 +111,22 @@ pub fn print_request_params( printer.print(request_parameters_ast_key, false) } +pub fn print_provided_variables( + schema: &SDLSchema, + operation: &OperationDefinition, + project_config: &ProjectConfig, +) -> Option { + Printer::without_dedupe(project_config).print_provided_variables(schema, operation) +} + +pub fn print_resolvers_schema( + schema: &SDLSchema, + project_config: &ProjectConfig, + top_level_statements: &mut TopLevelStatements, +) -> String { + Printer::without_dedupe(project_config).print_resolvers_schema(schema, top_level_statements) +} + pub struct Printer<'p> { project_config: &'p ProjectConfig, builder: AstBuilder, @@ -137,17 +154,23 @@ impl<'p> Printer<'p> { &mut self, schema: &SDLSchema, operation: &OperationDefinition, - top_level_statements: &mut TopLevelStatements, ) -> Option { - let key = build_provided_variables( + // We do not expect the generate of provided variables object + // to mutate any top-level statements + let mut top_level_statements = Default::default(); + let provided_variables = build_provided_variables( schema, &mut self.builder, operation, operation.name.map(|x| x.0), self.project_config, )?; - let printer = JSONPrinter::new(&self.builder, self.project_config, top_level_statements); - Some(printer.print(key, self.dedupe)) + let printer = JSONPrinter::new( + &self.builder, + self.project_config, + &mut top_level_statements, + ); + Some(printer.print(provided_variables, self.dedupe)) } pub fn 
print_updatable_query( @@ -190,11 +213,9 @@ impl<'p> Printer<'p> { request_parameters, &mut self.builder, operation, - top_level_statements, operation.name.map(|x| x.0), self.project_config, ); - let key = build_request( schema, &mut self.builder, @@ -208,6 +229,26 @@ impl<'p> Printer<'p> { printer.print(key, self.dedupe) } + pub fn print_preloadable_request( + &mut self, + schema: &SDLSchema, + request_parameters: RequestParameters<'_>, + operation: &OperationDefinition, + top_level_statements: &mut TopLevelStatements, + ) -> String { + let request_parameters = build_request_params_ast_key( + schema, + request_parameters, + &mut self.builder, + operation, + operation.name.map(|x| x.0), + self.project_config, + ); + let key = build_preloadable_request(&mut self.builder, request_parameters); + let printer = JSONPrinter::new(&self.builder, self.project_config, top_level_statements); + printer.print(key, self.dedupe) + } + pub fn print_operation( &mut self, schema: &SDLSchema, @@ -254,13 +295,22 @@ impl<'p> Printer<'p> { request_parameters, &mut self.builder, operation, - top_level_statements, operation.name.map(|x| x.0), self.project_config, ); let printer = JSONPrinter::new(&self.builder, self.project_config, top_level_statements); printer.print(key, self.dedupe) } + + pub fn print_resolvers_schema( + &mut self, + schema: &SDLSchema, + top_level_statements: &mut TopLevelStatements, + ) -> String { + let key = build_resolvers_schema(&mut self.builder, schema, self.project_config); + let printer = JSONPrinter::new(&self.builder, self.project_config, top_level_statements); + printer.print(key, self.dedupe) + } } type VariableDefinitions = IndexMap; @@ -499,6 +549,12 @@ impl<'b> JSONPrinter<'b> { import_name.clone(), get_module_path(self.js_module_format, *path), ), + Primitive::ResolverModuleReference(ResolverModuleReference { + field_type, + resolver_function_name, + }) => { + self.write_resolver_module_reference(f, resolver_function_name.clone(), field_type) + } 
Primitive::DynamicImport { provider, module } => match provider { DynamicModuleProvider::JSResource => { self.top_level_statements.insert( @@ -530,20 +586,26 @@ impl<'b> JSONPrinter<'b> { js_module, injected_field_name_details.as_ref().copied(), ), - Primitive::RelayResolverWeakObjectWrapper { - resolver, - key, - plural, - live, - } => self.write_relay_resolver_weak_object_wrapper( - f, - resolver, - *key, - *plural, - *live, - indent, - is_dedupe_var, - ), + } + } + + fn write_resolver_module_reference( + &mut self, + f: &mut String, + resolver_function_name: ModuleImportName, + field_type: &StringKey, + ) -> FmtResult { + match resolver_function_name { + ModuleImportName::Default(_) => { + panic!("Expected a named import for Relay Resolvers") + } + ModuleImportName::Named { name, .. } => { + write!( + f, + "{{ resolverFunctionName: \"{}\", fieldType: \"{}\" }}", + name, field_type + ) + } } } @@ -623,46 +685,16 @@ impl<'b> JSONPrinter<'b> { } write!(f, ")") } - - fn write_relay_resolver_weak_object_wrapper( - &mut self, - f: &mut String, - resolver: &Primitive, - key: StringKey, - plural: bool, - live: bool, - indent: usize, - is_dedupe_var: bool, - ) -> FmtResult { - let relay_runtime_experimental = "relay-runtime/experimental"; - let weak_object_wrapper = if live { - "weakObjectWrapperLive" - } else { - "weakObjectWrapper" - }; - - self.write_js_dependency( - f, - ModuleImportName::Named { - name: weak_object_wrapper.intern(), - import_as: None, - }, - Cow::Borrowed(relay_runtime_experimental), - )?; - write!(f, "(")?; - self.print_primitive(f, resolver, indent + 1, is_dedupe_var)?; - write!(f, ", '{}', {})", key, plural) - } } -fn get_module_path(js_module_format: JsModuleFormat, key: StringKey) -> Cow<'static, str> { +pub fn get_module_path(js_module_format: JsModuleFormat, key: StringKey) -> Cow<'static, str> { match js_module_format { JsModuleFormat::CommonJS => { let path = Path::new(key.lookup()); let extension = path.extension(); if let Some(extension) 
= extension { - if extension == "ts" || extension == "js" { + if extension == "ts" || extension == "tsx" || extension == "js" { let path_without_extension = path.with_extension(""); let path_without_extension = path_without_extension @@ -819,10 +851,8 @@ fn write_constant_value(f: &mut String, builder: &AstBuilder, value: &Primitive) Primitive::RawString(_) => panic!("Unexpected RawString"), Primitive::GraphQLModuleDependency(_) => panic!("Unexpected GraphQLModuleDependency"), Primitive::JSModuleDependency { .. } => panic!("Unexpected JSModuleDependency"), + Primitive::ResolverModuleReference { .. } => panic!("Unexpected ResolverModuleReference"), Primitive::DynamicImport { .. } => panic!("Unexpected DynamicImport"), Primitive::RelayResolverModel { .. } => panic!("Unexpected RelayResolver"), - Primitive::RelayResolverWeakObjectWrapper { .. } => { - panic!("Unexpected RelayResolverWeakObjectWrapper") - } } } diff --git a/compiler/crates/relay-codegen/src/top_level_statements.rs b/compiler/crates/relay-codegen/src/top_level_statements.rs index a346700f020b6..55d4fc4c04ca4 100644 --- a/compiler/crates/relay-codegen/src/top_level_statements.rs +++ b/compiler/crates/relay-codegen/src/top_level_statements.rs @@ -18,7 +18,6 @@ pub struct TopLevelStatements(IndexMap match dependency.import_name { ModuleImportName::Default(default_import) => { - write!(f, "import {} from '{}';\n", default_import, dependency.path)? + writeln!(f, "import {} from '{}';", default_import, dependency.path)? } ModuleImportName::Named { name, import_as } => { if let Some(import_as) = import_as { - write!( + writeln!( f, - "import {{{} as {}}} from '{}';\n", + "import {{{} as {}}} from '{}';", name, import_as, dependency.path )? } else { - write!(f, "import {{{}}} from '{}';\n", name, dependency.path)? + writeln!(f, "import {{{}}} from '{}';", name, dependency.path)? 
} } }, - TopLevelStatement::VariableDefinition(text) => write!(f, "{}", text)?, }; Ok(()) } diff --git a/compiler/crates/relay-codegen/tests/aliased_fragments.rs b/compiler/crates/relay-codegen/tests/aliased_fragments.rs new file mode 100644 index 0000000000000..b0c700853be67 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/aliased_fragments.rs @@ -0,0 +1,66 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_transforms::fragment_alias_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let base = fixture.content; + let schema = get_test_schema(); + + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + fragment_alias_directive(&program, true, true) + .map(|next_program| { + next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, 
+ def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>() + .join("\n\n") + }) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} diff --git a/compiler/crates/relay-codegen/tests/aliased_fragments/mod.rs b/compiler/crates/relay-codegen/tests/aliased_fragments/mod.rs deleted file mode 100644 index b1676e440b80e..0000000000000 --- a/compiler/crates/relay-codegen/tests/aliased_fragments/mod.rs +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::FeatureFlag; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_transforms::fragment_alias_directive; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let base = fixture.content; - let schema = get_test_schema(); - - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - fragment_alias_directive(&program, &FeatureFlag::Enabled) - .map(|next_program| { - next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: 
JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>() - .join("\n\n") - }) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} diff --git a/compiler/crates/relay-codegen/tests/aliased_fragments_test.rs b/compiler/crates/relay-codegen/tests/aliased_fragments_test.rs index f591fa6b9460e..937840ce07d3f 100644 --- a/compiler/crates/relay-codegen/tests/aliased_fragments_test.rs +++ b/compiler/crates/relay-codegen/tests/aliased_fragments_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<82605c02b8c64d2e0d4b47e0e3b61579>> + * @generated SignedSource<<90116fdbb3e0a24e4e0086798282c602>> */ mod aliased_fragments; @@ -12,23 +12,23 @@ mod aliased_fragments; use aliased_fragments::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn aliased_fragment_on_abstract_type() { +#[tokio::test] +async fn aliased_fragment_on_abstract_type() { let input = include_str!("aliased_fragments/fixtures/aliased_fragment_on_abstract_type.graphql"); let expected = include_str!("aliased_fragments/fixtures/aliased_fragment_on_abstract_type.expected"); - test_fixture(transform_fixture, "aliased_fragment_on_abstract_type.graphql", "aliased_fragments/fixtures/aliased_fragment_on_abstract_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased_fragment_on_abstract_type.graphql", "aliased_fragments/fixtures/aliased_fragment_on_abstract_type.expected", input, expected).await; } -#[test] -fn aliased_fragment_spread() { +#[tokio::test] +async fn aliased_fragment_spread() { let input = include_str!("aliased_fragments/fixtures/aliased_fragment_spread.graphql"); let expected = include_str!("aliased_fragments/fixtures/aliased_fragment_spread.expected"); - test_fixture(transform_fixture, "aliased_fragment_spread.graphql", "aliased_fragments/fixtures/aliased_fragment_spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased_fragment_spread.graphql", "aliased_fragments/fixtures/aliased_fragment_spread.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread() { +#[tokio::test] +async fn aliased_inline_fragment_spread() { let input = include_str!("aliased_fragments/fixtures/aliased_inline_fragment_spread.graphql"); let expected = include_str!("aliased_fragments/fixtures/aliased_inline_fragment_spread.expected"); - test_fixture(transform_fixture, "aliased_inline_fragment_spread.graphql", "aliased_fragments/fixtures/aliased_inline_fragment_spread.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "aliased_inline_fragment_spread.graphql", "aliased_fragments/fixtures/aliased_inline_fragment_spread.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen.rs b/compiler/crates/relay-codegen/tests/catch_directive_codegen.rs new file mode 100644 index 0000000000000..06084ba1ae753 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen.rs @@ -0,0 +1,71 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::catch_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + let (base, schema) = match parts.as_slice() { + [base, extensions] => (base, get_test_schema_with_extensions(extensions)), + [base] => (base, get_test_schema()), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + catch_directive(&program, true) + .map(|next_program| { + next_program + .fragments() + .map(|def| { + let mut import_statements = 
Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>() + .join("\n\n") + }) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.expected new file mode 100644 index 0000000000000..e6540087dff73 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.expected @@ -0,0 +1,35 @@ +==================================== INPUT ==================================== +fragment MyFragment on Node { + id + name @catch(to: RESULT) +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "to": "RESULT", + "path": "name" + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.graphql new file mode 100644 index 
0000000000000..409c5e1fb4862 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive.graphql @@ -0,0 +1,4 @@ +fragment MyFragment on Node { + id + name @catch(to: RESULT) +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.expected new file mode 100644 index 0000000000000..0279feeea7f54 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +fragment MyFragment on User { + address { + street @catch(to: RESULT) + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragment", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "StreetAddress", + "kind": "LinkedField", + "name": "address", + "plural": false, + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "street", + "storageKey": null + }, + "to": "RESULT", + "path": "address.street" + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.graphql new file mode 100644 index 0000000000000..7565ba152a90b --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.graphql @@ -0,0 +1,5 @@ +fragment MyFragment on User { + address { + street 
@catch(to: RESULT) + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.expected new file mode 100644 index 0000000000000..b5b2e17859039 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +fragment MyFragment on User { + address @catch(to: RESULT) { + street + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragment", + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "concreteType": "StreetAddress", + "kind": "LinkedField", + "name": "address", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "street", + "storageKey": null + } + ], + "storageKey": null + }, + "to": "RESULT", + "path": "address" + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.graphql new file mode 100644 index 0000000000000..5b2d19bd6f4de --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_linked_to_result.graphql @@ -0,0 +1,5 @@ +fragment MyFragment on User { + address @catch(to: RESULT) { + street + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.expected new file 
mode 100644 index 0000000000000..9dbb8e1ddfa8a --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.expected @@ -0,0 +1,45 @@ +==================================== INPUT ==================================== +fragment MyFragment on User { + address @catch { + street @catch + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragment", + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "concreteType": "StreetAddress", + "kind": "LinkedField", + "name": "address", + "plural": false, + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "street", + "storageKey": null + }, + "to": "RESULT", + "path": "address.street" + } + ], + "storageKey": null + }, + "to": "RESULT", + "path": "address" + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.graphql new file mode 100644 index 0000000000000..18acdd3a4f849 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_catch.graphql @@ -0,0 +1,5 @@ +fragment MyFragment on User { + address @catch { + street @catch + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.expected new file mode 100644 index 0000000000000..abf589d688df8 --- /dev/null +++ 
b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.expected @@ -0,0 +1,45 @@ +==================================== INPUT ==================================== +fragment MyFragmentFirst on User { + parents @catch(to: NULL) { + lastName @catch(to: RESULT) + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragmentFirst", + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "parents", + "plural": true, + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null + }, + "to": "RESULT", + "path": "parents.lastName" + } + ], + "storageKey": null + }, + "to": "NULL", + "path": "parents" + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.graphql new file mode 100644 index 0000000000000..8466fe29b4b36 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.graphql @@ -0,0 +1,5 @@ +fragment MyFragmentFirst on User { + parents @catch(to: NULL) { + lastName @catch(to: RESULT) + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.expected new file mode 100644 index 0000000000000..c007a6a430301 --- /dev/null +++ 
b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.expected @@ -0,0 +1,53 @@ +==================================== INPUT ==================================== +fragment MyFragmentFirst on User { + parents @catch(to: NULL) { + id + lastName @catch(to: RESULT) + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragmentFirst", + "selections": [ + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "parents", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null + }, + "to": "RESULT", + "path": "parents.lastName" + } + ], + "storageKey": null + }, + "to": "NULL", + "path": "parents" + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.graphql new file mode 100644 index 0000000000000..7ac720b27d6f1 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.graphql @@ -0,0 +1,6 @@ +fragment MyFragmentFirst on User { + parents @catch(to: NULL) { + id + lastName @catch(to: RESULT) + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.expected new file mode 100644 index 
0000000000000..390426a8bea2f --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.expected @@ -0,0 +1,35 @@ +==================================== INPUT ==================================== +fragment MyFragment on Node { + id + name @catch +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "to": "RESULT", + "path": "name" + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.graphql new file mode 100644 index 0000000000000..37ea419f167b8 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_no_args.graphql @@ -0,0 +1,4 @@ +fragment MyFragment on Node { + id + name @catch +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.expected new file mode 100644 index 0000000000000..95bc85c659a8e --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.expected @@ -0,0 +1,35 @@ +==================================== INPUT ==================================== +fragment MyFragment on Node { + id + name @catch(to: NULL) +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + 
"metadata": null, + "name": "MyFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "to": "NULL", + "path": "name" + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.graphql new file mode 100644 index 0000000000000..f4272c2917230 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_null_arg.graphql @@ -0,0 +1,4 @@ +fragment MyFragment on Node { + id + name @catch(to: NULL) +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.expected b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.expected new file mode 100644 index 0000000000000..ca93a9fc75aed --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.expected @@ -0,0 +1,45 @@ +==================================== INPUT ==================================== +query Foo { + me { + id + name @catch + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Operation", + "name": "Foo", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "CatchField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "to": "RESULT", + 
"path": "me.name" + } + ], + "storageKey": null + } + ] +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.graphql b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.graphql new file mode 100644 index 0000000000000..e57bcb8832bfd --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen/fixtures/catch_directive_on_query.graphql @@ -0,0 +1,6 @@ +query Foo { + me { + id + name @catch + } +} diff --git a/compiler/crates/relay-codegen/tests/catch_directive_codegen_test.rs b/compiler/crates/relay-codegen/tests/catch_directive_codegen_test.rs new file mode 100644 index 0000000000000..6e15c4e63850e --- /dev/null +++ b/compiler/crates/relay-codegen/tests/catch_directive_codegen_test.rs @@ -0,0 +1,76 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<<9b3e9405c461aea8f7d13327590c2c37>> + */ + +mod catch_directive_codegen; + +use catch_directive_codegen::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn catch_directive() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive.expected"); + test_fixture(transform_fixture, file!(), "catch_directive.graphql", "catch_directive_codegen/fixtures/catch_directive.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_linked_child_has_to_result() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_linked_child_has_to_result.graphql", "catch_directive_codegen/fixtures/catch_directive_linked_child_has_to_result.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_linked_to_result() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_linked_to_result.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_linked_to_result.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_linked_to_result.graphql", "catch_directive_codegen/fixtures/catch_directive_linked_to_result.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_nested_catch() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_catch.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_catch.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_nested_catch.graphql", "catch_directive_codegen/fixtures/catch_directive_nested_catch.expected", input, 
expected).await; +} + +#[tokio::test] +async fn catch_directive_nested_linked_different_to() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_nested_linked_different_to.graphql", "catch_directive_codegen/fixtures/catch_directive_nested_linked_different_to.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_nested_linked_with_other_fields() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_nested_linked_with_other_fields.graphql", "catch_directive_codegen/fixtures/catch_directive_nested_linked_with_other_fields.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_no_args() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_no_args.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_no_args.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_no_args.graphql", "catch_directive_codegen/fixtures/catch_directive_no_args.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_null_arg() { + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_null_arg.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_null_arg.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_null_arg.graphql", "catch_directive_codegen/fixtures/catch_directive_null_arg.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_directive_on_query() 
{ + let input = include_str!("catch_directive_codegen/fixtures/catch_directive_on_query.graphql"); + let expected = include_str!("catch_directive_codegen/fixtures/catch_directive_on_query.expected"); + test_fixture(transform_fixture, file!(), "catch_directive_on_query.graphql", "catch_directive_codegen/fixtures/catch_directive_on_query.expected", input, expected).await; +} diff --git a/compiler/crates/relay-codegen/tests/client_edges.rs b/compiler/crates/relay-codegen/tests/client_edges.rs new file mode 100644 index 0000000000000..f223b4ef5f824 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/client_edges.rs @@ -0,0 +1,89 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_edges; +use relay_transforms::relay_resolvers; +use relay_transforms::sort_selections; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let relay_resolver_enable_interface_output_type = if fixture + .content + .contains("# 
relay-resolver-enable-interface-output-type") + { + FeatureFlag::Enabled + } else { + FeatureFlag::Disabled + }; + let feature_flags = Arc::new(FeatureFlags { + relay_resolver_enable_interface_output_type, + ..Default::default() + }); + let project_config: ProjectConfig = ProjectConfig { + feature_flags, + ..Default::default() + }; + let next_program = sort_selections( + &client_edges(&program, &project_config, &Default::default()) + .and_then(|program| relay_resolvers(ProjectName::default(), &program, true)) + .unwrap(), + ); + let mut result = next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>(); + result.sort_unstable(); + Ok(result.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-backed-by-resolver.expected b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-backed-by-resolver.expected index 95d9e8d4110c2..8343ea2b9104c 100644 --- a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-backed-by-resolver.expected +++ b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-backed-by-resolver.expected @@ -127,7 +127,10 @@ extend type User { "node" ], "operation": require('ClientEdgeQuery_Foo_user_best_friend.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", 
+ "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_Foo_user_best_friend", diff --git a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-client-object.expected b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-client-object.expected index 3d803d8135fe2..ade16abdb5253 100644 --- a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-client-object.expected +++ b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-client-object.expected @@ -48,6 +48,7 @@ extend type User { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientOnlyType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-plural-client-object.expected b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-plural-client-object.expected index 2662a356bfd8a..20cfac9a8651d 100644 --- a/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-plural-client-object.expected +++ b/compiler/crates/relay-codegen/tests/client_edges/fixtures/client-edge-to-plural-client-object.expected @@ -52,6 +52,7 @@ extend type User { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientOnlyType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/compiler/crates/relay-codegen/tests/client_edges/mod.rs b/compiler/crates/relay-codegen/tests/client_edges/mod.rs deleted file mode 100644 index a0b59b30f0dda..0000000000000 --- a/compiler/crates/relay-codegen/tests/client_edges/mod.rs +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_edges; -use relay_transforms::relay_resolvers; -use relay_transforms::sort_selections; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = sort_selections( - &client_edges(&program, &Default::default()) - .and_then(|program| relay_resolvers(&program, true)) - .unwrap(), - ); - let mut result = next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>(); - result.sort_unstable(); - Ok(result.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-codegen/tests/client_edges_test.rs 
b/compiler/crates/relay-codegen/tests/client_edges_test.rs index 0509411bfbf8b..ff863e9645370 100644 --- a/compiler/crates/relay-codegen/tests/client_edges_test.rs +++ b/compiler/crates/relay-codegen/tests/client_edges_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<47f77ebc1507bbe5e60146b25f042c50>> */ mod client_edges; @@ -12,30 +12,30 @@ mod client_edges; use client_edges::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_edge_backed_by_resolver() { +#[tokio::test] +async fn client_edge_backed_by_resolver() { let input = include_str!("client_edges/fixtures/client-edge-backed-by-resolver.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-backed-by-resolver.expected"); - test_fixture(transform_fixture, "client-edge-backed-by-resolver.graphql", "client_edges/fixtures/client-edge-backed-by-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-backed-by-resolver.graphql", "client_edges/fixtures/client-edge-backed-by-resolver.expected", input, expected).await; } -#[test] -fn client_edge_to_client_object() { +#[tokio::test] +async fn client_edge_to_client_object() { let input = include_str!("client_edges/fixtures/client-edge-to-client-object.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-to-client-object.expected"); - test_fixture(transform_fixture, "client-edge-to-client-object.graphql", "client_edges/fixtures/client-edge-to-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-client-object.graphql", "client_edges/fixtures/client-edge-to-client-object.expected", input, expected).await; } -#[test] -fn client_edge_to_plural_client_object() { +#[tokio::test] +async fn client_edge_to_plural_client_object() { let input = 
include_str!("client_edges/fixtures/client-edge-to-plural-client-object.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-to-plural-client-object.expected"); - test_fixture(transform_fixture, "client-edge-to-plural-client-object.graphql", "client_edges/fixtures/client-edge-to-plural-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-plural-client-object.graphql", "client_edges/fixtures/client-edge-to-plural-client-object.expected", input, expected).await; } -#[test] -fn relay_resolver_field_and_fragment_args() { +#[tokio::test] +async fn relay_resolver_field_and_fragment_args() { let input = include_str!("client_edges/fixtures/relay-resolver-field-and-fragment-args.graphql"); let expected = include_str!("client_edges/fixtures/relay-resolver-field-and-fragment-args.expected"); - test_fixture(transform_fixture, "relay-resolver-field-and-fragment-args.graphql", "client_edges/fixtures/relay-resolver-field-and-fragment-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-field-and-fragment-args.graphql", "client_edges/fixtures/relay-resolver-field-and-fragment-args.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/client_extensions.rs b/compiler/crates/relay-codegen/tests/client_extensions.rs new file mode 100644 index 0000000000000..f837cb859f827 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/client_extensions.rs @@ -0,0 +1,65 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_extensions; +use relay_transforms::sort_selections; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = sort_selections(&client_extensions(&program)); + let mut result = next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>(); + result.sort_unstable(); + Ok(result.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-codegen/tests/client_extensions/mod.rs b/compiler/crates/relay-codegen/tests/client_extensions/mod.rs deleted file mode 100644 index 
86ec5a3c677a2..0000000000000 --- a/compiler/crates/relay-codegen/tests/client_extensions/mod.rs +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_extensions; -use relay_transforms::sort_selections; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = sort_selections(&client_extensions(&program)); - let mut result = next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>(); - 
result.sort_unstable(); - Ok(result.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-codegen/tests/client_extensions_abstract_types.rs b/compiler/crates/relay-codegen/tests/client_extensions_abstract_types.rs new file mode 100644 index 0000000000000..2cd010d60b71e --- /dev/null +++ b/compiler/crates/relay-codegen/tests/client_extensions_abstract_types.rs @@ -0,0 +1,65 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_extensions_abstract_types; +use relay_transforms::sort_selections; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = sort_selections(&client_extensions_abstract_types(&program)); + let mut result = next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, 
fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>(); + result.sort_unstable(); + Ok(result.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-codegen/tests/client_extensions_abstract_types/mod.rs b/compiler/crates/relay-codegen/tests/client_extensions_abstract_types/mod.rs deleted file mode 100644 index e589dc28a9009..0000000000000 --- a/compiler/crates/relay-codegen/tests/client_extensions_abstract_types/mod.rs +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_extensions_abstract_types; -use relay_transforms::sort_selections; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = sort_selections(&client_extensions_abstract_types(&program)); - let mut result = next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>(); - result.sort_unstable(); - Ok(result.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-codegen/tests/client_extensions_abstract_types_test.rs b/compiler/crates/relay-codegen/tests/client_extensions_abstract_types_test.rs 
index 7bf36359d5df8..f0fcaa2c1c3c5 100644 --- a/compiler/crates/relay-codegen/tests/client_extensions_abstract_types_test.rs +++ b/compiler/crates/relay-codegen/tests/client_extensions_abstract_types_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<3d2f11178076ec66072479878618fb48>> */ mod client_extensions_abstract_types; @@ -12,30 +12,30 @@ mod client_extensions_abstract_types; use client_extensions_abstract_types::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_spread_on_client_interface() { +#[tokio::test] +async fn fragment_spread_on_client_interface() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected", input, expected).await; } -#[test] -fn fragment_spread_on_client_interface_transitively() { +#[tokio::test] +async fn fragment_spread_on_client_interface_transitively() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_interface_transitively.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_interface_transitively.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected", input, expected).await; } -#[test] -fn fragment_spread_on_client_union() { +#[tokio::test] +async fn fragment_spread_on_client_union() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_union.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_union.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected", input, expected).await; } -#[test] -fn inline_fragment_on_client_interface() { +#[tokio::test] +async fn inline_fragment_on_client_interface() { let input = include_str!("client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected"); - test_fixture(transform_fixture, "inline_fragment_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline_fragment_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/client_extensions_test.rs b/compiler/crates/relay-codegen/tests/client_extensions_test.rs index 41289fa5fc4b5..eaa9dda3f5337 100644 --- a/compiler/crates/relay-codegen/tests/client_extensions_test.rs +++ 
b/compiler/crates/relay-codegen/tests/client_extensions_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<9a35afd125f7a2bbdee5bdb73d9c90f2>> + * @generated SignedSource<<1845a10ed61ee33195d95fa1bd54f913>> */ mod client_extensions; @@ -12,30 +12,30 @@ mod client_extensions; use client_extensions::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_conditions() { +#[tokio::test] +async fn client_conditions() { let input = include_str!("client_extensions/fixtures/client-conditions.graphql"); let expected = include_str!("client_extensions/fixtures/client-conditions.expected"); - test_fixture(transform_fixture, "client-conditions.graphql", "client_extensions/fixtures/client-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-conditions.graphql", "client_extensions/fixtures/client-conditions.expected", input, expected).await; } -#[test] -fn client_fields_in_inline_fragments() { +#[tokio::test] +async fn client_fields_in_inline_fragments() { let input = include_str!("client_extensions/fixtures/client-fields-in-inline-fragments.graphql"); let expected = include_str!("client_extensions/fixtures/client-fields-in-inline-fragments.expected"); - test_fixture(transform_fixture, "client-fields-in-inline-fragments.graphql", "client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-in-inline-fragments.graphql", "client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected).await; } -#[test] -fn client_fields_on_roots() { +#[tokio::test] +async fn client_fields_on_roots() { let input = include_str!("client_extensions/fixtures/client-fields-on-roots.graphql"); let expected = include_str!("client_extensions/fixtures/client-fields-on-roots.expected"); - test_fixture(transform_fixture, 
"client-fields-on-roots.graphql", "client_extensions/fixtures/client-fields-on-roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-on-roots.graphql", "client_extensions/fixtures/client-fields-on-roots.expected", input, expected).await; } -#[test] -fn sibling_client_selections() { +#[tokio::test] +async fn sibling_client_selections() { let input = include_str!("client_extensions/fixtures/sibling-client-selections.graphql"); let expected = include_str!("client_extensions/fixtures/sibling-client-selections.expected"); - test_fixture(transform_fixture, "sibling-client-selections.graphql", "client_extensions/fixtures/sibling-client-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "sibling-client-selections.graphql", "client_extensions/fixtures/sibling-client-selections.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/connections.rs b/compiler/crates/relay-codegen/tests/connections.rs new file mode 100644 index 0000000000000..9d7bdebf6185a --- /dev/null +++ b/compiler/crates/relay-codegen/tests/connections.rs @@ -0,0 +1,85 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_codegen::build_request_params; +use relay_codegen::JsModuleFormat; +use relay_codegen::Printer; +use relay_config::DeferStreamInterface; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_transforms::transform_connections; +use relay_transforms::validate_connections; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let project_config = ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }; + let mut printer = Printer::with_dedupe(&project_config); + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let schema = get_test_schema(); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let connection_interface = ConnectionInterface::default(); + let defer_stream_interface = DeferStreamInterface::default(); + + validate_connections(&program, &connection_interface) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let next_program = + transform_connections(&program, &connection_interface, &defer_stream_interface); + + let mut printed = next_program + .operations() + .map(|def| { + let operation_fragment = FragmentDefinition { + name: def.name.map(|x| FragmentDefinitionName(x.0)), + variable_definitions: def.variable_definitions.clone(), + selections: def.selections.clone(), + used_global_variables: Default::default(), + 
directives: def.directives.clone(), + type_condition: def.type_, + }; + let request_parameters = build_request_params(def); + let mut import_statements = Default::default(); + let request = printer.print_request( + &schema, + def, + &operation_fragment, + request_parameters, + &mut import_statements, + ); + format!("{}{}", import_statements, request) + }) + .collect::>(); + let mut import_statements = Default::default(); + for def in next_program.fragments() { + printed.push(printer.print_fragment(&schema, def, &mut import_statements)); + } + if !import_statements.is_empty() { + printed.push(import_statements.to_string()) + } + printed.sort(); + Ok(printed.join("\n\n")) +} diff --git a/compiler/crates/relay-codegen/tests/connections/mod.rs b/compiler/crates/relay-codegen/tests/connections/mod.rs deleted file mode 100644 index 130d7ffa80071..0000000000000 --- a/compiler/crates/relay-codegen/tests/connections/mod.rs +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_codegen::build_request_params; -use relay_codegen::JsModuleFormat; -use relay_codegen::Printer; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_transforms::transform_connections; -use relay_transforms::validate_connections; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let project_config = ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }; - let mut printer = Printer::with_dedupe(&project_config); - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let schema = get_test_schema(); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let connection_interface = ConnectionInterface::default(); - - validate_connections(&program, &connection_interface) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let next_program = transform_connections(&program, &connection_interface); - - let mut printed = next_program - .operations() - .map(|def| { - let operation_fragment = FragmentDefinition { - name: def.name.map(|x| FragmentDefinitionName(x.0)), - variable_definitions: def.variable_definitions.clone(), - selections: def.selections.clone(), - used_global_variables: Default::default(), - directives: def.directives.clone(), - type_condition: def.type_, - }; - let request_parameters = build_request_params(def); - let mut 
import_statements = Default::default(); - let request = printer.print_request( - &schema, - def, - &operation_fragment, - request_parameters, - &mut import_statements, - ); - format!("{}{}", import_statements, request) - }) - .collect::>(); - let mut import_statements = Default::default(); - for def in next_program.fragments() { - printed.push(printer.print_fragment(&schema, def, &mut import_statements)); - } - if !import_statements.is_empty() { - printed.push(import_statements.to_string()) - } - printed.sort(); - Ok(printed.join("\n\n")) -} diff --git a/compiler/crates/relay-codegen/tests/connections_test.rs b/compiler/crates/relay-codegen/tests/connections_test.rs index 76c6139ea43f2..63311328cfcae 100644 --- a/compiler/crates/relay-codegen/tests/connections_test.rs +++ b/compiler/crates/relay-codegen/tests/connections_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<44a8c537a98a80a74d430701a2fb3bd8>> + * @generated SignedSource<> */ mod connections; @@ -12,65 +12,65 @@ mod connections; use connections::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn connection() { +#[tokio::test] +async fn connection() { let input = include_str!("connections/fixtures/connection.graphql"); let expected = include_str!("connections/fixtures/connection.expected"); - test_fixture(transform_fixture, "connection.graphql", "connections/fixtures/connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection.graphql", "connections/fixtures/connection.expected", input, expected).await; } -#[test] -fn connection_directions() { +#[tokio::test] +async fn connection_directions() { let input = include_str!("connections/fixtures/connection-directions.graphql"); let expected = include_str!("connections/fixtures/connection-directions.expected"); - test_fixture(transform_fixture, "connection-directions.graphql", 
"connections/fixtures/connection-directions.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-directions.graphql", "connections/fixtures/connection-directions.expected", input, expected).await; } -#[test] -fn connection_empty_filters() { +#[tokio::test] +async fn connection_empty_filters() { let input = include_str!("connections/fixtures/connection-empty-filters.graphql"); let expected = include_str!("connections/fixtures/connection-empty-filters.expected"); - test_fixture(transform_fixture, "connection-empty-filters.graphql", "connections/fixtures/connection-empty-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-empty-filters.graphql", "connections/fixtures/connection-empty-filters.expected", input, expected).await; } -#[test] -fn connection_filters() { +#[tokio::test] +async fn connection_filters() { let input = include_str!("connections/fixtures/connection-filters.graphql"); let expected = include_str!("connections/fixtures/connection-filters.expected"); - test_fixture(transform_fixture, "connection-filters.graphql", "connections/fixtures/connection-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-filters.graphql", "connections/fixtures/connection-filters.expected", input, expected).await; } -#[test] -fn connection_generate_filters() { +#[tokio::test] +async fn connection_generate_filters() { let input = include_str!("connections/fixtures/connection-generate-filters.graphql"); let expected = include_str!("connections/fixtures/connection-generate-filters.expected"); - test_fixture(transform_fixture, "connection-generate-filters.graphql", "connections/fixtures/connection-generate-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-generate-filters.graphql", "connections/fixtures/connection-generate-filters.expected", input, expected).await; } -#[test] -fn connection_with_aliased_edges_page_info() { 
+#[tokio::test] +async fn connection_with_aliased_edges_page_info() { let input = include_str!("connections/fixtures/connection-with-aliased-edges-page-info.graphql"); let expected = include_str!("connections/fixtures/connection-with-aliased-edges-page-info.expected"); - test_fixture(transform_fixture, "connection-with-aliased-edges-page-info.graphql", "connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-aliased-edges-page-info.graphql", "connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected).await; } -#[test] -fn connection_with_custom_handler() { +#[tokio::test] +async fn connection_with_custom_handler() { let input = include_str!("connections/fixtures/connection-with-custom-handler.graphql"); let expected = include_str!("connections/fixtures/connection-with-custom-handler.expected"); - test_fixture(transform_fixture, "connection-with-custom-handler.graphql", "connections/fixtures/connection-with-custom-handler.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-custom-handler.graphql", "connections/fixtures/connection-with-custom-handler.expected", input, expected).await; } -#[test] -fn connection_with_page_info() { +#[tokio::test] +async fn connection_with_page_info() { let input = include_str!("connections/fixtures/connection-with-page-info.graphql"); let expected = include_str!("connections/fixtures/connection-with-page-info.expected"); - test_fixture(transform_fixture, "connection-with-page-info.graphql", "connections/fixtures/connection-with-page-info.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-page-info.graphql", "connections/fixtures/connection-with-page-info.expected", input, expected).await; } -#[test] -fn connection_with_variables() { +#[tokio::test] +async fn connection_with_variables() { let input = 
include_str!("connections/fixtures/connection-with-variables.graphql"); let expected = include_str!("connections/fixtures/connection-with-variables.expected"); - test_fixture(transform_fixture, "connection-with-variables.graphql", "connections/fixtures/connection-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-variables.graphql", "connections/fixtures/connection-with-variables.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/deduped_json_codegen.rs b/compiler/crates/relay-codegen/tests/deduped_json_codegen.rs new file mode 100644 index 0000000000000..845777078dd04 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/deduped_json_codegen.rs @@ -0,0 +1,59 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_syntax::parse_executable; +use relay_codegen::JsModuleFormat; +use relay_codegen::Printer; +use relay_config::ProjectConfig; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let project_config = ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }; + let mut printer = Printer::with_dedupe(&project_config); + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| { + definitions + .iter() + .map(|def| match def { + graphql_ir::ExecutableDefinition::Operation(operation) => { + let mut import_statements = Default::default(); + let operation = printer.print_operation( + &TEST_SCHEMA, + operation, + &mut import_statements, + ); + format!("Operation:\n{}{}\n", import_statements, operation,) + } + 
graphql_ir::ExecutableDefinition::Fragment(fragment) => { + let mut import_statements = Default::default(); + let fragment = + printer.print_fragment(&TEST_SCHEMA, fragment, &mut import_statements); + format!("Fragment:\n{}{}\n", import_statements, fragment) + } + }) + .collect::>() + .join("\n\n") + }) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/relay-codegen/tests/deduped_json_codegen/mod.rs b/compiler/crates/relay-codegen/tests/deduped_json_codegen/mod.rs deleted file mode 100644 index f5c441f461772..0000000000000 --- a/compiler/crates/relay-codegen/tests/deduped_json_codegen/mod.rs +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_syntax::parse_executable; -use relay_codegen::JsModuleFormat; -use relay_codegen::Printer; -use relay_config::ProjectConfig; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let project_config = ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }; - let mut printer = Printer::with_dedupe(&project_config); - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - build(&TEST_SCHEMA, &ast.definitions) - .map(|definitions| { - definitions - .iter() - .map(|def| match def { - graphql_ir::ExecutableDefinition::Operation(operation) => { - let mut import_statements = Default::default(); - let operation = printer.print_operation( - &TEST_SCHEMA, - operation, - &mut import_statements, - ); - format!("Operation:\n{}{}\n", import_statements, operation,) - } - graphql_ir::ExecutableDefinition::Fragment(fragment) => { - 
let mut import_statements = Default::default(); - let fragment = - printer.print_fragment(&TEST_SCHEMA, fragment, &mut import_statements); - format!("Fragment:\n{}{}\n", import_statements, fragment) - } - }) - .collect::>() - .join("\n\n") - }) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/relay-codegen/tests/deduped_json_codegen_test.rs b/compiler/crates/relay-codegen/tests/deduped_json_codegen_test.rs index 8a7b1539244aa..a6838bd58633d 100644 --- a/compiler/crates/relay-codegen/tests/deduped_json_codegen_test.rs +++ b/compiler/crates/relay-codegen/tests/deduped_json_codegen_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<8c1491751a56c21d043d0a431f86d2cb>> + * @generated SignedSource<<0b25da3483365b2f5061cc5e7f9d72b7>> */ mod deduped_json_codegen; @@ -12,23 +12,23 @@ mod deduped_json_codegen; use deduped_json_codegen::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("deduped_json_codegen/fixtures/kitchen-sink.graphql"); let expected = include_str!("deduped_json_codegen/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "deduped_json_codegen/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "deduped_json_codegen/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn stable_literals() { +#[tokio::test] +async fn stable_literals() { let input = include_str!("deduped_json_codegen/fixtures/stable-literals.graphql"); let expected = include_str!("deduped_json_codegen/fixtures/stable-literals.expected"); - test_fixture(transform_fixture, "stable-literals.graphql", 
"deduped_json_codegen/fixtures/stable-literals.expected", input, expected); + test_fixture(transform_fixture, file!(), "stable-literals.graphql", "deduped_json_codegen/fixtures/stable-literals.expected", input, expected).await; } -#[test] -fn stable_literals_duplicates() { +#[tokio::test] +async fn stable_literals_duplicates() { let input = include_str!("deduped_json_codegen/fixtures/stable-literals-duplicates.graphql"); let expected = include_str!("deduped_json_codegen/fixtures/stable-literals-duplicates.expected"); - test_fixture(transform_fixture, "stable-literals-duplicates.graphql", "deduped_json_codegen/fixtures/stable-literals-duplicates.expected", input, expected); + test_fixture(transform_fixture, file!(), "stable-literals-duplicates.graphql", "deduped_json_codegen/fixtures/stable-literals-duplicates.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/defer_stream.rs b/compiler/crates/relay-codegen/tests/defer_stream.rs new file mode 100644 index 0000000000000..e8a51a97b938e --- /dev/null +++ b/compiler/crates/relay-codegen/tests/defer_stream.rs @@ -0,0 +1,67 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::DeferStreamInterface; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_transforms::sort_selections; +use relay_transforms::transform_defer_stream; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + let schema = get_test_schema(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let defer_stream_interface = DeferStreamInterface::default(); + let next_program = + sort_selections(&transform_defer_stream(&program, &defer_stream_interface).unwrap()); + let mut result = next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>(); + result.sort_unstable(); + Ok(result.join("\n\n")) +} diff --git a/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected b/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected index 95222b6649848..a741b1fce7a72 
100644 --- a/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected +++ b/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1) { + actors @stream(initialCount: 1) { name } } diff --git a/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql b/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql index 23fa23b3e8e43..739db7de64c6a 100644 --- a/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql +++ b/compiler/crates/relay-codegen/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql @@ -7,7 +7,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1) { + actors @stream(initialCount: 1) { name } } diff --git a/compiler/crates/relay-codegen/tests/defer_stream/mod.rs b/compiler/crates/relay-codegen/tests/defer_stream/mod.rs deleted file mode 100644 index 839bf1c051b58..0000000000000 --- a/compiler/crates/relay-codegen/tests/defer_stream/mod.rs +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_transforms::sort_selections; -use relay_transforms::transform_defer_stream; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - let schema = get_test_schema(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = sort_selections(&transform_defer_stream(&program).unwrap()); - let mut result = next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>(); - result.sort_unstable(); - Ok(result.join("\n\n")) -} diff --git a/compiler/crates/relay-codegen/tests/defer_stream_test.rs b/compiler/crates/relay-codegen/tests/defer_stream_test.rs index 4d60225530a67..061bbebe2cda3 100644 --- a/compiler/crates/relay-codegen/tests/defer_stream_test.rs +++ b/compiler/crates/relay-codegen/tests/defer_stream_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in 
the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<1202650c9e42a6b47175ff4550f58ab5>> */ mod defer_stream; @@ -12,16 +12,16 @@ mod defer_stream; use defer_stream::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_defer_default_label() { +#[tokio::test] +async fn fragment_with_defer_default_label() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-default-label.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-default-label.expected"); - test_fixture(transform_fixture, "fragment-with-defer-default-label.graphql", "defer_stream/fixtures/fragment-with-defer-default-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-default-label.graphql", "defer_stream/fixtures/fragment-with-defer-default-label.expected", input, expected).await; } -#[test] -fn fragment_with_stream_default_label() { +#[tokio::test] +async fn fragment_with_stream_default_label() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-default-label.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-default-label.expected"); - test_fixture(transform_fixture, "fragment-with-stream-default-label.graphql", "defer_stream/fixtures/fragment-with-stream-default-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-default-label.graphql", "defer_stream/fixtures/fragment-with-stream-default-label.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/json_codegen.rs b/compiler/crates/relay-codegen/tests/json_codegen.rs new file mode 100644 index 0000000000000..e2f1861b73d46 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/json_codegen.rs @@ -0,0 +1,67 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::ExecutableDefinition; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| { + definitions + .iter() + .map(|def| match def { + ExecutableDefinition::Operation(operation) => { + let mut import_statements = Default::default(); + let operation = print_operation( + &TEST_SCHEMA, + operation, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + } + ExecutableDefinition::Fragment(fragment) => { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &TEST_SCHEMA, + fragment, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + } + }) + .collect::>() + .join("\n\n") + }) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/relay-codegen/tests/json_codegen/mod.rs b/compiler/crates/relay-codegen/tests/json_codegen/mod.rs deleted file mode 100644 index 051f5beafa7e0..0000000000000 --- a/compiler/crates/relay-codegen/tests/json_codegen/mod.rs +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::ExecutableDefinition; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - build(&TEST_SCHEMA, &ast.definitions) - .map(|definitions| { - definitions - .iter() - .map(|def| match def { - ExecutableDefinition::Operation(operation) => { - let mut import_statements = Default::default(); - let operation = print_operation( - &TEST_SCHEMA, - operation, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - } - ExecutableDefinition::Fragment(fragment) => { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &TEST_SCHEMA, - fragment, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - } - }) - .collect::>() - .join("\n\n") - }) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/relay-codegen/tests/json_codegen_test.rs b/compiler/crates/relay-codegen/tests/json_codegen_test.rs index 61d541a96ed7e..2f96a4f8564c1 100644 --- a/compiler/crates/relay-codegen/tests/json_codegen_test.rs +++ b/compiler/crates/relay-codegen/tests/json_codegen_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE 
file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<98d370c271590bbbce69074868ae68ae>> */ mod json_codegen; @@ -12,16 +12,16 @@ mod json_codegen; use json_codegen::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("json_codegen/fixtures/kitchen-sink.graphql"); let expected = include_str!("json_codegen/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "json_codegen/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "json_codegen/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn stable_literals() { +#[tokio::test] +async fn stable_literals() { let input = include_str!("json_codegen/fixtures/stable-literals.graphql"); let expected = include_str!("json_codegen/fixtures/stable-literals.expected"); - test_fixture(transform_fixture, "stable-literals.graphql", "json_codegen/fixtures/stable-literals.expected", input, expected); + test_fixture(transform_fixture, file!(), "stable-literals.graphql", "json_codegen/fixtures/stable-literals.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.expected b/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.expected deleted file mode 100644 index 904b3e9aae5cd..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.expected +++ /dev/null @@ -1,109 +0,0 @@ -==================================== INPUT ==================================== -query FlightQuery($cond: Boolean!, $count: Int!, $id: ID!) { - node(id: $id) { - ... on Story { - id - extension_field(condition: $cond, count: $count, id: $id) - } - } -} - -# %extensions% -directive @react_flight_component(name: String!) 
on FIELD_DEFINITION - -extend type Story { - extension_field( - condition: Boolean! - count: Int! - id: ID! - ): ReactFlightComponent @react_flight_component(name: "ExtensionField.server") -} -==================================== OUTPUT =================================== -{ - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "cond" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "FlightQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "kind": "InlineFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "alias": "extension_field", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "ExtensionField.server" - }, - { - "fields": [ - { - "kind": "Variable", - "name": "condition", - "variableName": "cond" - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null - } - ], - "storageKey": null - } - ] -} diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.graphql b/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.graphql deleted file mode 100644 index 3a56bd589d195..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight-props.graphql +++ /dev/null @@ -1,19 +0,0 @@ -query FlightQuery($cond: Boolean!, $count: Int!, $id: ID!) 
{ - node(id: $id) { - ... on Story { - id - extension_field(condition: $cond, count: $count, id: $id) - } - } -} - -# %extensions% -directive @react_flight_component(name: String!) on FIELD_DEFINITION - -extend type Story { - extension_field( - condition: Boolean! - count: Int! - id: ID! - ): ReactFlightComponent @react_flight_component(name: "ExtensionField.server") -} diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.expected b/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.expected deleted file mode 100644 index c2b500cfa1fe0..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.expected +++ /dev/null @@ -1,30 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw - -query FlightQuery($cond: Boolean!, $count: Int!, $id: ID!) { - node(id: $id) { - ... on User { - id - extension_field(condition: $cond, count: $count, id: $id) - } - } -} - -# %extensions% -directive @react_flight_component(name: String!) on FIELD_DEFINITION - -extend type User { - extension_field( - condition: Boolean! - count: Int! - id: ID! 
- ): ReactFlightComponent @react_flight_component(name: "ExtensionField.server") -} -==================================== ERROR ==================================== -✖︎ Cannot query field 'extension_field', this type does not define a 'flight' field - - flight.invalid.graphql:7:7 - 6 │ id - 7 │ extension_field(condition: $cond, count: $count, id: $id) - │ ^^^^^^^^^^^^^^^ - 8 │ } diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.graphql b/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.graphql deleted file mode 100644 index 535504bf25d76..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen/fixtures/flight.invalid.graphql +++ /dev/null @@ -1,21 +0,0 @@ -# expected-to-throw - -query FlightQuery($cond: Boolean!, $count: Int!, $id: ID!) { - node(id: $id) { - ... on User { - id - extension_field(condition: $cond, count: $count, id: $id) - } - } -} - -# %extensions% -directive @react_flight_component(name: String!) on FIELD_DEFINITION - -extend type User { - extension_field( - condition: Boolean! - count: Int! - id: ID! - ): ReactFlightComponent @react_flight_component(name: "ExtensionField.server") -} diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen/mod.rs b/compiler/crates/relay-codegen/tests/react_flight_codegen/mod.rs deleted file mode 100644 index 8d994b6e3400f..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen/mod.rs +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::react_flight; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - let (base, schema) = match parts.as_slice() { - [base, extensions] => (base, get_test_schema_with_extensions(extensions)), - [base] => (base, get_test_schema()), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - react_flight(&program) - .map(|next_program| { - next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>() - .join("\n\n") - }) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} 
diff --git a/compiler/crates/relay-codegen/tests/react_flight_codegen_test.rs b/compiler/crates/relay-codegen/tests/react_flight_codegen_test.rs deleted file mode 100644 index cc95a836af42f..0000000000000 --- a/compiler/crates/relay-codegen/tests/react_flight_codegen_test.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @generated SignedSource<<569f1efa26994d3ed3c24325d2914203>> - */ - -mod react_flight_codegen; - -use react_flight_codegen::transform_fixture; -use fixture_tests::test_fixture; - -#[test] -fn flight_invalid() { - let input = include_str!("react_flight_codegen/fixtures/flight.invalid.graphql"); - let expected = include_str!("react_flight_codegen/fixtures/flight.invalid.expected"); - test_fixture(transform_fixture, "flight.invalid.graphql", "react_flight_codegen/fixtures/flight.invalid.expected", input, expected); -} - -#[test] -fn flight_props() { - let input = include_str!("react_flight_codegen/fixtures/flight-props.graphql"); - let expected = include_str!("react_flight_codegen/fixtures/flight-props.expected"); - test_fixture(transform_fixture, "flight-props.graphql", "react_flight_codegen/fixtures/flight-props.expected", input, expected); -} diff --git a/compiler/crates/relay-codegen/tests/relay_actor_change.rs b/compiler/crates/relay-codegen/tests/relay_actor_change.rs new file mode 100644 index 0000000000000..f1bab63be9a56 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/relay_actor_change.rs @@ -0,0 +1,66 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_transforms::relay_actor_change_transform; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + let schema = get_test_schema(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = relay_actor_change_transform(&program, &FeatureFlag::Enabled) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let mut result = next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>(); + result.sort_unstable(); + Ok(result.join("\n\n")) +} diff --git a/compiler/crates/relay-codegen/tests/relay_actor_change/mod.rs b/compiler/crates/relay-codegen/tests/relay_actor_change/mod.rs deleted file mode 100644 index e859221cbed21..0000000000000 --- 
a/compiler/crates/relay-codegen/tests/relay_actor_change/mod.rs +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::FeatureFlag; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_transforms::relay_actor_change_transform; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - let schema = get_test_schema(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = relay_actor_change_transform(&program, &FeatureFlag::Enabled) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let mut result = next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>(); - result.sort_unstable(); - 
Ok(result.join("\n\n")) -} diff --git a/compiler/crates/relay-codegen/tests/relay_actor_change_test.rs b/compiler/crates/relay-codegen/tests/relay_actor_change_test.rs index 7d9b7e7ef2dae..06ddfc635e487 100644 --- a/compiler/crates/relay-codegen/tests/relay_actor_change_test.rs +++ b/compiler/crates/relay-codegen/tests/relay_actor_change_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<346e464f90b119168d614671e5aa003e>> + * @generated SignedSource<<71701182887388b05adae9f92976538d>> */ mod relay_actor_change; @@ -12,9 +12,9 @@ mod relay_actor_change; use relay_actor_change::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn relay_actor_change_simple_query() { +#[tokio::test] +async fn relay_actor_change_simple_query() { let input = include_str!("relay_actor_change/fixtures/relay_actor_change-simple-query.graphql"); let expected = include_str!("relay_actor_change/fixtures/relay_actor_change-simple-query.expected"); - test_fixture(transform_fixture, "relay_actor_change-simple-query.graphql", "relay_actor_change/fixtures/relay_actor_change-simple-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay_actor_change-simple-query.graphql", "relay_actor_change/fixtures/relay_actor_change-simple-query.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/request_metadata.rs b/compiler/crates/relay-codegen/tests/request_metadata.rs new file mode 100644 index 0000000000000..37954f51b02ef --- /dev/null +++ b/compiler/crates/relay-codegen/tests/request_metadata.rs @@ -0,0 +1,114 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::ArgumentName; +use common::DirectiveName; +use common::SourceLocationKey; +use common::WithLocation; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Argument; +use graphql_ir::ConstantValue; +use graphql_ir::Directive; +use graphql_ir::ExecutableDefinition; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::OperationDefinition; +use graphql_ir::Value; +use graphql_syntax::parse_executable; +use intern::string_key::Intern; +use relay_codegen::build_request_params; +use relay_codegen::print_fragment; +use relay_codegen::print_request; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + let program = build(&TEST_SCHEMA, &ast.definitions); + program + .map(|definitions| { + definitions + .iter() + .map(|def| match def { + ExecutableDefinition::Operation(operation) => { + let operation = OperationDefinition { + directives: vec![Directive { + name: WithLocation::new( + operation.name.location, + DirectiveName("__metadata".intern()), + ), + arguments: vec![Argument { + name: WithLocation::new( + operation.name.location, + ArgumentName("metadataKey".intern()), + ), + value: WithLocation::new( + operation.name.location, + Value::Constant(ConstantValue::String( + "Hello world!".intern(), + )), + ), + }], + data: None, + }], + ..operation.clone() + }; + + let operation_fragment = FragmentDefinition { + name: operation.name.map(|x| FragmentDefinitionName(x.0)), + variable_definitions: operation.variable_definitions.clone(), + selections: operation.selections.clone(), + used_global_variables: Default::default(), + directives: operation.directives.clone(), + type_condition: operation.type_, + }; + let request_parameters = build_request_params(&operation); 
+ let mut import_statements = Default::default(); + let request = print_request( + &TEST_SCHEMA, + &operation, + &operation_fragment, + request_parameters, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, request) + } + ExecutableDefinition::Fragment(fragment) => { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &TEST_SCHEMA, + fragment, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + } + }) + .collect::>() + .join("\n\n") + }) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/relay-codegen/tests/request_metadata/mod.rs b/compiler/crates/relay-codegen/tests/request_metadata/mod.rs deleted file mode 100644 index 5a92c6cf5009e..0000000000000 --- a/compiler/crates/relay-codegen/tests/request_metadata/mod.rs +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::ArgumentName; -use common::DirectiveName; -use common::SourceLocationKey; -use common::WithLocation; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Argument; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::ExecutableDefinition; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::OperationDefinition; -use graphql_ir::Value; -use graphql_syntax::parse_executable; -use intern::string_key::Intern; -use relay_codegen::build_request_params; -use relay_codegen::print_fragment; -use relay_codegen::print_request; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - let program = build(&TEST_SCHEMA, &ast.definitions); - program - .map(|definitions| { - definitions - .iter() - .map(|def| match def { - ExecutableDefinition::Operation(operation) => { - let operation = OperationDefinition { - directives: vec![Directive { - name: WithLocation::new( - operation.name.location, - DirectiveName("__metadata".intern()), - ), - arguments: vec![Argument { - name: WithLocation::new( - operation.name.location, - ArgumentName("metadataKey".intern()), - ), - value: WithLocation::new( - operation.name.location, - Value::Constant(ConstantValue::String( - "Hello world!".intern(), - )), - ), - }], - data: None, - }], - ..operation.clone() - }; - - let operation_fragment = FragmentDefinition { - name: operation.name.map(|x| FragmentDefinitionName(x.0)), - variable_definitions: operation.variable_definitions.clone(), - selections: operation.selections.clone(), - used_global_variables: Default::default(), - directives: operation.directives.clone(), - type_condition: operation.type_, - }; - let request_parameters = build_request_params(&operation); - let 
mut import_statements = Default::default(); - let request = print_request( - &TEST_SCHEMA, - &operation, - &operation_fragment, - request_parameters, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, request) - } - ExecutableDefinition::Fragment(fragment) => { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &TEST_SCHEMA, - fragment, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - } - }) - .collect::>() - .join("\n\n") - }) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/relay-codegen/tests/request_metadata_test.rs b/compiler/crates/relay-codegen/tests/request_metadata_test.rs index 74540a7156b5d..5e3961a8de071 100644 --- a/compiler/crates/relay-codegen/tests/request_metadata_test.rs +++ b/compiler/crates/relay-codegen/tests/request_metadata_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<876b7ae96c21e9c336b8c69eab102066>> + * @generated SignedSource<<8568219a3a26f703d22be705fb299688>> */ mod request_metadata; @@ -12,9 +12,9 @@ mod request_metadata; use request_metadata::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn valid_documents() { +#[tokio::test] +async fn valid_documents() { let input = include_str!("request_metadata/fixtures/valid-documents.graphql"); let expected = include_str!("request_metadata/fixtures/valid-documents.expected"); - test_fixture(transform_fixture, "valid-documents.graphql", "request_metadata/fixtures/valid-documents.expected", input, expected); + test_fixture(transform_fixture, file!(), "valid-documents.graphql", "request_metadata/fixtures/valid-documents.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/required_directive_codegen.rs b/compiler/crates/relay-codegen/tests/required_directive_codegen.rs new file mode 100644 index 0000000000000..e7e2b284cf250 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/required_directive_codegen.rs @@ -0,0 +1,71 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::required_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + let (base, schema) = match parts.as_slice() { + [base, extensions] => (base, get_test_schema_with_extensions(extensions)), + [base] => (base, get_test_schema()), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + required_directive(&program) + .map(|next_program| { + next_program + .fragments() + .map(|def| { + let mut import_statements = Default::default(); + let fragment = print_fragment( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + }) + .chain(next_program.operations().map(|def| { + let mut import_statements = Default::default(); + let operation = print_operation( + &schema, + def, + &ProjectConfig { + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + })) + .collect::>() + .join("\n\n") + }) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, 
&diagnostics)) +} diff --git a/compiler/crates/relay-codegen/tests/required_directive_codegen/mod.rs b/compiler/crates/relay-codegen/tests/required_directive_codegen/mod.rs deleted file mode 100644 index dd9d9cadc0147..0000000000000 --- a/compiler/crates/relay-codegen/tests/required_directive_codegen/mod.rs +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::required_directive; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - let (base, schema) = match parts.as_slice() { - [base, extensions] => (base, get_test_schema_with_extensions(extensions)), - [base] => (base, get_test_schema()), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let ast = parse_executable(base, SourceLocationKey::standalone(fixture.file_name)).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - required_directive(&program) - .map(|next_program| { - next_program - .fragments() - .map(|def| { - let mut import_statements = Default::default(); - let fragment = print_fragment( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() 
- }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - }) - .chain(next_program.operations().map(|def| { - let mut import_statements = Default::default(); - let operation = print_operation( - &schema, - def, - &ProjectConfig { - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - })) - .collect::>() - .join("\n\n") - }) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} diff --git a/compiler/crates/relay-codegen/tests/required_directive_codegen_test.rs b/compiler/crates/relay-codegen/tests/required_directive_codegen_test.rs index f2582afceea09..ebe4678ae1701 100644 --- a/compiler/crates/relay-codegen/tests/required_directive_codegen_test.rs +++ b/compiler/crates/relay-codegen/tests/required_directive_codegen_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<7da5edea64127ccd7e5801c7c251499e>> */ mod required_directive_codegen; @@ -12,16 +12,16 @@ mod required_directive_codegen; use required_directive_codegen::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn required_directive() { +#[tokio::test] +async fn required_directive() { let input = include_str!("required_directive_codegen/fixtures/required_directive.graphql"); let expected = include_str!("required_directive_codegen/fixtures/required_directive.expected"); - test_fixture(transform_fixture, "required_directive.graphql", "required_directive_codegen/fixtures/required_directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "required_directive.graphql", "required_directive_codegen/fixtures/required_directive.expected", input, expected).await; } -#[test] -fn required_linked_field() { +#[tokio::test] +async fn required_linked_field() { let input = include_str!("required_directive_codegen/fixtures/required_linked_field.graphql"); let expected = include_str!("required_directive_codegen/fixtures/required_linked_field.expected"); - test_fixture(transform_fixture, "required_linked_field.graphql", "required_directive_codegen/fixtures/required_linked_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required_linked_field.graphql", "required_directive_codegen/fixtures/required_linked_field.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/skip_printing_nulls.rs b/compiler/crates/relay-codegen/tests/skip_printing_nulls.rs new file mode 100644 index 0000000000000..eaff96dccdfb0 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/skip_printing_nulls.rs @@ -0,0 +1,77 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::ExecutableDefinition; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_config::ProjectConfig; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + let feature_flags = FeatureFlags { + skip_printing_nulls: FeatureFlag::Enabled, + ..Default::default() + }; + + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| { + definitions + .iter() + .map(|def| match def { + ExecutableDefinition::Operation(operation) => { + let mut import_statements = Default::default(); + + let operation = print_operation( + &TEST_SCHEMA, + operation, + &ProjectConfig { + feature_flags: Arc::new(feature_flags.clone()), + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + } + ExecutableDefinition::Fragment(fragment) => { + let mut import_statements = Default::default(); + + let fragment = print_fragment( + &TEST_SCHEMA, + fragment, + &ProjectConfig { + feature_flags: Arc::new(feature_flags.clone()), + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + } + }) + .collect::>() + .join("\n\n") + }) + .map_err(|errors| { + errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/relay-codegen/tests/skip_printing_nulls/mod.rs b/compiler/crates/relay-codegen/tests/skip_printing_nulls/mod.rs deleted file mode 100644 index 942ceec108e54..0000000000000 --- a/compiler/crates/relay-codegen/tests/skip_printing_nulls/mod.rs +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. 
and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::FeatureFlag; -use common::FeatureFlags; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::ExecutableDefinition; -use graphql_syntax::parse_executable; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_config::ProjectConfig; -use relay_test_schema::TEST_SCHEMA; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let ast = parse_executable( - fixture.content, - SourceLocationKey::standalone(fixture.file_name), - ) - .unwrap(); - let feature_flags = FeatureFlags { - skip_printing_nulls: FeatureFlag::Enabled, - ..Default::default() - }; - - build(&TEST_SCHEMA, &ast.definitions) - .map(|definitions| { - definitions - .iter() - .map(|def| match def { - ExecutableDefinition::Operation(operation) => { - let mut import_statements = Default::default(); - - let operation = print_operation( - &TEST_SCHEMA, - operation, - &ProjectConfig { - feature_flags: Arc::new(feature_flags.clone()), - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, operation) - } - ExecutableDefinition::Fragment(fragment) => { - let mut import_statements = Default::default(); - - let fragment = print_fragment( - &TEST_SCHEMA, - fragment, - &ProjectConfig { - feature_flags: Arc::new(feature_flags.clone()), - ..Default::default() - }, - &mut import_statements, - ); - format!("{}{}", import_statements, fragment) - } - }) - .collect::>() - .join("\n\n") - }) - .map_err(|errors| { - errors - .into_iter() - .map(|error| format!("{:?}", error)) - .collect::>() - .join("\n\n") - }) -} diff --git a/compiler/crates/relay-codegen/tests/skip_printing_nulls_test.rs b/compiler/crates/relay-codegen/tests/skip_printing_nulls_test.rs index 8052f7903be85..afeaa0385264f 100644 --- 
a/compiler/crates/relay-codegen/tests/skip_printing_nulls_test.rs +++ b/compiler/crates/relay-codegen/tests/skip_printing_nulls_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<664b28507e153b2bcb16860b7d2119c3>> + * @generated SignedSource<> */ mod skip_printing_nulls; @@ -12,9 +12,9 @@ mod skip_printing_nulls; use skip_printing_nulls::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("skip_printing_nulls/fixtures/kitchen-sink.graphql"); let expected = include_str!("skip_printing_nulls/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "skip_printing_nulls/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "skip_printing_nulls/fixtures/kitchen-sink.expected", input, expected).await; } diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen.rs b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen.rs new file mode 100644 index 0000000000000..471cd9928f429 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen.rs @@ -0,0 +1,94 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::ExecutableDefinition; +use graphql_syntax::parse_executable; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_config::ProjectConfig; +use relay_config::TypegenConfig; +use relay_test_schema::TEST_SCHEMA; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let ast = parse_executable( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + ) + .unwrap(); + let feature_flags = FeatureFlags { + skip_printing_nulls: FeatureFlag::Enabled, + ..Default::default() + }; + + build(&TEST_SCHEMA, &ast.definitions) + .map(|definitions| { + definitions + .iter() + .map(|def| match def { + ExecutableDefinition::Operation(operation) => { + let mut import_statements = Default::default(); + + let operation = print_operation( + &TEST_SCHEMA, + operation, + &ProjectConfig { + feature_flags: Arc::new(feature_flags.clone()), + typegen_config: TypegenConfig { + experimental_emit_semantic_nullability_types: fixture + .content + .contains( + "# relay:experimental_emit_semantic_nullability_types", + ), + ..Default::default() + }, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, operation) + } + ExecutableDefinition::Fragment(fragment) => { + let mut import_statements = Default::default(); + + let fragment = print_fragment( + &TEST_SCHEMA, + fragment, + &ProjectConfig { + feature_flags: Arc::new(feature_flags.clone()), + typegen_config: TypegenConfig { + experimental_emit_semantic_nullability_types: fixture + .content + .contains( + "# relay:experimental_emit_semantic_nullability_types", + ), + ..Default::default() + }, + ..Default::default() + }, + &mut import_statements, + ); + format!("{}{}", import_statements, fragment) + } + }) + .collect::>() + .join("\n\n") + }) + .map_err(|errors| { + 
errors + .into_iter() + .map(|error| format!("{:?}", error)) + .collect::>() + .join("\n\n") + }) +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.expected b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.expected new file mode 100644 index 0000000000000..c579449357be0 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types + +fragment MyFragment on Node @throwOnFieldError { + id + name +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "MyFragment", + "selections": [ + { + "kind": "ScalarField", + "name": "id" + }, + { + "kind": "ScalarField", + "name": "name" + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.graphql b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.graphql new file mode 100644 index 0000000000000..c099f34ce1221 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.graphql @@ -0,0 +1,6 @@ +# relay:experimental_emit_semantic_nullability_types + +fragment MyFragment on Node @throwOnFieldError { + id + name +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.expected 
b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.expected new file mode 100644 index 0000000000000..52dfce7ab4f96 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +# expected-to-throw + +mutation MyMutation @throwOnFieldError { + setName(name: "Alice") { + name + } +} +==================================== ERROR ==================================== +Diagnostic(DiagnosticData { message: InvalidDirectiveUsageUnsupportedLocation(DirectiveName("throwOnFieldError")), location: throw_on_field_error_mutation_directive.graphql:95:112, related_information: [], tags: [], severity: Error, data: [], machine_readable: {} }) diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.graphql b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.graphql new file mode 100644 index 0000000000000..12325047e67f3 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.graphql @@ -0,0 +1,8 @@ +# relay:experimental_emit_semantic_nullability_types +# expected-to-throw + +mutation MyMutation @throwOnFieldError { + setName(name: "Alice") { + name + } +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.expected b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.expected new file mode 100644 index 0000000000000..ee4a94759d424 --- /dev/null +++ 
b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.expected @@ -0,0 +1,33 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types + +query MyQuery @throwOnFieldError { + me { + id + name + } +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Operation", + "name": "MyQuery", + "selections": [ + { + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "ScalarField", + "name": "id" + }, + { + "kind": "ScalarField", + "name": "name" + } + ] + } + ] +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.graphql b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.graphql new file mode 100644 index 0000000000000..af74b2a980a08 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.graphql @@ -0,0 +1,8 @@ +# relay:experimental_emit_semantic_nullability_types + +query MyQuery @throwOnFieldError { + me { + id + name + } +} diff --git a/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen_test.rs b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen_test.rs new file mode 100644 index 0000000000000..34dae646f94f1 --- /dev/null +++ b/compiler/crates/relay-codegen/tests/throw_on_field_error_directive_codegen_test.rs @@ -0,0 +1,34 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<> + */ + +mod throw_on_field_error_directive_codegen; + +use throw_on_field_error_directive_codegen::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn throw_on_field_error_fragment_directive() { + let input = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.graphql"); + let expected = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.expected"); + test_fixture(transform_fixture, file!(), "throw_on_field_error_fragment_directive.graphql", "throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_fragment_directive.expected", input, expected).await; +} + +#[tokio::test] +async fn throw_on_field_error_mutation_directive() { + let input = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.graphql"); + let expected = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.expected"); + test_fixture(transform_fixture, file!(), "throw_on_field_error_mutation_directive.graphql", "throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_mutation_directive.expected", input, expected).await; +} + +#[tokio::test] +async fn throw_on_field_error_query_directive() { + let input = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.graphql"); + let expected = include_str!("throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.expected"); + test_fixture(transform_fixture, file!(), "throw_on_field_error_query_directive.graphql", "throw_on_field_error_directive_codegen/fixtures/throw_on_field_error_query_directive.expected", input, expected).await; +} diff --git a/compiler/crates/relay-compiler-playground/README.md b/compiler/crates/relay-compiler-playground/README.md index b884971b70a01..91af385139ade 100644 --- 
a/compiler/crates/relay-compiler-playground/README.md +++ b/compiler/crates/relay-compiler-playground/README.md @@ -52,6 +52,9 @@ yarn link cd ~/fbsource/xplat/js/RKJSModules/Libraries/Relay/oss/__github__/website yarn link relay-compiler-playground +# You may need to clear Docusaurus cache +npx docusaurus clear + # Launch the website in dev mode yarn start ``` diff --git a/compiler/crates/relay-compiler-playground/__tests__/relay_compiler_playground-test.js b/compiler/crates/relay-compiler-playground/__tests__/relay_compiler_playground-test.js index 585fa0cda780a..a0b98444195cb 100644 --- a/compiler/crates/relay-compiler-playground/__tests__/relay_compiler_playground-test.js +++ b/compiler/crates/relay-compiler-playground/__tests__/relay_compiler_playground-test.js @@ -139,7 +139,7 @@ describe('Err', () => { ); expect(actual.Err).toEqual({ ConfigError: - 'unknown field `this_key_does_not_exist`, expected one of `enable_flight_transform`, `enable_relay_resolver_transform`, `hash_supported_argument`, `no_inline`, `enable_3d_branch_arg_generation`, `actor_change_support`, `text_artifacts`, `enable_client_edges`, `enable_provided_variables`, `skip_printing_nulls`, `enable_fragment_aliases` at line 1 column 26', + 'unknown field `this_key_does_not_exist`, expected one of `enable_flight_transform`, `enable_relay_resolver_transform`, `no_inline`, `enable_3d_branch_arg_generation`, `actor_change_support`, `text_artifacts`, `enable_provided_variables`, `skip_printing_nulls`, `enable_fragment_aliases` at line 1 column 26', }); }); test('parse_to_ast', () => { diff --git a/compiler/crates/relay-compiler-playground/src/lib.rs b/compiler/crates/relay-compiler-playground/src/lib.rs index f1fe17adbac2b..8512231ebd66d 100644 --- a/compiler/crates/relay-compiler-playground/src/lib.rs +++ b/compiler/crates/relay-compiler-playground/src/lib.rs @@ -18,6 +18,7 @@ use graphql_text_printer::PrinterOptions; use intern::string_key::Intern; use relay_codegen::print_fragment; use 
relay_codegen::print_operation; +use relay_codegen::print_provided_variables; use relay_config::ProjectConfig; use relay_schema::build_schema_with_extensions; use relay_transforms::apply_transforms; @@ -254,6 +255,7 @@ pub fn parse_to_types_impl( &schema, &project_config, &fragment_locations, + print_provided_variables(&schema, normalization_operation, &project_config), ) })) .collect::>() @@ -314,7 +316,7 @@ fn get_project_config( ) -> Result { let feature_flags: FeatureFlags = serde_json::from_str(feature_flags_json) .map_err(|err| PlaygroundError::ConfigError(format!("{}", err)))?; - let project_name = "test_project".intern(); + let project_name = "test_project".intern().into(); let typegen_config: TypegenConfig = typegen_config_json .map(|str| { serde_json::from_str(str) diff --git a/compiler/crates/relay-compiler/Cargo.toml b/compiler/crates/relay-compiler/Cargo.toml index bdff20454a6ec..b79c9d9c4f087 100644 --- a/compiler/crates/relay-compiler/Cargo.toml +++ b/compiler/crates/relay-compiler/Cargo.toml @@ -1,9 +1,11 @@ -# @generated by autocargo from //relay/oss/crates/relay-compiler:[relay-compiler,relay-compiler-compile_relay_artifacts_test,relay-compiler-compile_relay_artifacts_with_custom_id_test] +# @generated by autocargo from //relay/oss/crates/relay-compiler:[relay-compiler,relay-compiler-compile_relay_artifacts_test,relay-compiler-compile_relay_artifacts_with_custom_id_test,relay-compiler-relay_compiler_integration_test] + [package] name = "relay-compiler" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -14,19 +16,24 @@ path = "tests/compile_relay_artifacts_test.rs" name = "relay_compiler_compile_relay_artifacts_with_custom_id_test" path = "tests/compile_relay_artifacts_with_custom_id_test.rs" +[[test]] +name = "relay_compiler_relay_compiler_integration_test" +path = "tests/relay_compiler_integration_test.rs" + [dependencies] -async-trait = "0.1.58" +async-trait = 
"0.1.71" bincode = "1.3.3" common = { path = "../common" } common-path = "1.0.0" -dashmap = { version = "5.4", features = ["raw-api", "rayon", "serde"] } +dashmap = { version = "5.5.3", features = ["rayon", "serde"] } dependency-analyzer = { path = "../dependency-analyzer" } +docblock-shared = { path = "../docblock-shared" } docblock-syntax = { path = "../docblock-syntax" } dunce = "1.0.2" errors = { path = "../errors" } extract-graphql = { path = "../extract-graphql" } fnv = "1.0" -futures = { version = "0.3.22", features = ["async-await", "compat"] } +futures = { version = "0.3.30", features = ["async-await", "compat"] } glob = "0.3" graphql-cli = { path = "../graphql-cli" } graphql-ir = { path = "../graphql-ir" } @@ -34,36 +41,40 @@ graphql-syntax = { path = "../graphql-syntax" } graphql-text-printer = { path = "../graphql-text-printer" } graphql-watchman = { path = "../graphql-watchman" } hex = "0.4.3" -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } js-config-loader = { path = "../js-config-loader" } lazy_static = "1.4" log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] } md-5 = "0.10" persist-query = { path = "../persist-query" } -rayon = "1.2" -regex = "1.6.0" +petgraph = { version = "0.6.3", features = ["serde-1"] } +rayon = "1.9.0" +regex = "1.9.2" relay-codegen = { path = "../relay-codegen" } relay-config = { path = "../relay-config" } relay-docblock = { path = "../relay-docblock" } relay-schema = { path = "../relay-schema" } relay-transforms = { path = "../relay-transforms" } relay-typegen = { path = "../relay-typegen" } +rustc-hash = "1.1.0" schema = { path = "../schema" } schema-diff = { path = "../schema-diff" } -serde = { version = "1.0.136", features = ["derive", "rc"] } +schema-validate-lib = { path = "../schema-validate" } +serde = { version = "1.0.185", features = ["derive", "rc"] } 
serde_bser = "0.3" -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } sha1 = "0.10.5" sha2 = "0.10.6" signedsource = { path = "../signedsource" } -thiserror = "1.0.36" -tokio = { version = "1.25.0", features = ["full", "test-util", "tracing"] } +thiserror = "1.0.49" +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } walkdir = "2.3" watchman_client = "0.8.0" -zstd = { version = "0.11.2+zstd.1.5.2", features = ["experimental", "zstdmt"] } +zstd = { version = "0.13", features = ["experimental", "zstdmt"] } [dev-dependencies] fixture-tests = { path = "../fixture-tests" } +futures-util = "0.3.30" graphql-test-helpers = { path = "../graphql-test-helpers" } relay-test-schema = { path = "../relay-test-schema" } diff --git a/compiler/crates/relay-compiler/src/artifact_content.rs b/compiler/crates/relay-compiler/src/artifact_content.rs new file mode 100644 index 0000000000000..3e120665b64f3 --- /dev/null +++ b/compiler/crates/relay-compiler/src/artifact_content.rs @@ -0,0 +1,173 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +pub mod content; +pub mod content_section; + +use std::sync::Arc; + +use common::SourceLocationKey; +use content::generate_fragment; +use content::generate_operation; +use content::generate_resolvers_schema_module_content; +use content::generate_split_operation; +use content::generate_updatable_query; +use graphql_ir::FragmentDefinition; +use graphql_ir::OperationDefinition; +use relay_codegen::Printer; +use relay_codegen::QueryID; +use relay_typegen::FragmentLocations; +use schema::SDLSchema; + +use self::content::generate_preloadable_query_parameters; +use crate::config::Config; +use crate::config::ProjectConfig; + +#[derive(Clone)] +pub enum ArtifactContent { + Operation { + normalization_operation: Arc, + reader_operation: Arc, + typegen_operation: Arc, + source_hash: String, + text: Option, + id_and_text_hash: Option, + }, + UpdatableQuery { + reader_operation: Arc, + typegen_operation: Arc, + source_hash: String, + }, + PreloadableQueryParameters { + normalization_operation: Arc, + query_id: QueryID, + }, + Fragment { + reader_fragment: Arc, + typegen_fragment: Arc, + source_hash: Option, + }, + SplitOperation { + normalization_operation: Arc, + typegen_operation: Option>, + source_hash: Option, + no_optional_fields_in_raw_response_type: bool, + }, + ResolversSchema, + Generic { + content: Vec, + }, +} + +impl ArtifactContent { + pub fn as_bytes( + &self, + config: &Config, + project_config: &ProjectConfig, + printer: &mut Printer<'_>, + schema: &SDLSchema, + source_file: SourceLocationKey, + fragment_locations: &FragmentLocations, + ) -> Vec { + let skip_types = + if let Some(extra_artifacts_config) = &project_config.extra_artifacts_config { + (extra_artifacts_config.skip_types_for_artifact)(source_file) + } else { + false + }; + match self { + ArtifactContent::Operation { + normalization_operation, + reader_operation, + typegen_operation, + source_hash, + text, + id_and_text_hash, + } => generate_operation( + config, + project_config, + printer, + 
schema, + normalization_operation, + reader_operation, + typegen_operation, + source_hash.into(), + text, + id_and_text_hash, + skip_types, + fragment_locations, + ) + .unwrap(), + ArtifactContent::UpdatableQuery { + reader_operation, + typegen_operation, + source_hash, + } => generate_updatable_query( + config, + project_config, + printer, + schema, + reader_operation, + typegen_operation, + source_hash.into(), + skip_types, + fragment_locations, + ) + .unwrap(), + ArtifactContent::PreloadableQueryParameters { + normalization_operation, + query_id, + } => generate_preloadable_query_parameters( + config, + project_config, + printer, + schema, + normalization_operation, + query_id, + ) + .unwrap(), + ArtifactContent::SplitOperation { + normalization_operation, + typegen_operation, + no_optional_fields_in_raw_response_type, + source_hash, + } => generate_split_operation( + config, + project_config, + printer, + schema, + normalization_operation, + typegen_operation, + source_hash.as_ref(), + fragment_locations, + *no_optional_fields_in_raw_response_type, + ) + .unwrap(), + ArtifactContent::Fragment { + reader_fragment, + typegen_fragment, + source_hash, + } => generate_fragment( + config, + project_config, + printer, + schema, + reader_fragment, + typegen_fragment, + source_hash.as_ref(), + skip_types, + fragment_locations, + ) + .unwrap(), + ArtifactContent::ResolversSchema => { + generate_resolvers_schema_module_content(config, project_config, printer, schema) + .unwrap() + } + ArtifactContent::Generic { content } => content.clone(), + } + } +} diff --git a/compiler/crates/relay-compiler/src/artifact_content/content.rs b/compiler/crates/relay-compiler/src/artifact_content/content.rs index 7230ad91cae9b..2549c36a20e73 100644 --- a/compiler/crates/relay-compiler/src/artifact_content/content.rs +++ b/compiler/crates/relay-compiler/src/artifact_content/content.rs @@ -14,15 +14,10 @@ use common::NamedItem; use graphql_ir::FragmentDefinition; use 
graphql_ir::FragmentDefinitionName; use graphql_ir::OperationDefinition; -use intern::Lookup; use relay_codegen::build_request_params; use relay_codegen::Printer; use relay_codegen::QueryID; -use relay_codegen::TopLevelStatement; -use relay_codegen::CODEGEN_CONSTANTS; use relay_transforms::is_operation_preloadable; -use relay_transforms::ReactFlightLocalComponentsMetadata; -use relay_transforms::RelayClientComponentMetadata; use relay_transforms::RelayDataDrivenDependencyMetadata; use relay_transforms::ASSIGNABLE_DIRECTIVE; use relay_typegen::generate_fragment_type_exports_section; @@ -44,6 +39,115 @@ use super::content_section::GenericSection; use crate::config::Config; use crate::config::ProjectConfig; +pub fn generate_preloadable_query_parameters( + config: &Config, + project_config: &ProjectConfig, + printer: &mut Printer<'_>, + schema: &SDLSchema, + normalization_operation: &OperationDefinition, + query_id: &QueryID, +) -> Result, FmtError> { + let mut request_parameters = build_request_params(normalization_operation); + let cloned_query_id = Some(query_id.clone()); + request_parameters.id = &cloned_query_id; + + let mut content_sections = ContentSections::default(); + + // -- Begin Docblock Section -- + let extra_annotations = match query_id { + QueryID::Persisted { text_hash, .. 
} => vec![format!("@relayHash {}", text_hash)], + _ => vec![], + }; + content_sections.push(ContentSection::Docblock(generate_docblock_section( + config, + project_config, + extra_annotations, + )?)); + // -- End Docblock Section -- + + // -- Begin Disable Lint Section -- + content_sections.push(ContentSection::Generic(generate_disable_lint_section( + &project_config.typegen_config.language, + )?)); + // -- End Disable Lint Section -- + + // -- Begin Use Strict Section -- + content_sections.push(ContentSection::Generic(generate_use_strict_section( + &project_config.typegen_config.language, + )?)); + // -- End Use Strict Section -- + + // -- Begin Metadata Annotations Section -- + let mut section = CommentAnnotationsSection::default(); + if let Some(QueryID::Persisted { id, .. }) = &request_parameters.id { + writeln!(section, "@relayRequestID {}", id)?; + } + content_sections.push(ContentSection::CommentAnnotations(section)); + // -- End Metadata Annotations Section -- + + // -- Begin Types Section -- + let mut section = GenericSection::default(); + if project_config.typegen_config.language == TypegenLanguage::Flow { + writeln!(section, "/*::")?; + } + + write_import_type_from( + project_config, + &mut section, + "PreloadableConcreteRequest", + "relay-runtime", + )?; + write_import_type_from( + project_config, + &mut section, + &normalization_operation.name.item.0.to_string(), + &format!("./{}.graphql", normalization_operation.name.item.0), + )?; + + if project_config.typegen_config.language == TypegenLanguage::Flow { + writeln!(section, "*/")?; + } + content_sections.push(ContentSection::Generic(section)); + // -- End Types Section -- + + // -- Begin Query Node Section -- + let preloadable_request = printer.print_preloadable_request( + schema, + request_parameters, + normalization_operation, + &mut Default::default(), + ); + let mut section = GenericSection::default(); + + let node_type = format!( + "PreloadableConcreteRequest<{}>", + 
normalization_operation.name.item.0 + ); + + write_variable_value_with_type( + &project_config.typegen_config.language, + &mut section, + "node", + &node_type, + &preloadable_request, + )?; + content_sections.push(ContentSection::Generic(section)); + // -- End Query Node Section -- + + // -- Begin Export Section -- + let mut section = GenericSection::default(); + write_export_generated_node( + &project_config.typegen_config, + &mut section, + "node", + Some(node_type), + )?; + content_sections.push(ContentSection::Generic(section)); + // -- End Export Section -- + + content_sections.into_signed_bytes() +} + #[allow(clippy::too_many_arguments)] pub fn generate_updatable_query( config: &Config, @@ -113,6 +217,7 @@ pub fn generate_updatable_query( schema, project_config, fragment_locations, + None, // TODO: Add/investigrate support for provided variables in updatable queries ) )?; } @@ -178,11 +283,20 @@ pub fn generate_operation( fragment_locations: &FragmentLocations, ) -> Result, FmtError> { let mut request_parameters = build_request_params(normalization_operation); + if id_and_text_hash.is_some() { request_parameters.id = id_and_text_hash; + if project_config + .persist + .as_ref() + .map_or(false, |config| config.include_query_text()) + { + request_parameters.text = text.clone(); + } } else { request_parameters.text = text.clone(); - }; + } + let operation_fragment = FragmentDefinition { name: reader_operation.name.map(|x| FragmentDefinitionName(x.0)), variable_definitions: reader_operation.variable_definitions.clone(), @@ -195,14 +309,14 @@ pub fn generate_operation( let mut content_sections = ContentSections::default(); // -- Begin Docblock Section -- - let v = match id_and_text_hash { + let extra_annotations = match id_and_text_hash { Some(QueryID::Persisted { text_hash, .. 
}) => vec![format!("@relayHash {}", text_hash)], _ => vec![], }; content_sections.push(ContentSection::Docblock(generate_docblock_section( config, project_config, - v, + extra_annotations, )?)); // -- End Docblock Section -- @@ -235,16 +349,6 @@ pub fn generate_operation( if let Some(data_driven_dependency_metadata) = data_driven_dependency_metadata { write_data_driven_dependency_annotation(&mut section, data_driven_dependency_metadata)?; } - if let Some(flight_metadata) = - ReactFlightLocalComponentsMetadata::find(&operation_fragment.directives) - { - write_react_flight_server_annotation(&mut section, flight_metadata)?; - } - let relay_client_component_metadata = - RelayClientComponentMetadata::find(&operation_fragment.directives); - if let Some(relay_client_component_metadata) = relay_client_component_metadata { - write_react_flight_client_annotation(&mut section, relay_client_component_metadata)?; - } content_sections.push(ContentSection::CommentAnnotations(section)); // -- End Metadata Annotations Section -- @@ -268,6 +372,8 @@ pub fn generate_operation( )?; if !skip_types { + let maybe_provided_variables = + printer.print_provided_variables(schema, normalization_operation); write!( section, "{}", @@ -277,6 +383,7 @@ pub fn generate_operation( schema, project_config, fragment_locations, + maybe_provided_variables, ) )?; } @@ -287,27 +394,8 @@ pub fn generate_operation( content_sections.push(ContentSection::Generic(section)); // -- End Types Section -- - // -- Begin Top Level Statements Section -- - let mut section = GenericSection::default(); let mut top_level_statements = Default::default(); - if let Some(provided_variables) = - printer.print_provided_variables(schema, normalization_operation, &mut top_level_statements) - { - let mut provided_variable_text = String::new(); - write_variable_value_with_type( - &project_config.typegen_config.language, - &mut provided_variable_text, - CODEGEN_CONSTANTS.provided_variables_definition.lookup(), - 
relay_typegen::PROVIDED_VARIABLE_TYPE, - &provided_variables, - ) - .unwrap(); - top_level_statements.insert( - CODEGEN_CONSTANTS.provided_variables_definition.to_string(), - TopLevelStatement::VariableDefinition(provided_variable_text), - ); - } - + // -- Begin Query Node Section -- let request = printer.print_request( schema, normalization_operation, @@ -316,11 +404,12 @@ pub fn generate_operation( &mut top_level_statements, ); + // -- Begin Top Level Statements Section -- + let mut section: GenericSection = GenericSection::default(); write!(section, "{}", &top_level_statements)?; content_sections.push(ContentSection::Generic(section)); // -- End Top Level Statements Section -- - // -- Begin Query Node Section -- let mut section = GenericSection::default(); write_variable_value_with_type( &project_config.typegen_config.language, @@ -348,26 +437,38 @@ pub fn generate_operation( if is_operation_preloadable(normalization_operation) && id_and_text_hash.is_some() { match project_config.typegen_config.language { TypegenLanguage::Flow => { - writeln!( - section, - "require('relay-runtime').PreloadableQueryRegistry.set((node.params/*: any*/).id, node);", - )?; - } - TypegenLanguage::JavaScript => { - writeln!( - section, - "require('relay-runtime').PreloadableQueryRegistry.set(node.params.id, node);", - )?; + if project_config.typegen_config.eager_es_modules { + writeln!( + section, + "import {{ PreloadableQueryRegistry }} from 'relay-runtime';", + )?; + writeln!( + section, + "PreloadableQueryRegistry.set((node.params/*: any*/).id, node);", + )?; + } else { + writeln!( + section, + "require('relay-runtime').PreloadableQueryRegistry.set((node.params/*: any*/).id, node);", + )?; + } } - TypegenLanguage::TypeScript => { - writeln!( - section, - "import {{ PreloadableQueryRegistry }} from 'relay-runtime';", - )?; - writeln!( - section, - "PreloadableQueryRegistry.set(node.params.id, node);", - )?; + TypegenLanguage::JavaScript | TypegenLanguage::TypeScript => { + if 
project_config.typegen_config.eager_es_modules { + writeln!( + section, + "import {{ PreloadableQueryRegistry }} from 'relay-runtime';", + )?; + writeln!( + section, + "PreloadableQueryRegistry.set(node.params.id, node);", + )?; + } else { + writeln!( + section, + "require('relay-runtime').PreloadableQueryRegistry.set(node.params.id, node);", + )?; + } } } } @@ -522,7 +623,7 @@ pub fn generate_fragment( project_config, schema, typegen_fragment, - skip_types, + source_hash, fragment_locations, ) } else { @@ -581,16 +682,6 @@ fn generate_read_only_fragment( { write_data_driven_dependency_annotation(&mut section, data_driven_dependency_metadata)?; } - if let Some(flight_metadata) = - ReactFlightLocalComponentsMetadata::find(&reader_fragment.directives) - { - write_react_flight_server_annotation(&mut section, flight_metadata)?; - } - let relay_client_component_metadata = - RelayClientComponentMetadata::find(&reader_fragment.directives); - if let Some(relay_client_component_metadata) = relay_client_component_metadata { - write_react_flight_client_annotation(&mut section, relay_client_component_metadata)?; - } content_sections.push(ContentSection::CommentAnnotations(section)); // -- End Metadata Annotations Section -- @@ -617,7 +708,7 @@ fn generate_read_only_fragment( typegen_fragment, schema, project_config, - fragment_locations + fragment_locations, ) )?; } @@ -681,7 +772,7 @@ fn generate_assignable_fragment( project_config: &ProjectConfig, schema: &SDLSchema, typegen_fragment: &FragmentDefinition, - skip_types: bool, + source_hash: Option<&String>, fragment_locations: &FragmentLocations, ) -> Result, FmtError> { let mut content_sections = ContentSections::default(); @@ -712,18 +803,16 @@ fn generate_assignable_fragment( writeln!(section, "/*::")?; } - if !skip_types { - write!( - section, - "{}", - generate_fragment_type_exports_section( - typegen_fragment, - schema, - project_config, - fragment_locations - ) - )?; - } + write!( + section, + "{}", + 
generate_fragment_type_exports_section( + typegen_fragment, + schema, + project_config, + fragment_locations, + ) + )?; if project_config.typegen_config.language == TypegenLanguage::Flow { writeln!(section, "*/")?; @@ -731,12 +820,43 @@ fn generate_assignable_fragment( content_sections.push(ContentSection::Generic(section)); // -- End Types Section -- + // -- Begin Fragment Node Section -- + let mut section = GenericSection::default(); + write_variable_value_with_type( + &project_config.typegen_config.language, + &mut section, + "node", + "any", + "{}", + )?; + content_sections.push(ContentSection::Generic(section)); + // -- End Fragment Node Section -- + + // -- Begin Fragment Node Hash Section -- + if let Some(source_hash) = source_hash { + let mut section = GenericSection::default(); + write_source_hash( + config, + &project_config.typegen_config.language, + &mut section, + source_hash, + )?; + content_sections.push(ContentSection::Generic(section)); + } + // -- End Fragment Node Hash Section -- + + // -- Begin Fragment Node Export Section -- + let mut section = GenericSection::default(); + write_export_generated_node(&project_config.typegen_config, &mut section, "node", None)?; + content_sections.push(ContentSection::Generic(section)); + // -- End Fragment Node Export Section -- + // -- Begin Export Section -- let mut section = GenericSection::default(); // Assignable fragments should never be passed to useFragment, and thus, we // don't need to emit a reader fragment. // Instead, we only need a named validator export, i.e. - // module.exports.validator = ... + // module.exports.validate = ... 
let named_validator_export = generate_named_validator_export( typegen_fragment, schema, @@ -818,7 +938,7 @@ fn write_import_type_from( } } -fn write_export_generated_node( +pub fn write_export_generated_node( typegen_config: &TypegenConfig, section: &mut dyn Write, variable_node: &str, @@ -843,7 +963,7 @@ fn write_export_generated_node( } } -fn generate_docblock_section( +pub fn generate_docblock_section( config: &Config, project_config: &ProjectConfig, extra_annotations: Vec, @@ -864,7 +984,12 @@ fn generate_docblock_section( } writeln!(section, "@lightSyntaxTransform")?; writeln!(section, "@nogrep")?; - if let Some(codegen_command) = &config.codegen_command { + + if let Some(codegen_command) = &project_config + .codegen_command + .as_ref() + .or(config.codegen_command.as_ref()) + { writeln!(section, "@codegen-command: {}", codegen_command)?; } Ok(section) @@ -924,22 +1049,88 @@ fn write_data_driven_dependency_annotation( Ok(()) } -fn write_react_flight_server_annotation( - section: &mut CommentAnnotationsSection, - flight_local_components_metadata: &ReactFlightLocalComponentsMetadata, -) -> FmtResult { - for item in &flight_local_components_metadata.components { - writeln!(section, "@ReactFlightServerDependency {}", item)?; - } - Ok(()) -} +pub fn generate_resolvers_schema_module_content( + config: &Config, + project_config: &ProjectConfig, + printer: &mut Printer<'_>, + schema: &SDLSchema, +) -> Result, FmtError> { + let mut content_sections = ContentSections::default(); + // -- Begin Docblock Section -- + content_sections.push(ContentSection::Docblock(generate_docblock_section( + config, + project_config, + vec![], + )?)); + // -- End Docblock Section -- -fn write_react_flight_client_annotation( - section: &mut CommentAnnotationsSection, - relay_client_component_metadata: &RelayClientComponentMetadata, -) -> FmtResult { - for value in &relay_client_component_metadata.split_operation_filenames { - writeln!(section, "@ReactFlightClientDependency {}", value)?; + 
// -- Begin Disable Lint Section -- + content_sections.push(ContentSection::Generic(generate_disable_lint_section( + &project_config.typegen_config.language, + )?)); + // -- End Disable Lint Section -- + + // -- Begin Use Strict Section -- + content_sections.push(ContentSection::Generic(generate_use_strict_section( + &project_config.typegen_config.language, + )?)); + // -- End Use Strict Section -- + + // -- Begin Types Section -- + let mut section = GenericSection::default(); + if project_config.typegen_config.language == TypegenLanguage::Flow { + writeln!(section, "/*::")?; } - Ok(()) + write_import_type_from( + project_config, + &mut section, + "SchemaResolvers", + "ReactiveQueryExecutor", + )?; + write_import_type_from( + project_config, + &mut section, + "ResolverFunction, NormalizationSplitOperation", + "relay-runtime", + )?; + writeln!(section)?; + if project_config.typegen_config.language == TypegenLanguage::Flow { + writeln!(section, "*/")?; + } + content_sections.push(ContentSection::Generic(section)); + // -- End Types Section -- + + let mut top_level_statements = Default::default(); + let resolvers_schema = printer.print_resolvers_schema(schema, &mut top_level_statements); + + // -- Begin Top Level Statements Section -- + let mut section: GenericSection = GenericSection::default(); + write!(section, "{}", &top_level_statements)?; + content_sections.push(ContentSection::Generic(section)); + // -- End Top Level Statements Section -- + + // -- Begin Resolvers Schema Section -- + let mut section = GenericSection::default(); + write_variable_value_with_type( + &project_config.typegen_config.language, + &mut section, + "schema_resolvers", + "SchemaResolvers", + &resolvers_schema, + )?; + content_sections.push(ContentSection::Generic(section)); + // -- End Resolvers Schema Section -- + + // -- Begin Exports Section -- + let mut section = GenericSection::default(); + write_export_generated_node( + &project_config.typegen_config, + &mut section, + 
"schema_resolvers", + None, + )?; + content_sections.push(ContentSection::Generic(section)); + // -- End Exports Section -- + + content_sections.into_signed_bytes() } diff --git a/compiler/crates/relay-compiler/src/artifact_content/mod.rs b/compiler/crates/relay-compiler/src/artifact_content/mod.rs deleted file mode 100644 index 7b0163272ebda..0000000000000 --- a/compiler/crates/relay-compiler/src/artifact_content/mod.rs +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod content; -mod content_section; - -use std::sync::Arc; - -use common::SourceLocationKey; -use content::generate_fragment; -use content::generate_operation; -use content::generate_split_operation; -use content::generate_updatable_query; -use graphql_ir::FragmentDefinition; -use graphql_ir::OperationDefinition; -use relay_codegen::Printer; -use relay_codegen::QueryID; -use relay_typegen::FragmentLocations; -use schema::SDLSchema; - -use crate::config::Config; -use crate::config::ProjectConfig; - -#[derive(Clone)] -pub enum ArtifactContent { - Operation { - normalization_operation: Arc, - reader_operation: Arc, - typegen_operation: Arc, - source_hash: String, - text: Option, - id_and_text_hash: Option, - }, - UpdatableQuery { - reader_operation: Arc, - typegen_operation: Arc, - source_hash: String, - }, - Fragment { - reader_fragment: Arc, - typegen_fragment: Arc, - source_hash: Option, - }, - SplitOperation { - normalization_operation: Arc, - typegen_operation: Option>, - source_hash: Option, - no_optional_fields_in_raw_response_type: bool, - }, - Generic { - content: Vec, - }, -} - -impl ArtifactContent { - pub fn as_bytes( - &self, - config: &Config, - project_config: &ProjectConfig, - printer: &mut Printer<'_>, - schema: &SDLSchema, - source_file: SourceLocationKey, - fragment_locations: &FragmentLocations, - ) -> Vec 
{ - let skip_types = project_config - .skip_types_for_artifact - .as_ref() - .map_or(false, |skip_types_fn| skip_types_fn(source_file)); - match self { - ArtifactContent::Operation { - normalization_operation, - reader_operation, - typegen_operation, - source_hash, - text, - id_and_text_hash, - } => generate_operation( - config, - project_config, - printer, - schema, - normalization_operation, - reader_operation, - typegen_operation, - source_hash.into(), - text, - id_and_text_hash, - skip_types, - fragment_locations, - ) - .unwrap(), - ArtifactContent::UpdatableQuery { - reader_operation, - typegen_operation, - source_hash, - } => generate_updatable_query( - config, - project_config, - printer, - schema, - reader_operation, - typegen_operation, - source_hash.into(), - skip_types, - fragment_locations, - ) - .unwrap(), - ArtifactContent::SplitOperation { - normalization_operation, - typegen_operation, - no_optional_fields_in_raw_response_type, - source_hash, - } => generate_split_operation( - config, - project_config, - printer, - schema, - normalization_operation, - typegen_operation, - source_hash.as_ref(), - fragment_locations, - *no_optional_fields_in_raw_response_type, - ) - .unwrap(), - ArtifactContent::Fragment { - reader_fragment, - typegen_fragment, - source_hash, - } => generate_fragment( - config, - project_config, - printer, - schema, - reader_fragment, - typegen_fragment, - source_hash.as_ref(), - skip_types, - fragment_locations, - ) - .unwrap(), - ArtifactContent::Generic { content } => content.clone(), - } - } -} diff --git a/compiler/crates/relay-compiler/src/artifact_map.rs b/compiler/crates/relay-compiler/src/artifact_map.rs index dfc5732c4348d..f7f98a4b418c8 100644 --- a/compiler/crates/relay-compiler/src/artifact_map.rs +++ b/compiler/crates/relay-compiler/src/artifact_map.rs @@ -9,8 +9,10 @@ use std::path::PathBuf; use dashmap::mapref::entry::Entry; use dashmap::DashMap; +use docblock_shared::ResolverSourceHash; use 
graphql_ir::ExecutableDefinitionName; use relay_codegen::QueryID; +use relay_transforms::ArtifactSourceKeyData; use serde::Deserialize; use serde::Serialize; @@ -25,7 +27,23 @@ pub struct ArtifactRecord { } /// A map from DefinitionName to output artifacts records #[derive(Default, Serialize, Deserialize, Debug, Clone)] -pub struct ArtifactMap(pub DashMap>); +pub struct ArtifactMap(pub DashMap>); + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq, Hash)] +pub enum ArtifactSourceKey { + /// Derieved from a GraphQL Executable Definition + ExecutableDefinition(ExecutableDefinitionName), + /// Derieved from a RelayResolver docblock + ResolverHash(ResolverSourceHash), + /// Derived from GraphQL Schema + Schema(), +} + +impl From for ArtifactSourceKey { + fn from(directive: ArtifactSourceKeyData) -> Self { + ArtifactSourceKey::ResolverHash(directive.0) + } +} impl ArtifactMap { pub fn insert(&self, artifact: Artifact) { @@ -42,8 +60,8 @@ impl ArtifactMap { }, }; - for source_definition_name in artifact.source_definition_names { - match self.0.entry(source_definition_name) { + for source_key in artifact.artifact_source_keys { + match self.0.entry(source_key) { Entry::Occupied(mut entry) => { entry.get_mut().push(artifact_tuple.clone()); } diff --git a/compiler/crates/relay-compiler/src/build_project.rs b/compiler/crates/relay-compiler/src/build_project.rs new file mode 100644 index 0000000000000..7f8d20f5666c4 --- /dev/null +++ b/compiler/crates/relay-compiler/src/build_project.rs @@ -0,0 +1,787 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//! This module is responsible to build a single project. It does not handle +//! watch mode or other state. 
+ +mod artifact_generated_types; +pub mod artifact_writer; +mod build_ir; +mod build_resolvers_schema; +pub mod build_schema; +mod generate_artifacts; +pub mod generate_extra_artifacts; +pub mod get_artifacts_file_hash_map; +mod log_program_stats; +mod persist_operations; +mod project_asts; +mod source_control; +mod validate; + +use std::fmt; +use std::path::PathBuf; +use std::sync::Arc; + +pub use artifact_generated_types::ArtifactGeneratedTypes; +use build_ir::BuildIRResult; +pub use build_ir::SourceHashes; +pub use build_schema::build_schema; +use common::sync::*; +use common::Diagnostic; +use common::PerfLogEvent; +use common::PerfLogger; +use common::WithDiagnostics; +use dashmap::mapref::entry::Entry; +use dashmap::DashSet; +use dependency_analyzer::get_ir_definition_references; +use fnv::FnvBuildHasher; +use fnv::FnvHashMap; +use fnv::FnvHashSet; +pub use generate_artifacts::generate_artifacts; +pub use generate_artifacts::generate_preloadable_query_parameters_artifact; +pub use generate_artifacts::Artifact; +pub use generate_artifacts::ArtifactContent; +use graphql_ir::ExecutableDefinition; +use graphql_ir::ExecutableDefinitionName; +use graphql_ir::FragmentDefinitionNameSet; +use graphql_ir::Program; +use indexmap::IndexSet; +use log::debug; +use log::info; +use log::warn; +use petgraph::unionfind::UnionFind; +use rayon::iter::IntoParallelRefIterator; +use rayon::slice::ParallelSlice; +use relay_codegen::Printer; +use relay_config::ProjectName; +use relay_transforms::apply_transforms; +use relay_transforms::CustomTransformsConfig; +use relay_transforms::Programs; +use relay_typegen::FragmentLocations; +use rustc_hash::FxHashMap; +use rustc_hash::FxHashSet; +use schema::SDLSchema; +use schema_diff::check::IncrementalBuildSchemaChange; +use schema_diff::check::SchemaChangeSafety; +pub use source_control::source_control_for_root; +pub use validate::validate; +pub use validate::AdditionalValidations; + +use self::log_program_stats::print_stats; +pub use 
self::project_asts::find_duplicates; +pub use self::project_asts::get_project_asts; +pub use self::project_asts::ProjectAstData; +pub use self::project_asts::ProjectAsts; +use super::artifact_content; +use crate::artifact_map::ArtifactMap; +use crate::artifact_map::ArtifactSourceKey; +use crate::compiler_state::ArtifactMapKind; +use crate::compiler_state::CompilerState; +use crate::config::Config; +use crate::config::ProjectConfig; +use crate::errors::BuildProjectError; +use crate::file_source::SourceControlUpdateStatus; +use crate::graphql_asts::GraphQLAsts; + +type BuildProjectOutput = WithDiagnostics<(ProjectName, Arc, Programs, Vec)>; +type BuildProgramsOutput = WithDiagnostics<(Vec, Arc)>; + +pub enum BuildProjectFailure { + Error(BuildProjectError), + Cancelled, +} + +impl From for BuildProjectFailure { + fn from(err: BuildProjectError) -> BuildProjectFailure { + BuildProjectFailure::Error(err) + } +} + +pub enum BuildMode { + Full, + Incremental, + IncrementalWithSchemaChanges(FxHashSet), +} +impl fmt::Debug for BuildMode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + BuildMode::Full => write!(f, "Full"), + BuildMode::Incremental => write!(f, "Incremental"), + BuildMode::IncrementalWithSchemaChanges(changes) => { + write!(f, "IncrementalWithSchemaChanges({:?})", changes) + } + } + } +} + +/// This program doesn't have IR transforms applied to it, so it's not optimized. +/// It's perfect for the LSP server: we have all the documents with +/// their locations to provide information to go_to_definition, hover, etc. +pub fn build_raw_program( + project_config: &ProjectConfig, + project_asts: ProjectAsts, + schema: Arc, + log_event: &impl PerfLogEvent, + build_mode: BuildMode, +) -> Result<(Program, SourceHashes), BuildProjectError> { + // Build a type aware IR. 
+ let BuildIRResult { ir, source_hashes } = log_event.time("build_ir_time", || { + build_ir::build_ir(project_config, project_asts, &schema, build_mode, log_event).map_err( + |errors| BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }, + ) + })?; + + // Turn the IR into a base Program. + let program = log_event.time("build_program_time", || { + Program::from_definitions(schema, ir) + }); + + Ok((program, source_hashes)) +} + +const MIN_CHUNK_SIZE: usize = 8192; + +/// Build raw programs and divide them into chunks for parallelization +fn build_raw_program_chunks( + project_config: &ProjectConfig, + project_asts: ProjectAsts, + schema: Arc, + log_event: &impl PerfLogEvent, + build_mode: BuildMode, +) -> Result<(Vec, SourceHashes), BuildProjectError> { + // Build a type aware IR. + let BuildIRResult { ir, source_hashes } = log_event.time("build_ir_time", || { + build_ir::build_ir(project_config, project_asts, &schema, build_mode, log_event).map_err( + |errors| BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }, + ) + })?; + + let chunks = if ir.len() < MIN_CHUNK_SIZE { + vec![ir] + } else { + let chunkify_time = log_event.start("chunkify_project_time"); + let dependency_map = get_ir_definition_references(&schema, &ir); + let definition_indexes: IndexSet = ir + .iter() + .map(|def| match def { + ExecutableDefinition::Operation(operation) => { + ExecutableDefinitionName::OperationDefinitionName(operation.name.item) + } + ExecutableDefinition::Fragment(fragment) => { + ExecutableDefinitionName::FragmentDefinitionName(fragment.name.item) + } + }) + .collect(); + + let mut unionfind = UnionFind::::new(definition_indexes.len()); + for (source, destinations) in &dependency_map { + let source_index = definition_indexes.get_index_of(source).unwrap(); + for destination in destinations { + let destination_index = definition_indexes.get_index_of(destination).unwrap(); + unionfind.union(source_index, 
destination_index); + } + } + + let mut groups = FxHashMap::default(); + for (idx, def) in ir.into_iter().enumerate() { + let group = unionfind.find(idx); + groups.entry(group).or_insert_with(Vec::new).push(def); + } + + let mut chunks = vec![]; + let mut buffer = Vec::new(); + for group in groups.into_values() { + if group.len() > MIN_CHUNK_SIZE { + chunks.push(group); + } else { + buffer.extend(group); + if buffer.len() > MIN_CHUNK_SIZE { + chunks.push(std::mem::take(&mut buffer)); + } + } + } + if !buffer.is_empty() { + chunks.push(buffer); + } + log_event.stop(chunkify_time); + chunks + }; + + // Turn the IR into base Programs. + let programs = log_event.time("build_program_time", || { + chunks + .into_iter() + .map(|definitions| Program::from_definitions(Arc::clone(&schema), definitions)) + .collect() + }); + Ok((programs, source_hashes)) +} + +pub fn validate_program( + config: &Config, + project_config: &ProjectConfig, + program: &Program, + log_event: &impl PerfLogEvent, +) -> Result, BuildProjectError> { + let timer = log_event.start("validate_time"); + log_event.number("validate_documents_count", program.document_count()); + let result = validate(program, project_config, &config.additional_validations).map_or_else( + |errors| { + Err(BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }) + }, + |result| Ok(result.diagnostics), + ); + + log_event.stop(timer); + + result +} + +/// Apply various chains of transforms to create a set of output programs. 
+pub fn transform_program( + project_config: &ProjectConfig, + program: Arc, + base_fragment_names: Arc, + perf_logger: Arc, + log_event: &impl PerfLogEvent, + custom_transforms_config: Option<&CustomTransformsConfig>, +) -> Result { + let timer = log_event.start("apply_transforms_time"); + let result = apply_transforms( + project_config, + program, + base_fragment_names, + perf_logger, + Some(print_stats), + custom_transforms_config, + ) + .map_err(|errors| { + BuildProjectFailure::Error(BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }) + }); + + log_event.stop(timer); + + result +} + +pub fn build_programs( + config: &Config, + project_config: &ProjectConfig, + compiler_state: &CompilerState, + project_asts: ProjectAsts, + base_fragment_names: FragmentDefinitionNameSet, + schema: Arc, + log_event: &impl PerfLogEvent, + perf_logger: Arc, +) -> Result { + let project_name = project_config.name; + let mut build_mode = if !compiler_state.has_processed_changes() { + BuildMode::Full + } else { + let project_schema_change = compiler_state.schema_change_safety( + log_event, + project_name, + &project_config.schema_config, + ); + match project_schema_change { + SchemaChangeSafety::Unsafe => BuildMode::Full, + SchemaChangeSafety::Safe | SchemaChangeSafety::SafeWithIncrementalBuild(_) => { + let base_schema_change = if let Some(base) = project_config.base { + compiler_state.schema_change_safety( + log_event, + base, + &project_config.schema_config, + ) + } else { + SchemaChangeSafety::Safe + }; + match (project_schema_change, base_schema_change) { + (SchemaChangeSafety::Unsafe, _) => BuildMode::Full, + (_, SchemaChangeSafety::Unsafe) => BuildMode::Full, + (SchemaChangeSafety::Safe, SchemaChangeSafety::Safe) => BuildMode::Incremental, + (SchemaChangeSafety::SafeWithIncrementalBuild(c), SchemaChangeSafety::Safe) => { + BuildMode::IncrementalWithSchemaChanges(c) + } + (SchemaChangeSafety::Safe, 
SchemaChangeSafety::SafeWithIncrementalBuild(c)) => { + BuildMode::IncrementalWithSchemaChanges(c) + } + ( + SchemaChangeSafety::SafeWithIncrementalBuild(c1), + SchemaChangeSafety::SafeWithIncrementalBuild(c2), + ) => { + BuildMode::IncrementalWithSchemaChanges(c1.into_iter().chain(c2).collect()) + } + } + } + } + }; + if !config.has_schema_change_incremental_build { + // Killswitch here to bail out of schema based incremental builds + build_mode = if let BuildMode::IncrementalWithSchemaChanges(_) = build_mode { + BuildMode::Full + } else { + build_mode + } + } + log_event.bool( + "is_incremental_build", + match build_mode { + BuildMode::Incremental | BuildMode::IncrementalWithSchemaChanges(_) => true, + BuildMode::Full => false, + }, + ); + log_event.string( + "build_mode", + match build_mode { + BuildMode::Full => String::from("Full"), + BuildMode::Incremental => String::from("Incremental"), + BuildMode::IncrementalWithSchemaChanges(_) => { + String::from("IncrementalWithSchemaChanges") + } + }, + ); + let (programs, source_hashes) = + build_raw_program_chunks(project_config, project_asts, schema, log_event, build_mode)?; + + if compiler_state.should_cancel_current_build() { + debug!("Build is cancelled: updates in source code/or new file changes are pending."); + return Err(BuildProjectFailure::Cancelled); + } + let base_fragment_names = Arc::new(base_fragment_names); + let results: Vec<(Programs, Vec)> = programs + .into_par_iter() + .map(|program| { + // Call validation rules that go beyond type checking. 
+ // FIXME: Return non-fatal diagnostics from transforms (only validations for now) + let diagnostics = validate_program(config, project_config, &program, log_event)?; + + let programs = transform_program( + project_config, + Arc::new(program), + Arc::clone(&base_fragment_names), + Arc::clone(&perf_logger), + log_event, + config.custom_transforms.as_ref(), + )?; + + Ok((programs, diagnostics)) + }) + .collect::, BuildProjectFailure>>()?; + + let len = results.len(); + let (programs, diagnostics) = results.into_iter().fold( + (Vec::with_capacity(len), vec![]), + |(mut programs, mut diagnostics), (temp_programs, temp_diagnostics)| { + programs.push(temp_programs); + diagnostics.extend(temp_diagnostics); + (programs, diagnostics) + }, + ); + + Ok(WithDiagnostics { + item: (programs, Arc::new(source_hashes)), + diagnostics, + }) +} + +pub fn build_project( + config: &Config, + project_config: &ProjectConfig, + compiler_state: &CompilerState, + graphql_asts_map: &FnvHashMap, + perf_logger: Arc, +) -> Result { + let log_event = perf_logger.create_event("build_project"); + let build_time = log_event.start("build_project_time"); + let project_name = project_config.name; + log_event.string("project", project_name.to_string()); + info!("[{}] compiling...", project_name); + + // Construct a schema instance including project specific extensions. 
+ let schema = log_event + .time("build_schema_time", || { + build_schema( + compiler_state, + config, + project_config, + graphql_asts_map, + &log_event, + ) + }) + .map_err(|errors| { + BuildProjectFailure::Error(BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }) + })?; + + let ProjectAstData { + project_asts, + base_fragment_names, + } = get_project_asts(&schema, graphql_asts_map, project_config)?; + + if compiler_state.should_cancel_current_build() { + debug!("Build is cancelled: updates in source code/or new file changes are pending."); + return Err(BuildProjectFailure::Cancelled); + } + + // Apply different transform pipelines to produce the `Programs`. + let WithDiagnostics { + item: (programs, source_hashes), + diagnostics, + } = build_programs( + config, + project_config, + compiler_state, + project_asts, + base_fragment_names, + Arc::clone(&schema), + &log_event, + Arc::clone(&perf_logger), + )?; + + if compiler_state.should_cancel_current_build() { + debug!("Build is cancelled: updates in source code/or new file changes are pending."); + return Err(BuildProjectFailure::Cancelled); + } + + // Generate artifacts by collecting information from the `Programs`. 
+ let artifacts_timer = log_event.start("generate_artifacts_time"); + let artifacts = programs + .par_iter() + .map(|programs| generate_artifacts(project_config, programs, Arc::clone(&source_hashes))) + .flatten() + .collect(); + log_event.stop(artifacts_timer); + + let mut iter: std::vec::IntoIter = programs.into_iter(); + let mut programs = iter.next().expect("Expect at least one result"); + for temp_programs in iter { + merge_programs(&mut programs, temp_programs); + } + + log_event.number( + "generated_artifacts", + programs.reader.document_count() + programs.normalization.document_count(), + ); + + log_event.stop(build_time); + log_event.complete(); + Ok(WithDiagnostics { + item: (project_config.name, schema, programs, artifacts), + diagnostics, + }) +} + +fn merge_programs(onto: &mut Programs, from: Programs) { + merge_program(Arc::get_mut(&mut onto.source).unwrap(), from.source); + merge_program(Arc::get_mut(&mut onto.reader).unwrap(), from.reader); + merge_program( + Arc::get_mut(&mut onto.normalization).unwrap(), + from.normalization, + ); + merge_program( + Arc::get_mut(&mut onto.operation_text).unwrap(), + from.operation_text, + ); + merge_program(Arc::get_mut(&mut onto.typegen).unwrap(), from.typegen); +} + +fn merge_program(onto: &mut Program, from: Arc) { + // Note: this it the inner implementation of the unstable "unwrap_or_clone" + let from = Arc::try_unwrap(from).unwrap_or_else(|arc| (*arc).clone()); + onto.fragments.extend(from.fragments); + onto.operations.extend(from.operations); +} + +#[allow(clippy::too_many_arguments)] +pub async fn commit_project( + config: &Config, + project_config: &ProjectConfig, + perf_logger: Arc, + schema: &SDLSchema, + programs: Programs, + mut artifacts: Vec, + artifact_map: Arc, + // Definitions/Sources that are removed from the previous artifact map + removed_artifact_sources: Vec, + // Dirty artifacts that should be removed if no longer in the artifacts map + mut artifacts_to_remove: DashSet, + 
source_control_update_status: Arc, +) -> Result { + let log_event = perf_logger.create_event("commit_project"); + log_event.string("project", project_config.name.to_string()); + let commit_time = log_event.start("commit_project_time"); + + let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); + if source_control_update_status.is_started() { + debug!("commit_project cancelled before persisting due to source control updates"); + return Err(BuildProjectFailure::Cancelled); + } + + if let Some(operation_persister) = config + .create_operation_persister + .as_ref() + .and_then(|create_fn| create_fn(project_config)) + { + let persist_operations_timer = log_event.start("persist_operations_time"); + persist_operations::persist_operations( + &mut artifacts, + &config.root_dir, + config, + project_config, + &(*operation_persister), + &log_event, + &programs, + ) + .await?; + log_event.stop(persist_operations_timer); + } + + if source_control_update_status.is_started() { + debug!( + "commit_project cancelled before generating extra artifacts due to source control updates" + ); + return Err(BuildProjectFailure::Cancelled); + } + + // In some cases we need to create additional (platform specific) artifacts + // For that, we will use `generate_extra_artifacts` from the configs + if let Some(generate_extra_artifacts_fn) = &config.generate_extra_artifacts { + log_event.time("generate_extra_artifacts_time", || { + artifacts.extend(generate_extra_artifacts_fn( + config, + project_config, + schema, + &programs, + &artifacts, + )) + }); + } + + if source_control_update_status.is_started() { + debug!("commit_project cancelled before writing artifacts due to source control updates"); + return Err(BuildProjectFailure::Cancelled); + } + + let should_stop_updating_artifacts = || { + if source_control_update_status.is_started() { + debug!("artifact_writer updates cancelled due source control updates"); + true + } else { + false + } + }; + + let 
artifacts_file_hash_map = match &config.get_artifacts_file_hash_map { + Some(get_fn) => { + let get_artifacts_file_hash_map_timer = + log_event.start("get_artifacts_file_hash_map_time"); + let res = get_fn(&artifacts).await; + log_event.stop(get_artifacts_file_hash_map_timer); + res + } + _ => None, + }; + + // Write the generated artifacts to disk. This step is separate from + // generating artifacts or persisting to avoid partial writes in case of + // errors as much as possible. + let next_artifact_map = match Arc::as_ref(&artifact_map) { + ArtifactMapKind::Unconnected(existing_artifacts) => { + let mut existing_artifacts = existing_artifacts.clone(); + let write_artifacts_time = log_event.start("write_artifacts_time"); + write_artifacts( + config, + project_config, + schema, + should_stop_updating_artifacts, + &artifacts, + &fragment_locations, + &artifacts_file_hash_map, + )?; + for artifact in &artifacts { + if !existing_artifacts.remove(&artifact.path) { + debug!( + "[{}] new artifact {:?} from definitions {:?}", + project_config.name, &artifact.path, &artifact.artifact_source_keys + ); + } + } + log_event.stop(write_artifacts_time); + let delete_artifacts_time = log_event.start("delete_artifacts_time"); + for remaining_artifact in &existing_artifacts { + if should_stop_updating_artifacts() { + break; + } + let path = config.root_dir.join(remaining_artifact); + config.artifact_writer.remove(path)?; + } + log_event.stop(delete_artifacts_time); + ArtifactMap::from(artifacts) + } + ArtifactMapKind::Mapping(artifact_map) => { + let artifact_map = artifact_map.clone(); + let current_paths_map = ArtifactMap::default(); + let write_artifacts_incremental_time = + log_event.start("write_artifacts_incremental_time"); + + // Write or update artifacts + write_artifacts( + config, + project_config, + schema, + should_stop_updating_artifacts, + &artifacts, + &fragment_locations, + &artifacts_file_hash_map, + )?; + artifacts.into_par_iter().for_each(|artifact| { + 
current_paths_map.insert(artifact); + }); + log_event.stop(write_artifacts_incremental_time); + + log_event.time("update_artifact_map_time", || { + // All generated paths for removed definitions should be removed + for name in &removed_artifact_sources { + if let Some((_, artifacts)) = artifact_map.0.remove(name) { + artifacts_to_remove.extend(artifacts.into_iter().map(|a| a.path)); + } + } + // Update the artifact map, and delete any removed artifacts + current_paths_map.0.into_par_iter().for_each( + |(definition_name, artifact_records)| match artifact_map + .0 + .entry(definition_name) + { + Entry::Occupied(mut entry) => { + let prev_records = entry.get_mut(); + let current_records_paths = + FnvHashSet::from_iter(artifact_records.iter().map(|r| &r.path)); + + for prev_record in prev_records.drain(..) { + if !current_records_paths.contains(&prev_record.path) { + artifacts_to_remove.insert(prev_record.path); + } + } + *prev_records = artifact_records; + } + Entry::Vacant(entry) => { + entry.insert(artifact_records); + } + }, + ); + // Filter out any artifact that is in the artifact map + if !artifacts_to_remove.is_empty() { + artifact_map.0.par_iter().for_each(|entry| { + for artifact in entry.value() { + artifacts_to_remove.remove(&artifact.path); + } + }); + } + }); + let delete_artifacts_incremental_time = + log_event.start("delete_artifacts_incremental_time"); + // The remaining dirty artifacts are no longer required + for path in artifacts_to_remove { + if should_stop_updating_artifacts() { + break; + } + config.artifact_writer.remove(config.root_dir.join(path))?; + } + log_event.stop(delete_artifacts_incremental_time); + + artifact_map + } + }; + + if source_control_update_status.is_started() { + log_event.number("update_artifacts_after_source_control_update", 1); + debug!( + "We just updated artifacts after source control update happened. Most likely we have outdated artifacts now..." 
+ ); + warn!( + r#" +Build canceled due to a source control update while we're writing artifacts. +The compiler may produce outdated artifacts, but it will regenerate the correct set after the update is completed."# + ); + return Err(BuildProjectFailure::Cancelled); + } else { + // For now, lets log how often this is happening, so we can decide if we want to + // adjust the way we write artifacts. For example, we could write them to the temp + // directory first, then move to a correct destination. + log_event.number("update_artifacts_after_source_control_update", 0); + } + + info!( + "[{}] compiled documents: {} reader, {} normalization, {} operation text", + project_config.name, + programs.reader.document_count(), + programs.normalization.document_count(), + programs.operation_text.document_count() + ); + log_event.stop(commit_time); + log_event.complete(); + + Ok(next_artifact_map) +} + +fn write_artifacts bool + Sync + Send>( + config: &Config, + project_config: &ProjectConfig, + schema: &SDLSchema, + should_stop_updating_artifacts: F, + artifacts: &[Artifact], + fragment_locations: &FragmentLocations, + artifacts_file_hash_map: &Option>>, +) -> Result<(), BuildProjectFailure> { + artifacts.par_chunks(8).try_for_each_init( + || Printer::with_dedupe(project_config), + |printer, artifacts| { + for artifact in artifacts { + if should_stop_updating_artifacts() { + return Err(BuildProjectFailure::Cancelled); + } + let path = config.root_dir.join(&artifact.path); + let content = artifact.content.as_bytes( + config, + project_config, + printer, + schema, + artifact.source_file, + fragment_locations, + ); + let file_hash = match artifact.path.to_str() { + Some(key) => artifacts_file_hash_map + .as_ref() + .and_then(|map| map.get(key).cloned().flatten()), + _ => None, + }; + if config + .artifact_writer + .should_write(&path, &content, file_hash)? 
+ { + config.artifact_writer.write(path, content)?; + } + } + Ok(()) + }, + )?; + Ok(()) +} diff --git a/compiler/crates/relay-compiler/src/build_project/artifact_generated_types.rs b/compiler/crates/relay-compiler/src/build_project/artifact_generated_types.rs index e59e1066a8c05..3152d2eb87bcc 100644 --- a/compiler/crates/relay-compiler/src/build_project/artifact_generated_types.rs +++ b/compiler/crates/relay-compiler/src/build_project/artifact_generated_types.rs @@ -43,7 +43,13 @@ impl ArtifactGeneratedTypes { ("Query", "ConcreteRequest, Query") } } - OperationKind::Mutation => ("Mutation", "ConcreteRequest, Mutation"), + OperationKind::Mutation => { + if is_client_only { + ("Mutation", "ClientRequest, Mutation") + } else { + ("Mutation", "ConcreteRequest, Mutation") + } + } OperationKind::Subscription => ( "GraphQLSubscription", "ConcreteRequest, GraphQLSubscription", diff --git a/compiler/crates/relay-compiler/src/build_project/artifact_writer.rs b/compiler/crates/relay-compiler/src/build_project/artifact_writer.rs index 41657af3e3913..83578809f02d7 100644 --- a/compiler/crates/relay-compiler/src/build_project/artifact_writer.rs +++ b/compiler/crates/relay-compiler/src/build_project/artifact_writer.rs @@ -10,6 +10,7 @@ use std::fs::create_dir_all; use std::fs::File; use std::io; use std::io::prelude::*; +use std::path::Path; use std::path::PathBuf; use std::sync::atomic::AtomicUsize; use std::sync::Mutex; @@ -19,45 +20,44 @@ use dashmap::DashSet; use log::info; use serde::Serialize; use serde::Serializer; +use sha1::Digest; +use sha1::Sha1; +use crate::build_project::source_control::SourceControl; use crate::errors::BuildProjectError; use crate::errors::Error; type BuildProjectResult = Result<(), BuildProjectError>; pub trait ArtifactWriter { - fn should_write(&self, path: &PathBuf, content: &[u8]) -> Result; + fn should_write( + &self, + path: &Path, + content: &[u8], + hash: Option, + ) -> Result; fn write(&self, path: PathBuf, content: Vec) -> 
BuildProjectResult; fn remove(&self, path: PathBuf) -> BuildProjectResult; fn finalize(&self) -> crate::errors::Result<()>; } -type SourceControlFn = - fn(&PathBuf, &Mutex>, &Mutex>) -> crate::errors::Result<()>; +#[derive(Default)] pub struct ArtifactFileWriter { added: Mutex>, removed: Mutex>, - source_control_fn: Option, + source_control: Option>, root_dir: PathBuf, } -impl Default for ArtifactFileWriter { - fn default() -> Self { - Self { - added: Default::default(), - removed: Default::default(), - source_control_fn: None, - root_dir: Default::default(), - } - } -} - impl ArtifactFileWriter { - pub fn new(source_control_fn: Option, root_dir: PathBuf) -> Self { + pub fn new( + source_control: Option>, + root_dir: PathBuf, + ) -> Self { Self { added: Default::default(), removed: Default::default(), - source_control_fn, + source_control, root_dir, } } @@ -78,11 +78,21 @@ impl ArtifactFileWriter { } } impl ArtifactWriter for ArtifactFileWriter { - fn should_write(&self, path: &PathBuf, content: &[u8]) -> Result { - content_is_different(path, content).map_err(|error| BuildProjectError::WriteFileError { - file: path.clone(), + fn should_write( + &self, + path: &Path, + content: &[u8], + hash: Option, + ) -> Result { + let op = |error| BuildProjectError::WriteFileError { + file: path.to_owned(), source: error, - }) + }; + if let Some(file_hash) = hash { + hash_is_different(file_hash, content).map_err(op) + } else { + content_is_different(path, content).map_err(op) + } } fn write(&self, path: PathBuf, content: Vec) -> BuildProjectResult { @@ -104,8 +114,9 @@ impl ArtifactWriter for ArtifactFileWriter { } fn finalize(&self) -> crate::errors::Result<()> { - if let Some(source_control_fn) = self.source_control_fn { - (source_control_fn)(&self.root_dir, &self.added, &self.removed) + if let Some(source_control) = &self.source_control { + source_control.add_files(&self.root_dir, &self.added)?; + source_control.remove_files(&self.root_dir, &self.removed) } else { Ok(()) } @@ 
-160,14 +171,23 @@ impl ArtifactDifferenceWriter { } impl ArtifactWriter for ArtifactDifferenceWriter { - fn should_write(&self, path: &PathBuf, content: &[u8]) -> Result { - Ok(!self.verify_changes_against_filesystem - || content_is_different(path, content).map_err(|error| { - BuildProjectError::WriteFileError { - file: path.clone(), - source: error, - } - })?) + fn should_write( + &self, + path: &Path, + content: &[u8], + hash: Option, + ) -> Result { + let op = |error| BuildProjectError::WriteFileError { + file: path.to_owned(), + source: error, + }; + if !self.verify_changes_against_filesystem { + Ok(true) + } else if let Some(file_hash) = hash { + hash_is_different(file_hash, content).map_err(op) + } else { + content_is_different(path, content).map_err(op) + } } fn write(&self, path: PathBuf, content: Vec) -> BuildProjectResult { @@ -244,14 +264,23 @@ impl ArtifactDifferenceShardedWriter { } impl ArtifactWriter for ArtifactDifferenceShardedWriter { - fn should_write(&self, path: &PathBuf, content: &[u8]) -> Result { - Ok(!self.verify_changes_against_filesystem - || content_is_different(path, content).map_err(|error| { - BuildProjectError::WriteFileError { - file: path.clone(), - source: error, - } - })?) 
+ fn should_write( + &self, + path: &Path, + content: &[u8], + hash: Option, + ) -> Result { + let op = |error| BuildProjectError::WriteFileError { + file: path.to_owned(), + source: error, + }; + if !self.verify_changes_against_filesystem { + Ok(true) + } else if let Some(file_hash) = hash { + hash_is_different(file_hash, content).map_err(op) + } else { + content_is_different(path, content).map_err(op) + } } fn write(&self, path: PathBuf, content: Vec) -> BuildProjectResult { @@ -261,7 +290,7 @@ impl ArtifactWriter for ArtifactDifferenceShardedWriter { file.write_all(&content) })() .map_err(|error| BuildProjectError::WriteFileError { - file: path.clone(), + file: path.to_owned(), source: error, })?; self.codegen_records @@ -295,7 +324,7 @@ impl ArtifactWriter for ArtifactDifferenceShardedWriter { } } -fn ensure_file_directory_exists(file_path: &PathBuf) -> io::Result<()> { +fn ensure_file_directory_exists(file_path: &Path) -> io::Result<()> { if let Some(file_directory) = file_path.parent() { if !file_directory.exists() { create_dir_all(file_directory)?; @@ -305,7 +334,7 @@ fn ensure_file_directory_exists(file_path: &PathBuf) -> io::Result<()> { Ok(()) } -fn content_is_different(path: &PathBuf, content: &[u8]) -> io::Result { +fn content_is_different(path: &Path, content: &[u8]) -> io::Result { if path.exists() { let existing_content = std::fs::read(path)?; Ok(existing_content != content) @@ -314,9 +343,20 @@ fn content_is_different(path: &PathBuf, content: &[u8]) -> io::Result { } } +fn hash_is_different(file_hash: String, content: &[u8]) -> io::Result { + let hasher = Sha1::new_with_prefix(content); + let content_hash = format!("{:x}", hasher.finalize()); + Ok(file_hash != content_hash) +} + pub struct NoopArtifactWriter; impl ArtifactWriter for NoopArtifactWriter { - fn should_write(&self, _: &PathBuf, _: &[u8]) -> Result { + fn should_write( + &self, + _: &Path, + _: &[u8], + _: Option, + ) -> Result { Ok(false) } @@ -339,11 +379,21 @@ pub struct 
ArtifactValidationWriter { } impl ArtifactWriter for ArtifactValidationWriter { - fn should_write(&self, path: &PathBuf, content: &[u8]) -> Result { - content_is_different(path, content).map_err(|error| BuildProjectError::WriteFileError { - file: path.clone(), + fn should_write( + &self, + path: &Path, + content: &[u8], + hash: Option, + ) -> Result { + let op = |error| BuildProjectError::WriteFileError { + file: path.to_owned(), source: error, - }) + }; + if let Some(file_hash) = hash { + hash_is_different(file_hash, content).map_err(op) + } else { + content_is_different(path, content).map_err(op) + } } fn write(&self, path: PathBuf, _: Vec) -> BuildProjectResult { diff --git a/compiler/crates/relay-compiler/src/build_project/build_ir.rs b/compiler/crates/relay-compiler/src/build_project/build_ir.rs index 867a59eba06a2..7f3167deb8fdb 100644 --- a/compiler/crates/relay-compiler/src/build_project/build_ir.rs +++ b/compiler/crates/relay-compiler/src/build_project/build_ir.rs @@ -5,7 +5,10 @@ * LICENSE file in the root directory of this source tree. 
*/ +use std::collections::HashSet; + use common::Diagnostic; +use common::PerfLogEvent; use dependency_analyzer::get_reachable_ir; use fnv::FnvHashMap; use graphql_ir::ExecutableDefinitionName; @@ -15,8 +18,10 @@ use graphql_syntax::ExecutableDefinition; use graphql_text_printer::print_executable_definition_ast; use md5::Digest; use md5::Md5; +use relay_transforms::annotate_resolver_root_fragments; use schema::SDLSchema; +use super::BuildMode; use super::ProjectAsts; use crate::config::ProjectConfig; @@ -49,28 +54,41 @@ impl SourceHashes { } pub fn build_ir( - _project_config: &ProjectConfig, + project_config: &ProjectConfig, project_asts: ProjectAsts, schema: &SDLSchema, - is_incremental_build: bool, + build_mode: BuildMode, + log_event: &impl PerfLogEvent, ) -> Result> { let asts = project_asts.definitions; let source_hashes = SourceHashes::from_definitions(&asts); - let ir = graphql_ir::build_ir_in_relay_mode(schema, &asts)?; - if is_incremental_build { - let affected_ir = get_reachable_ir( + let mut ir = graphql_ir::build_ir_in_relay_mode(schema, &asts, &project_config.feature_flags)?; + if project_config.resolvers_schema_module.is_some() { + ir = annotate_resolver_root_fragments(schema, ir); + } + let affected_ir: Vec = match build_mode { + BuildMode::Incremental => get_reachable_ir( ir, project_asts.base_definition_names, project_asts.changed_names, schema, - ); - Ok(BuildIRResult { - ir: affected_ir, - source_hashes, - }) - } else { - Ok(BuildIRResult { ir, source_hashes }) - } + HashSet::default(), + log_event, + ), + BuildMode::IncrementalWithSchemaChanges(changes) => get_reachable_ir( + ir, + project_asts.base_definition_names, + project_asts.changed_names, + schema, + changes, + log_event, + ), + BuildMode::Full => ir, + }; + Ok(BuildIRResult { + ir: affected_ir, + source_hashes, + }) } fn md5(data: &str) -> String { diff --git a/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema.rs 
b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema.rs index b1258e4e731c2..67c10c73140fe 100644 --- a/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema.rs +++ b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema.rs @@ -5,146 +5,11 @@ * LICENSE file in the root directory of this source tree. */ -use common::DiagnosticsResult; -use docblock_syntax::DocblockAST; -use fnv::FnvHashMap; -use graphql_syntax::ExecutableDefinition; -use relay_docblock::extend_schema_with_resolver_type_system_definition; -use schema::SDLSchema; - -use crate::compiler_state::CompilerState; -use crate::compiler_state::ProjectName; -use crate::config::ProjectConfig; -use crate::docblocks::build_schema_documents_from_docblocks; -use crate::docblocks::parse_docblock_asts_from_sources; -use crate::GraphQLAsts; - -pub(crate) fn extend_schema_with_resolvers( - schema: &mut SDLSchema, - compiler_state: &CompilerState, - project_config: &ProjectConfig, - graphql_asts_map: &FnvHashMap, -) -> DiagnosticsResult<()> { - let ResolverSchemaDocuments { - type_asts, - field_asts_and_definitions, - } = extract_schema_documents_for_resolvers(compiler_state, project_config, graphql_asts_map)?; - - extend_schema_with_types(schema, project_config, type_asts)?; - extend_schema_with_fields(schema, project_config, field_asts_and_definitions)?; - - Ok(()) -} - -fn extend_schema_with_types( - schema: &mut SDLSchema, - project_config: &ProjectConfig, - type_asts: TypeAsts, -) -> DiagnosticsResult<()> { - let type_definitions = - build_schema_documents_from_docblocks(&type_asts.0, project_config, schema, None)?; - - for schema_document in type_definitions { - for definition in schema_document.definitions { - extend_schema_with_resolver_type_system_definition( - definition, - schema, - schema_document.location, - )?; - } - } - - Ok(()) -} - -fn extend_schema_with_fields<'a>( - schema: &mut SDLSchema, - project_config: &ProjectConfig, - 
field_asts_and_definitions: FieldAstsAndDefinitions<'a>, -) -> DiagnosticsResult<()> { - let mut field_definitions = vec![]; - for (asts, definitions) in field_asts_and_definitions.0 { - field_definitions.extend(build_schema_documents_from_docblocks( - &asts, - project_config, - schema, - definitions, - )?); - } - - for schema_document in field_definitions { - for definition in schema_document.definitions { - extend_schema_with_resolver_type_system_definition( - definition, - schema, - schema_document.location, - )?; - } - } - - Ok(()) -} - -struct ResolverSchemaDocuments<'a> { - type_asts: TypeAsts, - field_asts_and_definitions: FieldAstsAndDefinitions<'a>, -} -struct TypeAsts(Vec); -struct FieldAstsAndDefinitions<'a>(Vec<(Vec, Option<&'a Vec>)>); - -fn extract_schema_documents_for_resolvers<'a>( - compiler_state: &'a CompilerState, - project_config: &'a ProjectConfig, - graphql_asts_map: &'a FnvHashMap, -) -> DiagnosticsResult> { - let mut projects = vec![project_config.name]; - projects.extend(project_config.base); - - let docblock_ast_sources = projects.iter().map(|project_name| { - ( - compiler_state.docblocks.get(project_name), - graphql_asts_map.get(project_name), - ) - }); - - let mut errors = vec![]; - let mut type_asts = vec![]; - let mut field_asts_and_definitions = vec![]; - - for docblock_ast in docblock_ast_sources { - if let (Some(docblocks), Some(graphql_asts)) = docblock_ast { - for (file_path, docblock_sources) in &docblocks.get_all() { - match parse_docblock_asts_from_sources(file_path, docblock_sources) { - Ok(result) => { - // Type resolvers should not rely on any fragments - // @rootFragment is not supported for them, so - // we don't need to extract any fragments from the `file_path` - type_asts.extend(result.types); - - // But for fields, we may need to validate the correctness - // of the @rootFragment. 
- // And here we're reading GraphQL asts for the file, - // and keeping them together with Docblock ASTs - if !result.fields.is_empty() { - field_asts_and_definitions.push(( - result.fields, - graphql_asts.get_executable_definitions_for_file(file_path), - )); - } - } - Err(err) => errors.extend(err), - } - } - } else { - panic!("Expected to have access to AST and docblock sources."); - } - } - if errors.is_empty() { - Ok(ResolverSchemaDocuments { - type_asts: TypeAsts(type_asts), - field_asts_and_definitions: FieldAstsAndDefinitions(field_asts_and_definitions), - }) - } else { - Err(errors) - } -} +mod extract_docblock_ir; +mod ir_to_schema; +mod mark_document_as_base; + +pub use extract_docblock_ir::extract_docblock_ir; +pub use extract_docblock_ir::ExtractedDocblockIr; +pub use ir_to_schema::build_resolver_types_schema_documents; +pub use ir_to_schema::extend_schema_with_field_ir; diff --git a/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/extract_docblock_ir.rs b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/extract_docblock_ir.rs new file mode 100644 index 0000000000000..eb7a51f1b0d74 --- /dev/null +++ b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/extract_docblock_ir.rs @@ -0,0 +1,220 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::path::PathBuf; + +use common::Diagnostic; +use common::DiagnosticsResult; +use docblock_syntax::DocblockAST; +use fnv::FnvHashMap; +use graphql_syntax::ExecutableDefinition; +use relay_config::ProjectName; +use relay_docblock::parse_docblock_ast; +use relay_docblock::DocblockIr; +use relay_docblock::ParseOptions; +use relay_docblock::ResolverFieldDocblockIr; +use relay_docblock::ResolverTypeDocblockIr; +use rustc_hash::FxHashMap; + +use crate::compiler_state::CompilerState; +use crate::config::Config; +use crate::config::ProjectConfig; +use crate::docblocks::parse_docblock_asts_from_sources; +use crate::GraphQLAsts; + +#[derive(Default)] +pub struct ExtractedDocblockIr { + pub type_irs: Vec>, + pub field_irs: Vec>, +} + +pub struct AllocatedDocblockIr { + pub ir: T, + pub is_base: bool, +} + +/// Extract docblock IR for this, and the base project +pub fn extract_docblock_ir( + config: &Config, + compiler_state: &CompilerState, + project_config: &ProjectConfig, + graphql_asts_map: &FnvHashMap, +) -> DiagnosticsResult { + let mut irs = extract_docblock_ir_for_project( + config, + compiler_state, + project_config, + graphql_asts_map, + false, + )?; + if let Some(base_project_name) = &project_config.base { + let base_project_irs = extract_docblock_ir_for_project( + config, + compiler_state, + &config.projects[base_project_name], + graphql_asts_map, + true, + )?; + irs.type_irs.extend(base_project_irs.type_irs); + irs.field_irs.extend(base_project_irs.field_irs) + } + + Ok(irs) +} + +/// Extract docblock IR for a given project. This includes types and fields extracted directly from +/// docblocks as well as those extracted from custom extractors. 
+fn extract_docblock_ir_for_project( + config: &Config, + compiler_state: &CompilerState, + project_config: &ProjectConfig, + graphql_asts_map: &FnvHashMap, + is_base: bool, +) -> DiagnosticsResult { + let parse_options = ParseOptions { + enable_interface_output_type: &project_config + .feature_flags + .relay_resolver_enable_interface_output_type, + allow_resolver_non_nullable_return_type: &project_config + .feature_flags + .allow_resolver_non_nullable_return_type, + }; + + let mut type_irs = vec![]; + let mut field_irs = vec![]; + + let project_schema_docs = extract_schema_documents_for_resolvers( + &project_config.name, + compiler_state, + graphql_asts_map, + )?; + + if let Some(custom_extract_resolver) = &config.custom_extract_relay_resolvers { + let (extracted_types, extracted_fields) = custom_extract_resolver( + project_config.name, + compiler_state, + &project_schema_docs.field_asts_and_definitions.0, + )?; + type_irs.extend(extracted_types); + field_irs.extend(extracted_fields); + } + + let mut parse_errors: Vec = vec![]; + + for ast in project_schema_docs.type_asts.0 { + match parse_docblock_ast(&project_config.name, &ast, None, &parse_options) { + Ok(maybe_ir) => type_irs.extend(maybe_ir), + Err(errors) => parse_errors.extend(errors), + }; + } + + for (_, (asts, definitions)) in project_schema_docs.field_asts_and_definitions.0 { + for ast in asts { + match parse_docblock_ast(&project_config.name, &ast, definitions, &parse_options) { + Ok(maybe_ir) => field_irs.extend(maybe_ir), + Err(errors) => parse_errors.extend(errors), + }; + } + } + + if !parse_errors.is_empty() { + return Err(parse_errors); + } + + Ok(ExtractedDocblockIr { + type_irs: type_irs + .into_iter() + .map(|docblock_ir| { + let ir = expect_type_ir(docblock_ir); + AllocatedDocblockIr { ir, is_base } + }) + .collect(), + field_irs: field_irs + .into_iter() + .map(|docblock_ir| { + let ir = expect_field_ir(docblock_ir); + AllocatedDocblockIr { ir, is_base } + }) + .collect(), + }) +} + +fn 
expect_type_ir(docblock_ir: relay_docblock::DocblockIr) -> ResolverTypeDocblockIr { + match docblock_ir { + DocblockIr::Type(ir) => ir, + _ => panic!("Expected an IR that models a type"), + } +} + +fn expect_field_ir(docblock_ir: relay_docblock::DocblockIr) -> ResolverFieldDocblockIr { + match docblock_ir { + DocblockIr::Field(ir) => ir, + _ => panic!("Expected an IR that models a field"), + } +} + +struct ResolverSchemaDocuments<'a> { + type_asts: TypeAsts, + field_asts_and_definitions: FieldAstsAndDefinitions<'a>, +} +struct TypeAsts(Vec); +struct FieldAstsAndDefinitions<'a>( + FxHashMap<&'a PathBuf, (Vec, Option<&'a Vec>)>, +); + +fn extract_schema_documents_for_resolvers<'a>( + project_name: &'a ProjectName, + compiler_state: &'a CompilerState, + graphql_asts_map: &'a FnvHashMap, +) -> DiagnosticsResult> { + let docblock_ast_sources = ( + compiler_state.docblocks.get(project_name), + graphql_asts_map.get(project_name), + ); + let mut errors = vec![]; + let mut type_asts = vec![]; + let mut field_asts_and_definitions = FxHashMap::default(); + + if let (Some(docblocks), Some(graphql_asts)) = docblock_ast_sources { + for (file_path, docblock_sources) in docblocks.get_all() { + match parse_docblock_asts_from_sources(file_path, docblock_sources) { + Ok(result) => { + // Type resolvers should not rely on any fragments + // @rootFragment is not supported for them, so + // we don't need to extract any fragments from the `file_path` + type_asts.extend(result.types); + + // But for fields, we may need to validate the correctness + // of the @rootFragment. 
+ // And here we're reading GraphQL asts for the file, + // and keeping them together with Docblock ASTs + if !result.fields.is_empty() { + field_asts_and_definitions.insert( + file_path, + ( + result.fields, + graphql_asts.get_executable_definitions_for_file(file_path), + ), + ); + } + } + Err(err) => errors.extend(err), + } + } + } else { + panic!("Expected to have access to AST and docblock sources."); + } + + if errors.is_empty() { + Ok(ResolverSchemaDocuments { + type_asts: TypeAsts(type_asts), + field_asts_and_definitions: FieldAstsAndDefinitions(field_asts_and_definitions), + }) + } else { + Err(errors) + } +} diff --git a/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/ir_to_schema.rs b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/ir_to_schema.rs new file mode 100644 index 0000000000000..35c8a313005f5 --- /dev/null +++ b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/ir_to_schema.rs @@ -0,0 +1,88 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::DiagnosticsResult; +use errors::try_all; +use graphql_syntax::SchemaDocument; +use relay_docblock::extend_schema_with_resolver_type_system_definition; +use relay_docblock::ResolverFieldDocblockIr; +use relay_docblock::ResolverTypeDocblockIr; +use schema::SDLSchema; + +use super::extract_docblock_ir::AllocatedDocblockIr; +use super::mark_document_as_base::mark_document_as_base; +use crate::config::Config; +use crate::config::ProjectConfig; + +// FIXME: Use the base project's schema config where needed +/// FIXME: Move to relay-docblock crate +/// +/// For types we construct SDL ASTs and use them to construct the initial SDLSchema +pub fn build_resolver_types_schema_documents( + type_irs: &[AllocatedDocblockIr], + config: &Config, + project_config: &ProjectConfig, +) -> Vec { + let schema_config = &project_config.schema_config; + let base_config = project_config + .base + .map(|base_project_name| &config.projects[&base_project_name].schema_config); + + type_irs + .iter() + .map(|type_ir| { + let ir_schema_config = if type_ir.is_base { + base_config.unwrap() + } else { + schema_config + }; + let schema_document = type_ir.ir.to_graphql_schema_ast(ir_schema_config); + if type_ir.is_base { + mark_document_as_base(schema_document) + } else { + schema_document + } + }) + .collect() +} + +/// For fields, we extend the existing schema with the field definitions. This is achieved by generating +/// SDL ASTs for the fields and then using them to extend the schema. 
+pub fn extend_schema_with_field_ir( + irs: Vec>, + schema: &mut SDLSchema, + config: &Config, + project_config: &ProjectConfig, +) -> DiagnosticsResult<()> { + let base_project_config = project_config + .base + .map(|base_project_name| &config.projects[&base_project_name]); + + try_all(irs.into_iter().map(|field_ir| { + let ir_project_config = if field_ir.is_base { + base_project_config.unwrap() + } else { + project_config + }; + let mut document = field_ir.ir.to_graphql_schema_ast( + ir_project_config.name, + schema, + &ir_project_config.schema_config, + )?; + if field_ir.is_base { + document = mark_document_as_base(document); + } + try_all(document.definitions.into_iter().map(|definition| { + extend_schema_with_resolver_type_system_definition( + definition, + schema, + document.location, + ) + })) + }))?; + Ok(()) +} diff --git a/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/mark_document_as_base.rs b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/mark_document_as_base.rs new file mode 100644 index 0000000000000..94eb284b6f675 --- /dev/null +++ b/compiler/crates/relay-compiler/src/build_project/build_resolvers_schema/mark_document_as_base.rs @@ -0,0 +1,124 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Span; +use graphql_syntax::ConstantDirective; +use graphql_syntax::FieldDefinition; +use graphql_syntax::Identifier; +use graphql_syntax::InterfaceTypeExtension; +use graphql_syntax::List; +use graphql_syntax::ObjectTypeDefinition; +use graphql_syntax::ObjectTypeExtension; +use graphql_syntax::ScalarTypeDefinition; +use graphql_syntax::SchemaDocument; +use graphql_syntax::Token; +use graphql_syntax::TokenKind; +use relay_transforms::RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE; +use schema::TypeSystemDefinition; + +/// Mark schema definitions as base schema extension (add special directive to the type/field) +/// This directive is used by other stages of Relay to know if artifacts for these types have +/// already been generated. +pub fn mark_document_as_base(document: SchemaDocument) -> SchemaDocument { + SchemaDocument { + definitions: document + .definitions + .into_iter() + .map(mark_extension_as_base) + .collect(), + ..document + } +} + +fn mark_extension_as_base(definition: TypeSystemDefinition) -> TypeSystemDefinition { + match definition { + TypeSystemDefinition::ObjectTypeDefinition(def) => { + TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { + directives: merge_directives( + &def.directives, + &[belongs_to_base_schema_directive()], + ), + ..def + }) + } + TypeSystemDefinition::ScalarTypeDefinition(def) => { + TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { + directives: merge_directives( + &def.directives, + &[belongs_to_base_schema_directive()], + ), + ..def + }) + } + TypeSystemDefinition::ObjectTypeExtension(def) => { + TypeSystemDefinition::ObjectTypeExtension(ObjectTypeExtension { + fields: mark_fields_as_base(def.fields), + ..def + }) + } + TypeSystemDefinition::InterfaceTypeExtension(def) => { + TypeSystemDefinition::InterfaceTypeExtension(InterfaceTypeExtension { + fields: mark_fields_as_base(def.fields), + ..def + }) + } + _ => panic!( + "Expected docblocks to only expose object and scalar 
definitions, and object and interface extensions." + ), + } +} + +/// Mark fields as base schema extension fields +fn mark_fields_as_base(fields: Option>) -> Option> { + fields.map(|list| List { + items: list + .items + .iter() + .map(|item| FieldDefinition { + directives: merge_directives( + &item.directives, + &[belongs_to_base_schema_directive()], + ), + ..item.clone() + }) + .collect(), + ..list + }) +} + +/// Merge two lists of directives +fn merge_directives(a: &[ConstantDirective], b: &[ConstantDirective]) -> Vec { + if a.is_empty() { + b.to_vec() + } else if b.is_empty() { + a.to_vec() + } else { + let mut directives = a.to_vec(); + directives.extend(b.iter().cloned()); + directives + } +} + +/// Create special directive to mark types/fields as belonging to base schema +fn belongs_to_base_schema_directive() -> ConstantDirective { + ConstantDirective { + name: Identifier { + value: RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE.0, + span: Span::empty(), + token: Token { + span: Span::empty(), + kind: TokenKind::Empty, + }, + }, + arguments: None, + span: Span::empty(), + at: Token { + span: Span::empty(), + kind: TokenKind::Empty, + }, + } +} diff --git a/compiler/crates/relay-compiler/src/build_project/build_schema.rs b/compiler/crates/relay-compiler/src/build_project/build_schema.rs index 89d9f52a9a0f7..0c18c1d18e313 100644 --- a/compiler/crates/relay-compiler/src/build_project/build_schema.rs +++ b/compiler/crates/relay-compiler/src/build_project/build_schema.rs @@ -8,57 +8,150 @@ use std::sync::Arc; use common::DiagnosticsResult; +use common::PerfLogEvent; use fnv::FnvHashMap; +use relay_config::ProjectName; +use relay_docblock::validate_resolver_schema; +use schema::parse_schema_with_extensions; use schema::SDLSchema; +use schema::SchemaDocuments; +use schema_validate_lib::validate; +use schema_validate_lib::SchemaValidationOptions; -use super::build_resolvers_schema::extend_schema_with_resolvers; +use 
super::build_resolvers_schema::build_resolver_types_schema_documents; +use super::build_resolvers_schema::extend_schema_with_field_ir; +use super::build_resolvers_schema::extract_docblock_ir; +use super::build_resolvers_schema::ExtractedDocblockIr; use crate::compiler_state::CompilerState; -use crate::compiler_state::ProjectName; +use crate::config::Config; use crate::config::ProjectConfig; use crate::GraphQLAsts; pub fn build_schema( compiler_state: &CompilerState, + config: &Config, project_config: &ProjectConfig, graphql_asts_map: &FnvHashMap, + log_event: &impl PerfLogEvent, ) -> DiagnosticsResult> { - let schema = compiler_state.schema_cache.get(&project_config.name); - match schema { - Some(schema) if !compiler_state.project_has_pending_schema_changes(project_config.name) => { - Ok(schema.clone()) + if let Some(schema) = compiler_state.schema_cache.get(&project_config.name) { + if !compiler_state.project_has_pending_schema_changes(project_config.name) { + return Ok(schema.clone()); } - _ => { - let mut extensions = vec![]; - if let Some(project_extensions) = compiler_state.extensions.get(&project_config.name) { - extensions.extend(project_extensions.get_sources_with_location()); - } - if let Some(base_project_name) = project_config.base { - if let Some(base_project_extensions) = - compiler_state.extensions.get(&base_project_name) - { - extensions.extend(base_project_extensions.get_sources_with_location()); - } - } - let mut schema_sources = Vec::new(); - schema_sources.extend( - compiler_state.schemas[&project_config.name] - .get_sources_with_location() - .into_iter() - .map(|(schema, location_key)| (schema.as_str(), location_key)), - ); - let mut schema = - relay_schema::build_schema_with_extensions(&schema_sources, &extensions)?; - - if project_config.feature_flags.enable_relay_resolver_transform { - extend_schema_with_resolvers( - &mut schema, - compiler_state, - project_config, - graphql_asts_map, - )?; - } - - Ok(Arc::new(schema)) + } + 
build_schema_impl( + compiler_state, + project_config, + log_event, + config, + graphql_asts_map, + ) +} + +fn build_schema_impl( + compiler_state: &CompilerState, + project_config: &ProjectConfig, + log_event: &impl PerfLogEvent, + config: &Config, + graphql_asts_map: &FnvHashMap, +) -> DiagnosticsResult> { + let schema_sources = get_schema_sources(compiler_state, project_config); + let extensions = get_extension_sources(compiler_state, project_config); + + // Parse the server and extension schema text + let SchemaDocuments { + server: server_asts, + extensions: mut extension_asts, + } = log_event.time("parse_schema_time", || { + parse_schema_with_extensions(&schema_sources, &extensions) + })?; + + // Collect Relay Resolver schema IR + let resolver_schema_data = log_event.time("collect_resolver_schema_time", || { + if project_config.feature_flags.enable_relay_resolver_transform { + extract_docblock_ir(config, compiler_state, project_config, graphql_asts_map) + } else { + Ok(ExtractedDocblockIr::default()) + } + })?; + + // Convert resolver schema to AST and append it to extension ASTs + log_event.time("build_resolver_types_schema_time", || { + extension_asts.extend(build_resolver_types_schema_documents( + &resolver_schema_data.type_irs, + config, + project_config, + )); + }); + + // Now that all the named types have been collected, we can build + // the normalized schema. All names should be able to be resolved. + let mut schema = log_event.time("build_schema_time", || { + relay_schema::build_schema_with_extensions_from_asts(server_asts, extension_asts) + })?; + + // Now that the normalized schema has been built we can add fields to existing types by name. 
+ log_event.time("extend_schema_with_resolver_fields_time", || { + extend_schema_with_field_ir( + resolver_schema_data.field_irs, + &mut schema, + config, + project_config, + ) + })?; + + // Now that the schema has been fully extended to include all Resolver types + // and fields we can apply resolver-specific validations. + log_event.time("validate_resolver_schema_time", || { + validate_resolver_schema(&schema, &project_config.feature_flags) + })?; + + log_event.time("validate_composite_schema_time", || { + maybe_validate_schema(project_config, &schema) + })?; + + Ok(Arc::new(schema)) +} + +fn get_schema_sources<'a>( + compiler_state: &'a CompilerState, + project_config: &'a ProjectConfig, +) -> Vec<(&'a str, common::SourceLocationKey)> { + compiler_state.schemas[&project_config.name] + .get_sources_with_location() + .into_iter() + .map(|(schema, location_key)| (schema.as_str(), location_key)) + .collect() +} + +fn get_extension_sources<'a>( + compiler_state: &'a CompilerState, + project_config: &'a ProjectConfig, +) -> Vec<(&'a String, common::SourceLocationKey)> { + let mut extensions = vec![]; + if let Some(project_extensions) = compiler_state.extensions.get(&project_config.name) { + extensions.extend(project_extensions.get_sources_with_location()); + } + if let Some(base_project_name) = project_config.base { + if let Some(base_project_extensions) = compiler_state.extensions.get(&base_project_name) { + extensions.extend(base_project_extensions.get_sources_with_location()); } } + extensions +} + +fn maybe_validate_schema( + project_config: &ProjectConfig, + schema: &SDLSchema, +) -> DiagnosticsResult<()> { + if project_config.feature_flags.disable_schema_validation { + return Ok(()); + } + + validate( + &schema, + SchemaValidationOptions { + allow_introspection_names: true, + }, + ) } diff --git a/compiler/crates/relay-compiler/src/build_project/generate_artifacts.rs b/compiler/crates/relay-compiler/src/build_project/generate_artifacts.rs index 
22d202797712d..e4d5d7dece703 100644 --- a/compiler/crates/relay-compiler/src/build_project/generate_artifacts.rs +++ b/compiler/crates/relay-compiler/src/build_project/generate_artifacts.rs @@ -11,13 +11,15 @@ use std::sync::Arc; use common::NamedItem; use common::SourceLocationKey; use fnv::FnvHashMap; -use graphql_ir::ExecutableDefinitionName; use graphql_ir::FragmentDefinition; use graphql_ir::OperationDefinition; use graphql_text_printer::OperationPrinter; use graphql_text_printer::PrinterOptions; use intern::string_key::StringKey; use intern::Lookup; +use relay_codegen::QueryID; +use relay_config::ResolversSchemaModuleConfig; +use relay_transforms::ArtifactSourceKeyData; use relay_transforms::ClientEdgeGeneratedQueryMetadataDirective; use relay_transforms::Programs; use relay_transforms::RawResponseGenerationMode; @@ -27,12 +29,14 @@ use relay_transforms::UPDATABLE_DIRECTIVE; pub use super::artifact_content::ArtifactContent; use super::build_ir::SourceHashes; -use crate::config::Config; +use crate::artifact_map::ArtifactSourceKey; use crate::config::ProjectConfig; /// Represents a generated output artifact. pub struct Artifact { - pub source_definition_names: Vec, + /// List of source definitions that this artifact is generated from. + /// It may be the name of the query/fragment or relay resolver hash. + pub artifact_source_keys: Vec, pub path: PathBuf, pub content: ArtifactContent, /// The source file responsible for generating this file. 
@@ -41,7 +45,6 @@ pub struct Artifact { } pub fn generate_artifacts( - _config: &Config, project_config: &ProjectConfig, programs: &Programs, source_hashes: Arc, @@ -68,10 +71,19 @@ pub fn generate_artifacts( None }; + let artifact_source_keys = if let Some(artifact_source) = ArtifactSourceKeyData::find(&normalization.directives) { + vec![ + ArtifactSourceKey::ResolverHash(artifact_source.0) + ] + } else { + // TODO: refactor `parent_documents` to include ArtifactSource and not ExecutableDefinition + metadata.parent_documents.iter().copied().map(ArtifactSourceKey::ExecutableDefinition).collect() + }; + return Artifact { - source_definition_names: metadata.parent_documents.iter().copied().collect(), + artifact_source_keys, path: project_config - .path_for_artifact(source_file, normalization.name.item.0), + .artifact_path_for_definition(normalization.name), content: ArtifactContent::SplitOperation { normalization_operation: Arc::clone(normalization), typegen_operation, @@ -84,36 +96,26 @@ pub fn generate_artifacts( RefetchableDerivedFromMetadata::find(&normalization.directives) { let source_name = derived_from_metadata.0; - let source_fragment = programs - .source - .fragment(source_name) - .expect("Expected the source document for the SplitOperation to exist."); let source_hash = source_hashes.get(&source_name.into()).cloned().unwrap(); return generate_normalization_artifact( &mut operation_printer, - source_name.into(), + ArtifactSourceKey::ExecutableDefinition(source_name.into()), project_config, &operations, source_hash, - source_fragment.name.location.source_location(), ) } else if let Some(client_edges_directive) = ClientEdgeGeneratedQueryMetadataDirective::find(&normalization.directives) { let source_name = client_edges_directive.source_name.item; - let source_file = client_edges_directive - .source_name - .location - .source_location(); let source_hash = source_hashes.get(&source_name).cloned().unwrap(); return generate_normalization_artifact( &mut 
operation_printer, - source_name, + ArtifactSourceKey::ExecutableDefinition(source_name), project_config, &operations, source_hash, - source_file, ) } else { let source_hash = source_hashes @@ -122,11 +124,10 @@ pub fn generate_artifacts( .unwrap(); return generate_normalization_artifact( &mut operation_printer, - normalization.name.item.into(), + ArtifactSourceKey::ExecutableDefinition(normalization.name.item.into()), project_config, &operations, source_hash, - normalization.name.location.source_location(), ) } } else if let Some(reader) = operations.reader { @@ -142,11 +143,10 @@ pub fn generate_artifacts( .cloned() .unwrap(); return generate_updatable_query_artifact( - reader.name.item.into(), + ArtifactSourceKey::ExecutableDefinition(reader.name.item.into()), project_config, &operations, source_hash, - reader.name.location.source_location(), ) } } @@ -160,27 +160,51 @@ pub fn generate_artifacts( } else { reader_fragment.name.item.into() }; - + // If the fragment is generated for the RelayResolver model (id, or model instance) + // we need to update the source definition to include the original text of the resolver. let source_hash = source_hashes.get(&source_name).cloned(); - let source_definition_names = vec![source_name]; + + // We need this `if/else` here because of the way the compiler is handling the aritfacts + // deletion (see commit_project in compiler.rs). + // To remove the artifact, the artifact map should not contain any document/source that may + // generate the artifact. If we merge these sources (fragment name and resolver hash) + // then the removal of the source hash won't trigger the removal of the artifact, because + // there will be anothe key (fragment name) in the artifacts map that will point to the + // same generate artifact. 
+ let artifact_source_keys = if let Some(artifact_source) = ArtifactSourceKeyData::find(&reader_fragment.directives) { + vec![ + ArtifactSourceKey::ResolverHash(artifact_source.0) + ] + } else { + vec![ArtifactSourceKey::ExecutableDefinition(source_name)] + }; + generate_reader_artifact( project_config, programs, reader_fragment, source_hash, - source_definition_names, + artifact_source_keys, ) })) + .chain( + match project_config.resolvers_schema_module { + Some(ResolversSchemaModuleConfig { ref path , .. }) => + vec![ + generate_resolvers_schema_module_artifact(path.clone()) + ], + _ => vec![], + } + ) .collect(); } fn generate_normalization_artifact( operation_printer: &mut OperationPrinter<'_>, - source_definition_name: ExecutableDefinitionName, + artifact_source: ArtifactSourceKey, project_config: &ProjectConfig, operations: &OperationGroup<'_>, source_hash: String, - source_file: SourceLocationKey, ) -> Artifact { let text = operations .operation_text @@ -191,8 +215,8 @@ fn generate_normalization_artifact( .expect("Operations must have a normalization entry."); Artifact { - source_definition_names: vec![source_definition_name], - path: project_config.path_for_artifact(source_file, normalization.name.item.0), + artifact_source_keys: vec![artifact_source], + path: project_config.artifact_path_for_definition(normalization.name), content: ArtifactContent::Operation { normalization_operation: Arc::clone(normalization), reader_operation: operations.expect_reader(), @@ -205,20 +229,43 @@ fn generate_normalization_artifact( } } +pub fn generate_preloadable_query_parameters_artifact( + project_config: &ProjectConfig, + normalization: &Arc, + id_and_text_hash: &Option, + source_keys: Vec, + source_file: SourceLocationKey, +) -> Artifact { + let query_id = id_and_text_hash + .clone() + .expect("Expected operation artifact to have an `id`. 
Ensure a `persistConfig` is setup for the current project."); + + let artifact_name = normalization.name.item.0.to_string() + "$parameters"; + + Artifact { + artifact_source_keys: source_keys, + path: project_config.path_for_language_specific_artifact(source_file, artifact_name), + content: ArtifactContent::PreloadableQueryParameters { + normalization_operation: Arc::clone(normalization), + query_id, + }, + source_file, + } +} + fn generate_updatable_query_artifact( - source_definition_name: ExecutableDefinitionName, + artifact_source: ArtifactSourceKey, project_config: &ProjectConfig, operations: &OperationGroup<'_>, source_hash: String, - source_file: SourceLocationKey, ) -> Artifact { let reader = operations .reader .expect("Updatable operations must have a reader entry."); Artifact { - source_definition_names: vec![source_definition_name], - path: project_config.path_for_artifact(source_file, reader.name.item.0), + artifact_source_keys: vec![artifact_source], + path: project_config.artifact_path_for_definition(reader.name), content: ArtifactContent::UpdatableQuery { reader_operation: operations.expect_reader(), typegen_operation: operations.expect_typegen(), @@ -233,7 +280,7 @@ fn generate_reader_artifact( programs: &Programs, reader_fragment: &Arc, source_hash: Option, - source_definition_names: Vec, + artifact_source_keys: Vec, ) -> Artifact { let name = reader_fragment.name.item; let typegen_fragment = programs @@ -241,9 +288,8 @@ fn generate_reader_artifact( .fragment(name) .expect("a type fragment should be generated for this fragment"); Artifact { - source_definition_names, - path: project_config - .path_for_artifact(reader_fragment.name.location.source_location(), name.0), + artifact_source_keys, + path: project_config.artifact_path_for_definition(reader_fragment.name), content: ArtifactContent::Fragment { reader_fragment: Arc::clone(reader_fragment), typegen_fragment: Arc::clone(typegen_fragment), @@ -336,3 +382,12 @@ fn group_operations(programs: 
&Programs) -> FnvHashMap Artifact { + Artifact { + artifact_source_keys: vec![ArtifactSourceKey::Schema()], + path, + content: ArtifactContent::ResolversSchema, + source_file: SourceLocationKey::generated(), + } +} diff --git a/compiler/crates/relay-compiler/src/build_project/generate_extra_artifacts.rs b/compiler/crates/relay-compiler/src/build_project/generate_extra_artifacts.rs index bcc9815cbc74e..6ebd4ce729ad9 100644 --- a/compiler/crates/relay-compiler/src/build_project/generate_extra_artifacts.rs +++ b/compiler/crates/relay-compiler/src/build_project/generate_extra_artifacts.rs @@ -5,11 +5,50 @@ * LICENSE file in the root directory of this source tree. */ +use relay_transforms::is_operation_preloadable; use schema::SDLSchema; +use super::generate_preloadable_query_parameters_artifact; use super::Artifact; +use super::Config; use super::Programs; use super::ProjectConfig; +use crate::ArtifactContent; -pub type GenerateExtraArtifactsFn = - Box Vec + Send + Sync>; +pub type GenerateExtraArtifactsFn = Box< + dyn Fn(&Config, &ProjectConfig, &SDLSchema, &Programs, &[Artifact]) -> Vec + + Send + + Sync, +>; + +pub fn default_generate_extra_artifacts_fn( + _config: &Config, + project_config: &ProjectConfig, + _schema: &SDLSchema, + _program: &Programs, + artifacts: &[Artifact], +) -> Vec { + artifacts + .iter() + .filter_map(|artifact| match &artifact.content { + ArtifactContent::Operation { + normalization_operation, + id_and_text_hash, + .. 
+ } => { + if !is_operation_preloadable(normalization_operation) { + return None; + } + + Some(generate_preloadable_query_parameters_artifact( + project_config, + normalization_operation, + id_and_text_hash, + artifact.artifact_source_keys.clone(), + artifact.source_file, + )) + } + _ => None, + }) + .collect() +} diff --git a/compiler/crates/relay-compiler/src/build_project/get_artifacts_file_hash_map.rs b/compiler/crates/relay-compiler/src/build_project/get_artifacts_file_hash_map.rs new file mode 100644 index 0000000000000..e7f908e5ae5f0 --- /dev/null +++ b/compiler/crates/relay-compiler/src/build_project/get_artifacts_file_hash_map.rs @@ -0,0 +1,17 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use futures::future::BoxFuture; +use rustc_hash::FxHashMap; + +use super::Artifact; + +pub type GetArtifactsFileHashMapFn = Box< + dyn Send + + Sync + + for<'a> Fn(&'a [Artifact]) -> BoxFuture<'a, Option>>>, +>; diff --git a/compiler/crates/relay-compiler/src/build_project/mod.rs b/compiler/crates/relay-compiler/src/build_project/mod.rs deleted file mode 100644 index 8c62c11b62fb7..0000000000000 --- a/compiler/crates/relay-compiler/src/build_project/mod.rs +++ /dev/null @@ -1,552 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -//! This module is responsible to build a single project. It does not handle -//! watch mode or other state. 
- -mod artifact_generated_types; -pub mod artifact_writer; -mod build_ir; -mod build_resolvers_schema; -pub mod build_schema; -mod generate_artifacts; -pub mod generate_extra_artifacts; -mod log_program_stats; -mod persist_operations; -mod project_asts; -mod source_control; -mod validate; - -use std::path::PathBuf; -use std::sync::Arc; - -pub use artifact_generated_types::ArtifactGeneratedTypes; -use build_ir::BuildIRResult; -pub use build_ir::SourceHashes; -pub use build_schema::build_schema; -use common::sync::*; -use common::Diagnostic; -use common::PerfLogEvent; -use common::PerfLogger; -use common::WithDiagnostics; -use dashmap::mapref::entry::Entry; -use dashmap::DashSet; -use fnv::FnvBuildHasher; -use fnv::FnvHashMap; -use fnv::FnvHashSet; -pub use generate_artifacts::generate_artifacts; -pub use generate_artifacts::Artifact; -pub use generate_artifacts::ArtifactContent; -use graphql_ir::ExecutableDefinitionName; -use graphql_ir::FragmentDefinitionNameSet; -use graphql_ir::Program; -use log::debug; -use log::info; -use log::warn; -use rayon::iter::IntoParallelRefIterator; -use rayon::slice::ParallelSlice; -use relay_codegen::Printer; -use relay_transforms::apply_transforms; -use relay_transforms::CustomTransformsConfig; -use relay_transforms::Programs; -use relay_typegen::FragmentLocations; -use schema::SDLSchema; -pub use source_control::add_to_mercurial; -pub use validate::validate; -pub use validate::AdditionalValidations; - -use self::log_program_stats::print_stats; -pub use self::project_asts::find_duplicates; -pub use self::project_asts::get_project_asts; -pub use self::project_asts::ProjectAstData; -pub use self::project_asts::ProjectAsts; -use super::artifact_content; -use crate::artifact_map::ArtifactMap; -use crate::compiler_state::ArtifactMapKind; -use crate::compiler_state::CompilerState; -use crate::compiler_state::ProjectName; -use crate::config::Config; -use crate::config::ProjectConfig; -use crate::errors::BuildProjectError; -use 
crate::file_source::SourceControlUpdateStatus; -use crate::graphql_asts::GraphQLAsts; - -type BuildProjectOutput = WithDiagnostics<(ProjectName, Arc, Programs, Vec)>; -type BuildProgramsOutput = WithDiagnostics<(Programs, Arc)>; - -pub enum BuildProjectFailure { - Error(BuildProjectError), - Cancelled, -} - -impl From for BuildProjectFailure { - fn from(err: BuildProjectError) -> BuildProjectFailure { - BuildProjectFailure::Error(err) - } -} - -/// This program doesn't have IR transforms applied to it, so it's not optimized. -/// It's perfect for the LSP server: we have all the documents with -/// their locations to provide information to go_to_definition, hover, etc. -pub fn build_raw_program( - project_config: &ProjectConfig, - project_asts: ProjectAsts, - schema: Arc, - log_event: &impl PerfLogEvent, - is_incremental_build: bool, -) -> Result<(Program, SourceHashes), BuildProjectError> { - // Build a type aware IR. - let BuildIRResult { ir, source_hashes } = log_event.time("build_ir_time", || { - build_ir::build_ir(project_config, project_asts, &schema, is_incremental_build).map_err( - |errors| BuildProjectError::ValidationErrors { - errors, - project_name: project_config.name, - }, - ) - })?; - - // Turn the IR into a base Program. 
- let program = log_event.time("build_program_time", || { - Program::from_definitions(schema, ir) - }); - - Ok((program, source_hashes)) -} - -pub fn validate_program( - config: &Config, - project_config: &ProjectConfig, - program: &Program, - log_event: &impl PerfLogEvent, -) -> Result, BuildProjectError> { - let timer = log_event.start("validate_time"); - log_event.number("validate_documents_count", program.document_count()); - let result = validate(program, project_config, &config.additional_validations).map_or_else( - |errors| { - Err(BuildProjectError::ValidationErrors { - errors, - project_name: project_config.name, - }) - }, - |result| Ok(result.diagnostics), - ); - - log_event.stop(timer); - - result -} - -/// Apply various chains of transforms to create a set of output programs. -pub fn transform_program( - project_config: &ProjectConfig, - program: Arc, - base_fragment_names: Arc, - perf_logger: Arc, - log_event: &impl PerfLogEvent, - custom_transforms_config: Option<&CustomTransformsConfig>, -) -> Result { - let timer = log_event.start("apply_transforms_time"); - let result = apply_transforms( - project_config, - program, - base_fragment_names, - perf_logger, - Some(print_stats), - custom_transforms_config, - ) - .map_err(|errors| { - BuildProjectFailure::Error(BuildProjectError::ValidationErrors { - errors, - project_name: project_config.name, - }) - }); - - log_event.stop(timer); - - result -} - -pub fn build_programs( - config: &Config, - project_config: &ProjectConfig, - compiler_state: &CompilerState, - project_asts: ProjectAsts, - base_fragment_names: FragmentDefinitionNameSet, - schema: Arc, - log_event: &impl PerfLogEvent, - perf_logger: Arc, -) -> Result { - let project_name = project_config.name; - let is_incremental_build = compiler_state.has_processed_changes() - && !compiler_state.has_breaking_schema_change(project_name, &project_config.schema_config) - && if let Some(base) = project_config.base { - 
!compiler_state.has_breaking_schema_change(base, &project_config.schema_config) - } else { - true - }; - - let (program, source_hashes) = build_raw_program( - project_config, - project_asts, - schema, - log_event, - is_incremental_build, - )?; - - if compiler_state.should_cancel_current_build() { - debug!("Build is cancelled: updates in source code/or new file changes are pending."); - return Err(BuildProjectFailure::Cancelled); - } - - // Call validation rules that go beyond type checking. - // FIXME: Return non-fatal diagnostics from transforms (only validations for now) - let diagnostics = validate_program(config, project_config, &program, log_event)?; - - let programs = transform_program( - project_config, - Arc::new(program), - Arc::new(base_fragment_names), - Arc::clone(&perf_logger), - log_event, - config.custom_transforms.as_ref(), - )?; - - Ok(WithDiagnostics { - item: (programs, Arc::new(source_hashes)), - diagnostics, - }) -} - -pub fn build_project( - config: &Config, - project_config: &ProjectConfig, - compiler_state: &CompilerState, - graphql_asts_map: &FnvHashMap, - perf_logger: Arc, -) -> Result { - let log_event = perf_logger.create_event("build_project"); - let build_time = log_event.start("build_project_time"); - let project_name = project_config.name; - log_event.string("project", project_name.to_string()); - info!("[{}] compiling...", project_name); - - // Construct a schema instance including project specific extensions. 
- let schema = log_event - .time("build_schema_time", || { - build_schema(compiler_state, project_config, graphql_asts_map) - }) - .map_err(|errors| { - BuildProjectFailure::Error(BuildProjectError::ValidationErrors { - errors, - project_name: project_config.name, - }) - })?; - - let ProjectAstData { - project_asts, - base_fragment_names, - } = get_project_asts(&schema, graphql_asts_map, project_config)?; - - if compiler_state.should_cancel_current_build() { - debug!("Build is cancelled: updates in source code/or new file changes are pending."); - return Err(BuildProjectFailure::Cancelled); - } - - // Apply different transform pipelines to produce the `Programs`. - let WithDiagnostics { - item: (programs, source_hashes), - diagnostics, - } = build_programs( - config, - project_config, - compiler_state, - project_asts, - base_fragment_names, - Arc::clone(&schema), - &log_event, - Arc::clone(&perf_logger), - )?; - - if compiler_state.should_cancel_current_build() { - debug!("Build is cancelled: updates in source code/or new file changes are pending."); - return Err(BuildProjectFailure::Cancelled); - } - - // Generate artifacts by collecting information from the `Programs`. 
- let artifacts_timer = log_event.start("generate_artifacts_time"); - let artifacts = generate_artifacts( - config, - project_config, - &programs, - Arc::clone(&source_hashes), - ); - log_event.stop(artifacts_timer); - - log_event.number( - "generated_artifacts", - programs.reader.document_count() + programs.normalization.document_count(), - ); - - log_event.stop(build_time); - log_event.complete(); - Ok(WithDiagnostics { - item: (project_config.name, schema, programs, artifacts), - diagnostics, - }) -} - -#[allow(clippy::too_many_arguments)] -pub async fn commit_project( - config: &Config, - project_config: &ProjectConfig, - perf_logger: Arc, - schema: &SDLSchema, - programs: Programs, - mut artifacts: Vec, - artifact_map: Arc, - // Definitions that are removed from the previous artifact map - removed_definition_names: Vec, - // Dirty artifacts that should be removed if no longer in the artifacts map - mut artifacts_to_remove: DashSet, - source_control_update_status: Arc, -) -> Result { - let log_event = perf_logger.create_event("commit_project"); - log_event.string("project", project_config.name.to_string()); - let commit_time = log_event.start("commit_project_time"); - - let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); - if source_control_update_status.is_started() { - debug!("commit_project cancelled before persisting due to source control updates"); - return Err(BuildProjectFailure::Cancelled); - } - - if let Some(operation_persister) = config - .create_operation_persister - .as_ref() - .and_then(|create_fn| create_fn(project_config)) - { - let persist_operations_timer = log_event.start("persist_operations_time"); - persist_operations::persist_operations( - &mut artifacts, - &config.root_dir, - config, - project_config, - &(*operation_persister), - &log_event, - &programs, - ) - .await?; - log_event.stop(persist_operations_timer); - } - - if source_control_update_status.is_started() { - debug!( - "commit_project cancelled before 
generating extra artifacts due to source control updates" - ); - return Err(BuildProjectFailure::Cancelled); - } - - // In some cases we need to create additional (platform specific) artifacts - // For that, we will use `generate_extra_artifacts` from the configs - if let Some(generate_extra_artifacts_fn) = &config.generate_extra_artifacts { - log_event.time("generate_extra_artifacts_time", || { - artifacts.extend(generate_extra_artifacts_fn( - project_config, - schema, - &programs, - &artifacts, - )) - }); - } - - if source_control_update_status.is_started() { - debug!("commit_project cancelled before writing artifacts due to source control updates"); - return Err(BuildProjectFailure::Cancelled); - } - - let should_stop_updating_artifacts = || { - if source_control_update_status.is_started() { - debug!("artifact_writer updates cancelled due source control updates"); - true - } else { - false - } - }; - - // Write the generated artifacts to disk. This step is separate from - // generating artifacts or persisting to avoid partial writes in case of - // errors as much as possible. 
- let next_artifact_map = match Arc::as_ref(&artifact_map) { - ArtifactMapKind::Unconnected(existing_artifacts) => { - let mut existing_artifacts = existing_artifacts.clone(); - let write_artifacts_time = log_event.start("write_artifacts_time"); - write_artifacts( - config, - project_config, - schema, - should_stop_updating_artifacts, - &artifacts, - &fragment_locations, - )?; - for artifact in &artifacts { - if !existing_artifacts.remove(&artifact.path) { - debug!( - "[{}] new artifact {:?} from definitions {:?}", - project_config.name, &artifact.path, &artifact.source_definition_names - ); - } - } - log_event.stop(write_artifacts_time); - let delete_artifacts_time = log_event.start("delete_artifacts_time"); - for remaining_artifact in &existing_artifacts { - if should_stop_updating_artifacts() { - break; - } - let path = config.root_dir.join(remaining_artifact); - config.artifact_writer.remove(path)?; - } - log_event.stop(delete_artifacts_time); - ArtifactMap::from(artifacts) - } - ArtifactMapKind::Mapping(artifact_map) => { - let artifact_map = artifact_map.clone(); - let current_paths_map = ArtifactMap::default(); - let write_artifacts_incremental_time = - log_event.start("write_artifacts_incremental_time"); - - // Write or update artifacts - write_artifacts( - config, - project_config, - schema, - should_stop_updating_artifacts, - &artifacts, - &fragment_locations, - )?; - artifacts.into_par_iter().for_each(|artifact| { - current_paths_map.insert(artifact); - }); - log_event.stop(write_artifacts_incremental_time); - - log_event.time("update_artifact_map_time", || { - // All generated paths for removed definitions should be removed - for name in &removed_definition_names { - if let Some((_, artifacts)) = artifact_map.0.remove(name) { - artifacts_to_remove.extend(artifacts.into_iter().map(|a| a.path)); - } - } - // Update the artifact map, and delete any removed artifacts - current_paths_map.0.into_par_iter().for_each( - |(definition_name, artifact_records)| 
match artifact_map - .0 - .entry(definition_name) - { - Entry::Occupied(mut entry) => { - let prev_records = entry.get_mut(); - let current_records_paths = - FnvHashSet::from_iter(artifact_records.iter().map(|r| &r.path)); - - for prev_record in prev_records.drain(..) { - if !current_records_paths.contains(&prev_record.path) { - artifacts_to_remove.insert(prev_record.path); - } - } - *prev_records = artifact_records; - } - Entry::Vacant(entry) => { - entry.insert(artifact_records); - } - }, - ); - // Filter out any artifact that is in the artifact map - if !artifacts_to_remove.is_empty() { - artifact_map.0.par_iter().for_each(|entry| { - for artifact in entry.value() { - artifacts_to_remove.remove(&artifact.path); - } - }); - } - }); - let delete_artifacts_incremental_time = - log_event.start("delete_artifacts_incremental_time"); - // The remaining dirty artifacts are no longer required - for path in artifacts_to_remove { - if should_stop_updating_artifacts() { - break; - } - config.artifact_writer.remove(config.root_dir.join(path))?; - } - log_event.stop(delete_artifacts_incremental_time); - - artifact_map - } - }; - - if source_control_update_status.is_started() { - log_event.number("update_artifacts_after_source_control_update", 1); - debug!( - "We just updated artifacts after source control update happened. Most likely we have outdated artifacts now..." - ); - warn!( - r#" -Build canceled due to a source control update while we're writing artifacts. -The compiler may produce outdated artifacts, but it will regenerate the correct set after the update is completed."# - ); - return Err(BuildProjectFailure::Cancelled); - } else { - // For now, lets log how often this is happening, so we can decide if we want to - // adjust the way we write artifacts. For example, we could write them to the temp - // directory first, then move to a correct destination. 
- log_event.number("update_artifacts_after_source_control_update", 0); - } - - info!( - "[{}] compiled documents: {} reader, {} normalization, {} operation text", - project_config.name, - programs.reader.document_count(), - programs.normalization.document_count(), - programs.operation_text.document_count() - ); - log_event.stop(commit_time); - log_event.complete(); - - Ok(next_artifact_map) -} - -fn write_artifacts bool + Sync + Send>( - config: &Config, - project_config: &ProjectConfig, - schema: &SDLSchema, - should_stop_updating_artifacts: F, - artifacts: &[Artifact], - fragment_locations: &FragmentLocations, -) -> Result<(), BuildProjectFailure> { - artifacts.par_chunks(8192).try_for_each_init( - || Printer::with_dedupe(project_config), - |mut printer, artifacts| { - for artifact in artifacts { - if should_stop_updating_artifacts() { - return Err(BuildProjectFailure::Cancelled); - } - let path = config.root_dir.join(&artifact.path); - let content = artifact.content.as_bytes( - config, - project_config, - &mut printer, - schema, - artifact.source_file, - fragment_locations, - ); - if config.artifact_writer.should_write(&path, &content)? { - config.artifact_writer.write(path, content)?; - } - } - Ok(()) - }, - )?; - Ok(()) -} diff --git a/compiler/crates/relay-compiler/src/build_project/persist_operations.rs b/compiler/crates/relay-compiler/src/build_project/persist_operations.rs index bb4890d6cdb79..8652d87c282cb 100644 --- a/compiler/crates/relay-compiler/src/build_project/persist_operations.rs +++ b/compiler/crates/relay-compiler/src/build_project/persist_operations.rs @@ -6,6 +6,7 @@ */ use std::fs; +use std::path::Path; use std::path::PathBuf; use common::sync::ParallelIterator; @@ -34,7 +35,7 @@ lazy_static! 
{ pub async fn persist_operations( artifacts: &mut [Artifact], - root_dir: &PathBuf, + root_dir: &Path, config: &Config, project_config: &ProjectConfig, operation_persister: &'_ (dyn OperationPersister + Send + Sync), @@ -48,6 +49,7 @@ pub async fn persist_operations( ref text, ref mut id_and_text_hash, ref reader_operation, + ref normalization_operation, .. } = artifact.content { @@ -62,8 +64,17 @@ pub async fn persist_operations( None } else if let Some(text) = text { let text_hash = md5(text); - let artifact_path = root_dir.join(&artifact.path); let relative_path = artifact.path.to_owned(); + let mut override_schema = None; + if let Some(custom_override_schema_determinator) = + config.custom_override_schema_determinator.as_ref() + { + override_schema = custom_override_schema_determinator( + project_config, + normalization_operation, + ); + } + let artifact_path = root_dir.join(&artifact.path); let extracted_persist_id = if config.repersist_operations { None } else { @@ -79,6 +90,7 @@ pub async fn persist_operations( .persist_artifact(ArtifactForPersister { text, relative_path, + override_schema, }) .await .map(|id| { diff --git a/compiler/crates/relay-compiler/src/build_project/project_asts.rs b/compiler/crates/relay-compiler/src/build_project/project_asts.rs index c14e88380be54..b7522fd200a65 100644 --- a/compiler/crates/relay-compiler/src/build_project/project_asts.rs +++ b/compiler/crates/relay-compiler/src/build_project/project_asts.rs @@ -15,11 +15,11 @@ use graphql_ir::FragmentDefinitionNameSet; use graphql_ir::OperationDefinitionName; use graphql_syntax::ExecutableDefinition; use relay_config::ProjectConfig; +use relay_config::ProjectName; use relay_transforms::get_resolver_fragment_dependency_name; use schema::SDLSchema; use schema::Schema; -use crate::compiler_state::ProjectName; use crate::errors::BuildProjectError; use crate::GraphQLAsts; @@ -156,7 +156,7 @@ fn find_base_resolver_fragment_asts( ) -> Vec { let mut base_resolver_fragments = 
ExecutableDefinitionNameSet::default(); for field in schema.fields() { - if let Some(fragment_name) = get_resolver_fragment_dependency_name(field, schema) { + if let Some(fragment_name) = get_resolver_fragment_dependency_name(field) { if base_definition_asts.contains(&fragment_name.into()) { base_resolver_fragments.insert(fragment_name.into()); } diff --git a/compiler/crates/relay-compiler/src/build_project/source_control.rs b/compiler/crates/relay-compiler/src/build_project/source_control.rs index 1e7421d34f2ab..e7ab5339d1715 100644 --- a/compiler/crates/relay-compiler/src/build_project/source_control.rs +++ b/compiler/crates/relay-compiler/src/build_project/source_control.rs @@ -5,25 +5,41 @@ * LICENSE file in the root directory of this source tree. */ +use std::path::Path; use std::path::PathBuf; use std::process::Command; use std::process::Stdio; use std::sync::Mutex; +use log::debug; use log::info; -pub fn add_to_mercurial( - root_dir: &PathBuf, - added: &Mutex>, - removed: &Mutex>, -) -> crate::errors::Result<()> { - { +pub trait SourceControl { + fn add_files(&self, root_dir: &Path, added: &Mutex>) -> crate::errors::Result<()>; + + fn remove_files( + &self, + root_dir: &Path, + removed: &Mutex>, + ) -> crate::errors::Result<()>; +} + +trait SourceControlStartAndStopCommands { + fn start_tracking_command() -> Command; + + fn stop_tracking_command() -> Command; +} + +impl SourceControl for T +where + T: SourceControlStartAndStopCommands, +{ + fn add_files(&self, root_dir: &Path, added: &Mutex>) -> crate::errors::Result<()> { let mut added = added.lock().unwrap(); if !added.is_empty() { for added_files in added.chunks(100) { - if Command::new("hg") + if Self::start_tracking_command() .current_dir(root_dir) - .arg("add") .args(added_files) .stdout(Stdio::null()) .stderr(Stdio::null()) @@ -31,19 +47,24 @@ pub fn add_to_mercurial( .spawn() .is_err() { - info!("Failed to run `hg add`."); + info!("Failed to run source control 'add' operation."); } } added.clear(); } 
+ Ok(()) } - { + + fn remove_files( + &self, + root_dir: &Path, + removed: &Mutex>, + ) -> crate::errors::Result<()> { let mut removed = removed.lock().unwrap(); if !removed.is_empty() { for removed_files in removed.chunks(100) { - if Command::new("hg") + if Self::stop_tracking_command() .current_dir(root_dir) - .arg("forget") .args(removed_files) .stdout(Stdio::null()) .stderr(Stdio::null()) @@ -51,11 +72,92 @@ pub fn add_to_mercurial( .spawn() .is_err() { - info!("Failed to run `hg forget`."); + info!("Failed to run source control 'remove' operation."); } } removed.clear(); } + Ok(()) + } +} + +/// Sapling is Meta's fork of Mercurial. +/// Inside Meta, it is available as both +/// `sl`, and `hg`. +struct Sapling; + +impl SourceControlStartAndStopCommands for Sapling { + fn start_tracking_command() -> Command { + let mut command = Command::new("sl"); + command.arg("add"); + command + } + + fn stop_tracking_command() -> Command { + let mut command = Command::new("sl"); + command.arg("forget"); + command + } +} + +struct Git; + +impl SourceControlStartAndStopCommands for Git { + fn start_tracking_command() -> Command { + let mut command = Command::new("git"); + command.arg("add"); + command } - Ok(()) + + fn stop_tracking_command() -> Command { + let mut command = Command::new("git"); + command.arg("rm").arg("--cached"); + command + } +} + +pub fn source_control_for_root(root_dir: &PathBuf) -> Option> { + let check_git = Command::new("git") + .arg("status") + .current_dir(root_dir) + .output(); + + if let Ok(check_git) = check_git { + if check_git.status.success() { + debug!("Enabling git source control integration"); + return Some(Box::new(Git)); + } + } + + // Warning: `sl` can support git repos, so it's important that we + // check the native `git` command first due to differences in + // staging behavior between the two. 
+ let check_sapling = Command::new("sl") + .arg("root") + .current_dir(root_dir) + .output(); + + if let Ok(check_sapling) = check_sapling { + if check_sapling.status.success() { + let possible_steam_locomotive_check = Command::new("sl").arg("--version").output(); + + // The "Steam Locomotive" command also uses `sl` and doesn't have an easy way to detect + // if it is actually the `sl` command (it exits with code 0 if run as `sl root`), so we + // need to do some additional checking to make sure we can enable Sapling integration: + if let Ok(output) = possible_steam_locomotive_check { + if output.status.success() + && String::from_utf8_lossy(&output.stdout).contains("Sapling") + { + debug!("Enabling Sapling source control integration"); + return Some(Box::new(Sapling)); + } else { + debug!( + "The `sl` command is not Sapling, so Sapling source control integration is disabled" + ); + } + } + } + } + + None } diff --git a/compiler/crates/relay-compiler/src/build_project/validate.rs b/compiler/crates/relay-compiler/src/build_project/validate.rs index 3318af26c353c..7707b8de59ac1 100644 --- a/compiler/crates/relay-compiler/src/build_project/validate.rs +++ b/compiler/crates/relay-compiler/src/build_project/validate.rs @@ -8,13 +8,14 @@ use common::escalate_and_check; use common::CriticalDiagnostics; use common::DiagnosticsResult; -use common::FeatureFlags; use common::StableDiagnostics; use common::WithDiagnostics; use errors::try_all; use graphql_ir::Program; use relay_config::ProjectConfig; use relay_transforms::disallow_circular_no_inline_fragments; +use relay_transforms::disallow_readtime_features_in_mutations; +use relay_transforms::disallow_required_on_non_null_field; use relay_transforms::disallow_reserved_aliases; use relay_transforms::disallow_typename_on_root; use relay_transforms::validate_assignable_directive; @@ -33,7 +34,7 @@ use relay_transforms::validate_updatable_directive; use relay_transforms::validate_updatable_fragment_spread; pub type 
AdditionalValidations = - Box DiagnosticsResult<()> + Sync + Send>; + Box DiagnosticsResult<()> + Sync + Send>; pub fn validate( program: &Program, @@ -54,7 +55,7 @@ pub fn validate( disallow_typename_on_root(program), validate_static_args(program), if let Some(ref validate) = additional_validations { - validate(program, &project_config.feature_flags) + validate(program, project_config) } else { Ok(()) }, @@ -67,6 +68,25 @@ pub fn validate( } else { Ok(()) }, + disallow_readtime_features_in_mutations( + program, + &project_config + .feature_flags + .allow_resolvers_in_mutation_response, + &project_config + .feature_flags + .allow_required_in_mutation_response, + project_config.feature_flags.enable_relay_resolver_mutations, + ), + disallow_required_on_non_null_field( + program, + project_config + .feature_flags + .disallow_required_on_non_null_fields, + project_config + .typegen_config + .experimental_emit_semantic_nullability_types, + ), ]); match output { diff --git a/compiler/crates/relay-compiler/src/compiler.rs b/compiler/crates/relay-compiler/src/compiler.rs index ad18fb3c0f878..5bcc563495422 100644 --- a/compiler/crates/relay-compiler/src/compiler.rs +++ b/compiler/crates/relay-compiler/src/compiler.rs @@ -5,12 +5,14 @@ * LICENSE file in the root directory of this source tree. 
*/ +use std::collections::HashSet; use std::sync::Arc; use common::Diagnostic; use common::PerfLogEvent; use common::PerfLogger; use common::WithDiagnostics; +use docblock_shared::ResolverSourceHash; use futures::future::join_all; use graphql_watchman::WatchmanFileSourceSubscriptionNextChange; use log::debug; @@ -20,16 +22,19 @@ use tokio::sync::Notify; use tokio::task; use tokio::task::JoinHandle; +use crate::artifact_map::ArtifactSourceKey; use crate::build_project::build_project; use crate::build_project::commit_project; use crate::build_project::BuildProjectFailure; use crate::compiler_state::ArtifactMapKind; use crate::compiler_state::CompilerState; +use crate::compiler_state::DocblockSources; use crate::config::Config; use crate::errors::Error; use crate::errors::Result; use crate::file_source::FileSource; use crate::file_source::FileSourceSubscriptionNextChange; +use crate::file_source::LocatedDocblockSource; use crate::graphql_asts::GraphQLAsts; use crate::red_to_green::RedToGreen; use crate::FileSourceResult; @@ -90,6 +95,7 @@ impl Compiler { pub async fn watch(&self) -> Result<()> { 'watch: loop { let setup_event = self.perf_logger.create_event("compiler_setup"); + let initial_watch_compile_timer = setup_event.start("initial_watch_compile"); self.config.status_reporter.build_starts(); let result: Result<(CompilerState, Arc, JoinHandle<()>)> = async { if let Some(initialize_resources) = &self.config.initialize_resources { @@ -166,7 +172,7 @@ impl Compiler { self.config.status_reporter.build_errors(&err); } }; - + setup_event.stop(initial_watch_compile_timer); setup_event.complete(); info!("Watching for new changes..."); @@ -293,10 +299,12 @@ async fn build_projects( setup_event: &impl PerfLogEvent, compiler_state: &mut CompilerState, ) -> Result> { + let dirty_artifact_sources = compiler_state.get_dirty_artifact_sources(&config); let mut graphql_asts = setup_event.time("parse_sources_time", || { GraphQLAsts::from_graphql_sources_map( 
&compiler_state.graphql_sources, - &compiler_state.get_dirty_definitions(&config), + &dirty_artifact_sources, + &config, ) })?; @@ -361,10 +369,16 @@ async fn build_projects( .get(&project_name) .cloned() .unwrap_or_else(|| Arc::new(ArtifactMapKind::Unconnected(Default::default()))); - let removed_definition_names = graphql_asts + let mut removed_artifact_sources = graphql_asts .remove(&project_name) .expect("Expect GraphQLAsts to exist.") .removed_definition_names; + + let removed_docblock_artifact_sources = + get_removed_docblock_artifact_source_keys(compiler_state.docblocks.get(&project_name)); + + removed_artifact_sources.extend(removed_docblock_artifact_sources); + let dirty_artifact_paths = compiler_state .dirty_artifact_paths .get(&project_name) @@ -385,7 +399,7 @@ async fn build_projects( programs, artifacts, artifact_map, - removed_definition_names, + removed_artifact_sources, dirty_artifact_paths, source_control_update_status, ) @@ -433,3 +447,42 @@ async fn build_projects( Ok(all_diagnostics) } + +/// Get the list of removed docblock sources. +fn get_removed_docblock_artifact_source_keys( + docblock_sources: Option<&DocblockSources>, +) -> Vec { + let mut removed_docblocks: Vec = vec![]; + + if let Some(docblock_sources) = docblock_sources { + for (file_name, pending_docblock_sources_for_file) in docblock_sources.pending.iter() { + let mut docblocks_in_file = HashSet::new(); + + for LocatedDocblockSource { + docblock_source, .. + } in pending_docblock_sources_for_file + { + docblocks_in_file.insert(&docblock_source.text_source().text); + } + + if let Some(processed) = docblock_sources.processed.get(file_name) { + for LocatedDocblockSource { + docblock_source, .. + } in processed + { + // If new content of the file doesn't contain the docblock, we should remove + // the generated artifacts for this docblock. 
+ if !docblocks_in_file.contains(&docblock_source.text_source().text) { + removed_docblocks.push(ArtifactSourceKey::ResolverHash( + ResolverSourceHash::new(&docblock_source.text_source().text), + )); + } + } + } + } + + removed_docblocks + } else { + vec![] + } +} diff --git a/compiler/crates/relay-compiler/src/compiler_state.rs b/compiler/crates/relay-compiler/src/compiler_state.rs index f19db00de470b..0a35074c3ce40 100644 --- a/compiler/crates/relay-compiler/src/compiler_state.rs +++ b/compiler/crates/relay-compiler/src/compiler_state.rs @@ -6,6 +6,7 @@ */ use std::collections::hash_map::Entry; +use std::collections::HashMap; use std::env; use std::fmt; use std::fs::File as FsFile; @@ -26,11 +27,12 @@ use dashmap::DashSet; use fnv::FnvBuildHasher; use fnv::FnvHashMap; use fnv::FnvHashSet; -use graphql_ir::ExecutableDefinitionName; -use intern::string_key::StringKey; +use log::debug; use rayon::prelude::*; +use relay_config::ProjectName; use relay_config::SchemaConfig; use schema::SDLSchema; +use schema_diff::check::SchemaChangeSafety; use schema_diff::definitions::SchemaChange; use schema_diff::detect_changes; use serde::Deserialize; @@ -39,6 +41,7 @@ use zstd::stream::read::Decoder as ZstdDecoder; use zstd::stream::write::Encoder as ZstdEncoder; use crate::artifact_map::ArtifactMap; +use crate::artifact_map::ArtifactSourceKey; use crate::config::Config; use crate::errors::Error; use crate::errors::Result; @@ -54,9 +57,6 @@ use crate::file_source::LocatedGraphQLSource; use crate::file_source::LocatedJavascriptSourceFeatures; use crate::file_source::SourceControlUpdateStatus; -/// Name of a compiler project. -pub type ProjectName = StringKey; - /// Set of project names. 
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] #[serde(from = "DeserializableProjectSet")] @@ -79,7 +79,7 @@ impl ProjectSet { existing_names.push(project_name); } - pub fn iter(&self) -> slice::Iter<'_, StringKey> { + pub fn iter(&self) -> slice::Iter<'_, ProjectName> { self.0.iter() } @@ -94,7 +94,7 @@ impl ProjectSet { impl IntoIterator for ProjectSet { type Item = ProjectName; - type IntoIter = vec::IntoIter; + type IntoIter = vec::IntoIter; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() @@ -205,6 +205,27 @@ impl IncrementalSources { sources.sort_by_key(|file_content| file_content.0); sources } + + pub fn get_all_non_empty(&self) -> Vec<(&PathBuf, &V)> { + let mut sources: Vec<_> = if self.pending.is_empty() { + self.processed + .iter() + .filter(|(_, value)| !value.is_empty()) + .collect() + } else { + self.pending + .iter() + .chain( + self.processed + .iter() + .filter(|(key, _)| !self.pending.contains_key(*key)), + ) + .filter(|(_, value)| !value.is_empty()) + .collect() + }; + sources.sort_by_key(|file_content| file_content.0); + sources + } } impl Default for IncrementalSources { @@ -218,9 +239,11 @@ impl Default for IncrementalSources { type GraphQLSourceSet = IncrementalSourceSet>; type DocblockSourceSet = IncrementalSourceSet>; +type FullSourceSet = IncrementalSourceSet; pub type GraphQLSources = IncrementalSources>; pub type SchemaSources = IncrementalSources; pub type DocblockSources = IncrementalSources>; +pub type FullSources = IncrementalSources; impl Source for String { fn is_empty(&self) -> bool { @@ -269,6 +292,7 @@ pub struct CompilerState { pub schemas: FnvHashMap, pub extensions: FnvHashMap, pub docblocks: FnvHashMap, + pub full_sources: FnvHashMap, pub artifacts: FnvHashMap>, #[serde(with = "clock_json_string")] pub clock: Option, @@ -282,6 +306,21 @@ pub struct CompilerState { #[serde(skip)] pub source_control_update_status: Arc, } +/// Used to store the intermediate results of processing a file source in 
parallel. +/// Because the compiler state does not support concurrency, we process in parallel, +/// then merge the results into the compiler state. +enum FileSourceIntermediateResult { + Source( + ProjectSet, + GraphQLSourceSet, + DocblockSourceSet, + FullSourceSet, + ), + Schema(ProjectSet, FnvHashMap, Vec), + Extension(ProjectSet, FnvHashMap, Vec), + Generated(ProjectName, FnvHashSet), + Ignore, +} impl CompilerState { pub fn from_file_source_changes( @@ -300,47 +339,51 @@ impl CompilerState { ..Default::default() }; - for (category, files) in categorized { - match category { - FileGroup::Source { project_set } => { - let (graphql_sources, docblock_sources) = extract_sources( - &project_set, - files, - file_source_changes, - false, - perf_logger, - )?; - + let file_source_result = process_categorized_sources( + categorized, + config, + file_source_changes, + perf_logger, + false, + )?; + for file_source in file_source_result { + match file_source { + FileSourceIntermediateResult::Source( + project_set, + graphql_sources, + docblock_sources, + full_sources, + ) => { for project_name in project_set { result.set_pending_source_set(project_name, &graphql_sources); result.set_pending_docblock_set(project_name, &docblock_sources); + result.set_pending_full_source_set(project_name, &full_sources); } } - FileGroup::Schema { project_set } => { - Self::process_schema_change( - file_source_changes, - files, - project_set, - &mut result.schemas, - )?; + FileSourceIntermediateResult::Schema(project_set, added, removed) => { + for project_name in project_set { + let entry = result.schemas.entry(project_name).or_default(); + for source in &removed { + entry.pending.insert(source.clone(), "".to_string()); + } + entry.merge_pending_sources(&added); + } } - FileGroup::Extension { project_set } => { - Self::process_schema_change( - file_source_changes, - files, - project_set, - &mut result.extensions, - )?; + FileSourceIntermediateResult::Extension(project_set, added, removed) => 
{ + for project_name in project_set { + let entry = result.extensions.entry(project_name).or_default(); + for source in &removed { + entry.pending.insert(source.clone(), "".to_string()); + } + entry.merge_pending_sources(&added); + } } - FileGroup::Generated { project_name } => { - result.artifacts.insert( - project_name, - Arc::new(ArtifactMapKind::Unconnected( - files.into_iter().map(|file| file.name).collect(), - )), - ); + FileSourceIntermediateResult::Generated(project_name, files) => { + result + .artifacts + .insert(project_name, Arc::new(ArtifactMapKind::Unconnected(files))); } - FileGroup::Ignore => {} + FileSourceIntermediateResult::Ignore => {} } } @@ -385,9 +428,13 @@ impl CompilerState { .docblocks .values() .any(|sources| !sources.processed.is_empty()) + || self + .full_sources + .values() + .any(|sources| !sources.processed.is_empty()) } - fn is_change_safe(&self, sources: &SchemaSources, schema_config: &SchemaConfig) -> bool { + fn get_schema_change(&self, sources: &SchemaSources) -> SchemaChange { let previous = sources .get_old_sources() .into_iter() @@ -400,10 +447,17 @@ impl CompilerState { .map(String::as_str) .collect::>(); - let schema_change = detect_changes(¤t, &previous); + detect_changes(¤t, &previous) + } + fn get_schema_change_safety( + &self, + sources: &SchemaSources, + schema_change: SchemaChange, + schema_config: &SchemaConfig, + ) -> SchemaChangeSafety { if schema_change == SchemaChange::None { - true + SchemaChangeSafety::Safe } else { let current_sources_with_location = sources .get_sources_with_location() @@ -415,49 +469,54 @@ impl CompilerState { ¤t_sources_with_location, &Vec::<(&str, SourceLocationKey)>::new(), ) { - Ok(schema) => schema_change.is_safe(&schema, schema_config), - Err(_) => false, + Ok(schema) => schema_change.get_safety(&schema, schema_config), + Err(_) => SchemaChangeSafety::Unsafe, } } } - /// This method will detect any schema changes in the pending sources (for LSP Server, to invalidate schema cache) - pub 
fn has_schema_changes(&self) -> bool { - self.docblocks - .values() - .any(|sources| !sources.pending.is_empty()) - || self - .extensions - .values() - .any(|sources| !sources.pending.is_empty()) - || self - .schemas - .iter() - .any(|(_, sources)| !sources.pending.is_empty()) - } - /// This method is looking at the pending schema changes to see if they may be breaking (removed types, renamed field, etc) - pub fn has_breaking_schema_change( + pub fn schema_change_safety( &self, - project_name: StringKey, + log_event: &impl PerfLogEvent, + project_name: ProjectName, schema_config: &SchemaConfig, - ) -> bool { + ) -> SchemaChangeSafety { if let Some(extension) = self.extensions.get(&project_name) { if !extension.pending.is_empty() { - return true; + log_event.string("has_breaking_schema_change", "extension".to_owned()); + return SchemaChangeSafety::Unsafe; } } if let Some(docblocks) = self.docblocks.get(&project_name) { if !docblocks.pending.is_empty() { - return true; + log_event.string("has_breaking_schema_change", "docblock".to_owned()); + return SchemaChangeSafety::Unsafe; + } + } + if let Some(full_sources) = self.full_sources.get(&project_name) { + if !full_sources.pending.is_empty() { + log_event.string("has_breaking_schema_change", "full_source".to_owned()); + return SchemaChangeSafety::Unsafe; } } if let Some(schema) = self.schemas.get(&project_name) { - if !(schema.pending.is_empty() || self.is_change_safe(schema, schema_config)) { - return true; + if !schema.pending.is_empty() { + let schema_change = self.get_schema_change(schema); + let schema_change_string = schema_change.to_string(); + let schema_change_safety = + self.get_schema_change_safety(schema, schema_change, schema_config); + match schema_change_safety { + SchemaChangeSafety::Unsafe => { + log_event.string("schema_change", schema_change_string); + log_event.string("has_breaking_schema_change", "schema_change".to_owned()); + } + SchemaChangeSafety::SafeWithIncrementalBuild(_) | 
SchemaChangeSafety::Safe => {} + } + return schema_change_safety; } } - false + SchemaChangeSafety::Safe } /// Merges pending changes from the file source into the compiler state. @@ -476,22 +535,25 @@ impl CompilerState { let categorized = log_event.time("categorize_files_time", || { categorize_files(config, &file_source_changes) }); - - for (category, files) in categorized { - match category { - FileGroup::Source { project_set } => { + let file_source_result = process_categorized_sources( + categorized, + config, + &file_source_changes, + perf_logger, + true, + )?; + for file_source in file_source_result { + match file_source { + FileSourceIntermediateResult::Source( + project_set, + graphql_sources, + docblock_sources, + full_sources, + ) => { // TODO: possible optimization to only set this if the // extracted sources actually differ. has_changed = true; - let (graphql_sources, docblock_sources) = extract_sources( - &project_set, - files, - &file_source_changes, - true, - perf_logger, - )?; - for project_name in project_set { self.graphql_sources .entry(project_name) @@ -501,35 +563,39 @@ impl CompilerState { .entry(project_name) .or_default() .merge_pending_sources(&docblock_sources); + self.full_sources + .entry(project_name) + .or_default() + .merge_pending_sources(&full_sources); } } - FileGroup::Schema { project_set } => { + FileSourceIntermediateResult::Schema(project_set, added, removed) => { has_changed = true; - Self::process_schema_change( - &file_source_changes, - files, - project_set, - &mut self.schemas, - )?; + for project_name in project_set { + let entry = self.schemas.entry(project_name).or_default(); + for source in &removed { + entry.pending.insert(source.clone(), "".to_string()); + } + entry.merge_pending_sources(&added); + } } - FileGroup::Extension { project_set } => { + FileSourceIntermediateResult::Extension(project_set, added, removed) => { has_changed = true; - Self::process_schema_change( - &file_source_changes, - files, - project_set, 
- &mut self.extensions, - )?; + for project_name in project_set { + let entry = self.extensions.entry(project_name).or_default(); + for source in &removed { + entry.pending.insert(source.clone(), "".to_string()); + } + entry.merge_pending_sources(&added); + } } - FileGroup::Generated { project_name } => { + FileSourceIntermediateResult::Generated(project_name, files) => { if should_collect_changed_artifacts { - let mut dashset = - DashSet::with_capacity_and_hasher(files.len(), Default::default()); - dashset.extend(files.into_iter().map(|f| f.name)); - self.dirty_artifact_paths.insert(project_name, dashset); + self.dirty_artifact_paths + .insert(project_name, files.into_iter().collect()); } } - FileGroup::Ignore => {} + FileSourceIntermediateResult::Ignore => {} } } } @@ -550,14 +616,17 @@ impl CompilerState { for sources in self.docblocks.values_mut() { sources.commit_pending_sources(); } + for sources in self.full_sources.values_mut() { + sources.commit_pending_sources(); + } self.dirty_artifact_paths.clear(); } - /// Calculate dirty definitions from dirty artifacts - pub fn get_dirty_definitions( + /// Calculate dirty definitions/sources from dirty artifacts + pub fn get_dirty_artifact_sources( &self, config: &Config, - ) -> FnvHashMap> { + ) -> FnvHashMap> { if self.dirty_artifact_paths.is_empty() { return Default::default(); } @@ -576,7 +645,7 @@ impl CompilerState { let mut added = false; for artifact_record in artifact_records { if paths.remove(&artifact_record.path).is_some() && !added { - dirty_definitions.push(*definition_name); + dirty_definitions.push(definition_name.clone()); if paths.is_empty() { break 'outer; } @@ -596,13 +665,35 @@ impl CompilerState { } pub fn serialize_to_file(&self, path: &PathBuf) -> Result<()> { + let zstd_level: i32 = env::var("RELAY_SAVED_STATE_ZSTD_LEVEL").map_or_else( + |_| 12, + |level| { + level.parse::().expect( + "Expected RELAY_SAVED_STATE_ZSTD_LEVEL environment variable to be a number.", + ) + }, + ); + let writer = 
FsFile::create(path) - .and_then(|writer| ZstdEncoder::new(writer, 12)) + .and_then(|writer| { + let mut encoder = ZstdEncoder::new(writer, zstd_level)?; + match u32::try_from(std::thread::available_parallelism()?.get()) { + Ok(threads) => { + debug!("Using {} zstd threads", threads); + encoder.multithread(threads).ok(); + } + Err(_) => { + debug!("Using single-threaded zstd"); + } + } + Ok(encoder) + }) .map_err(|err| Error::WriteFileError { file: path.clone(), source: err, })? .auto_finish(); + let writer = BufWriter::with_capacity(ZstdEncoder::::recommended_input_size(), writer); bincode::serialize_into(writer, self).map_err(|err| Error::SerializationError { @@ -623,13 +714,14 @@ impl CompilerState { reader, ); - let memory_limit: u64 = env::var("RELAY_SAVED_STATE_MEMORY_LIMIT") - .map(|limit| { + let memory_limit: u64 = env::var("RELAY_SAVED_STATE_MEMORY_LIMIT").map_or_else( + |_| 10_u64.pow(10), /* 10GB */ + |limit| { limit.parse::().expect( "Expected RELAY_SAVED_STATE_MEMORY_LIMIT environment variable to be a number.", ) - }) - .unwrap_or_else(|_| 10_u64.pow(10) /* 10GB */); + }, + ); bincode::DefaultOptions::new() .with_fixint_encoding() @@ -660,30 +752,13 @@ impl CompilerState { entry.merge_pending_sources(source_set); } - fn process_schema_change( - file_source_changes: &FileSourceResult, - files: Vec, - project_set: ProjectSet, - source_map: &mut FnvHashMap, - ) -> Result<()> { - let mut removed_sources = vec![]; - let mut added_sources = FnvHashMap::default(); - for file in files { - let file_name = file.name.clone(); - if file.exists { - added_sources.insert(file_name, read_file_to_string(file_source_changes, &file)?); - } else { - removed_sources.push(file_name); - } - } - for project_name in project_set { - let entry = source_map.entry(project_name).or_default(); - for source in &removed_sources { - entry.pending.insert(source.clone(), "".to_string()); - } - entry.merge_pending_sources(&added_sources); - } - Ok(()) + fn set_pending_full_source_set( + 
&mut self, + project_name: ProjectName, + source_set: &FullSourceSet, + ) { + let entry = &mut self.full_sources.entry(project_name).or_default(); + entry.merge_pending_sources(source_set); } pub fn is_source_control_update_in_progress(&self) -> bool { @@ -697,13 +772,85 @@ impl CompilerState { } } +fn process_intermediate_schema_change( + file_source_changes: &FileSourceResult, + files: Vec, +) -> Result<(FnvHashMap, Vec)> { + let mut removed_sources = vec![]; + let mut added_sources = FnvHashMap::default(); + for file in files { + let file_name = file.name.clone(); + if file.exists { + added_sources.insert(file_name, read_file_to_string(file_source_changes, &file)?); + } else { + removed_sources.push(file_name); + } + } + Ok((added_sources, removed_sources)) +} + +fn process_categorized_sources( + categorized: HashMap>, + config: &Config, + file_source_changes: &FileSourceResult, + perf_logger: &impl PerfLogger, + preserve_empty: bool, +) -> Result> { + categorized + .into_par_iter() + .map(|(category, files)| match category { + FileGroup::Source { project_set } => { + // Slow step: reading from disk and parsing + let (graphql_sources, docblock_sources, full_sources) = extract_sources( + &project_set, + files, + file_source_changes, + preserve_empty, + perf_logger, + config, + )?; + Ok(FileSourceIntermediateResult::Source( + project_set, + graphql_sources, + docblock_sources, + full_sources, + )) + } + FileGroup::Schema { project_set } => { + let (added, removed) = + process_intermediate_schema_change(file_source_changes, files)?; + Ok(FileSourceIntermediateResult::Schema( + project_set, + added, + removed, + )) + } + FileGroup::Extension { project_set } => { + let (added, removed) = + process_intermediate_schema_change(file_source_changes, files)?; + Ok(FileSourceIntermediateResult::Extension( + project_set, + added, + removed, + )) + } + FileGroup::Generated { project_name } => Ok(FileSourceIntermediateResult::Generated( + project_name, + 
files.into_iter().map(|file: File| file.name).collect(), + )), + FileGroup::Ignore => Ok(FileSourceIntermediateResult::Ignore), + }) + .collect() +} + fn extract_sources( project_set: &ProjectSet, files: Vec, file_source_changes: &FileSourceResult, preserve_empty: bool, perf_logger: &impl PerfLogger, -) -> Result<(GraphQLSourceSet, DocblockSourceSet)> { + config: &Config, +) -> Result<(GraphQLSourceSet, DocblockSourceSet, FullSourceSet)> { let log_event = perf_logger.create_event("categorize"); log_event.string("source_set_name", project_set.to_string()); let extract_timer = log_event.start("extract_graphql_strings_from_file_time"); @@ -712,7 +859,7 @@ fn extract_sources( .par_iter() .map(|file| { if file.exists { - match extract_javascript_features_from_file(file_source_changes, file) { + match extract_javascript_features_from_file(file_source_changes, file, config) { Ok(features) => Ok((file, features)), Err(err) => Err(err), } @@ -726,6 +873,7 @@ fn extract_sources( let mut graphql_sources: GraphQLSourceSet = FnvHashMap::default(); let mut docblock_sources: DocblockSourceSet = FnvHashMap::default(); + let mut full_sources = FnvHashMap::default(); for (file, features) in source_features { if preserve_empty || !features.graphql_sources.is_empty() { graphql_sources.insert(file.name.clone(), features.graphql_sources); @@ -733,9 +881,12 @@ fn extract_sources( if preserve_empty || !features.docblock_sources.is_empty() { docblock_sources.insert(file.name.clone(), features.docblock_sources); } + if preserve_empty || !features.full_source.is_empty() { + full_sources.insert(file.name.clone(), features.full_source); + } } - Ok((graphql_sources, docblock_sources)) + Ok((graphql_sources, docblock_sources, full_sources)) } /// A module to serialize a watchman Clock value via JSON. 
diff --git a/compiler/crates/relay-compiler/src/config.rs b/compiler/crates/relay-compiler/src/config.rs index 478dc4097a590..f9746b1ac343a 100644 --- a/compiler/crates/relay-compiler/src/config.rs +++ b/compiler/crates/relay-compiler/src/config.rs @@ -14,36 +14,40 @@ use std::sync::Arc; use std::vec; use async_trait::async_trait; +use common::DiagnosticsResult; use common::FeatureFlags; use common::Rollout; -use common::ScalarName; +use docblock_syntax::DocblockAST; use dunce::canonicalize; use fnv::FnvBuildHasher; use fnv::FnvHashSet; use graphql_ir::OperationDefinition; use graphql_ir::Program; +use graphql_syntax::ExecutableDefinition; use indexmap::IndexMap; -use intern::string_key::Intern; use intern::string_key::StringKey; use js_config_loader::LoaderSource; use log::warn; use persist_query::PersistError; use rayon::prelude::*; use regex::Regex; -use relay_config::CustomScalarType; use relay_config::DiagnosticReportConfig; -use relay_config::FlowTypegenConfig; +pub use relay_config::ExtraArtifactsConfig; use relay_config::JsModuleFormat; pub use relay_config::LocalPersistConfig; use relay_config::ModuleImportConfig; pub use relay_config::PersistConfig; pub use relay_config::ProjectConfig; +use relay_config::ProjectName; pub use relay_config::RemotePersistConfig; +use relay_config::ResolversSchemaModuleConfig; use relay_config::SchemaConfig; pub use relay_config::SchemaLocation; use relay_config::TypegenConfig; pub use relay_config::TypegenLanguage; +use relay_docblock::DocblockIr; use relay_transforms::CustomTransformsConfig; +use rustc_hash::FxHashMap; use serde::de::Error as DeError; use serde::Deserialize; use serde::Deserializer; @@ -56,13 +60,15 @@ use watchman_client::pdu::ScmAwareClockData; use crate::build_project::artifact_writer::ArtifactFileWriter; use crate::build_project::artifact_writer::ArtifactWriter; use crate::build_project::generate_extra_artifacts::GenerateExtraArtifactsFn; +use 
crate::build_project::get_artifacts_file_hash_map::GetArtifactsFileHashMapFn; use crate::build_project::AdditionalValidations; -use crate::compiler_state::ProjectName; +use crate::compiler_state::CompilerState; use crate::compiler_state::ProjectSet; use crate::errors::ConfigValidationError; use crate::errors::Error; use crate::errors::Result; use crate::saved_state::SavedStateLoader; +use crate::source_control_for_root; use crate::status_reporter::ConsoleStatusReporter; use crate::status_reporter::StatusReporter; @@ -77,6 +83,9 @@ type PostArtifactsWriter = Box< type OperationPersisterCreator = Box Option> + Send + Sync>; +type UpdateCompilerStateFromSavedState = + Option>; + /// The full compiler config. This is a combination of: /// - the configuration file /// - the absolute path to the root of the compiled projects @@ -91,6 +100,10 @@ pub struct Config { pub root_dir: PathBuf, pub sources: FnvIndexMap, pub excludes: Vec, + /// Some projects may need to include extra source directories without being + /// affected by exclusion globs from the `excludes` config (e.g. generated + /// directories). + pub generated_sources: FnvIndexMap, pub projects: FnvIndexMap, pub header: Vec, pub codegen_command: Option, @@ -108,6 +121,8 @@ pub struct Config { /// Path to which to write the output of the compilation pub artifact_writer: Box, + // Function to get the file hash for an artifact file. + pub get_artifacts_file_hash_map: Option, /// Compile all files. Persist ids are still re-used unless /// `Config::repersist_operations` is also set. @@ -143,12 +158,36 @@ pub struct Config { /// and after each major transformation step (common, operations, etc) /// in the `apply_transforms(...)`. pub custom_transforms: Option, - + pub custom_override_schema_determinator: + Option Option + Send + Sync>>, pub export_persisted_query_ids_to_file: Option, /// The async function is called before the compiler connects to the file /// source. 
pub initialize_resources: Option>, + + /// Runs in `try_saved_state` when the compiler state is initialized from saved state. + pub update_compiler_state_from_saved_state: UpdateCompilerStateFromSavedState, + + // Allow incremental build for some schema changes + pub has_schema_change_incremental_build: bool, + + /// A custom function to extract resolver Dockblock IRs from sources + pub custom_extract_relay_resolvers: Option< + Box< + dyn Fn( + ProjectName, + &CompilerState, + &FxHashMap<&PathBuf, (Vec, Option<&Vec>)>, + ) -> DiagnosticsResult<(Vec, Vec)> + // (Types, Fields) + + Send + + Sync, + >, + >, + + /// A function to determine if full file source should be extracted instead of docblock + pub should_extract_full_source: Option bool + Send + Sync>>, } pub enum FileSourceKind { @@ -228,19 +267,19 @@ impl Config { Ok(None) => Err(Error::ConfigError { details: format!( r#" -Configuration for Relay compiler not found. + Configuration for Relay compiler not found. -Please make sure that the configuration file is created in {}. + Please make sure that the configuration file is created in {}. -You can also pass the path to the configuration file as `relay-compiler ./path-to-config/relay.json`. + You can also pass the path to the configuration file as `relay-compiler ./path-to-config/relay.json`. 
-Example file: -{{ - "src": "./src", - "schema": "./path-to/schema.graphql", - "language": "javascript" -}} -"#, + Example file: + {{ + "src": "./src", + "schema": "./path-to/schema.graphql", + "language": "javascript" + }} + "#, match loaders_sources.len() { 1 => loaders_sources[0].to_string(), 2 => format!("{} or {}", loaders_sources[0], loaders_sources[1]), @@ -346,6 +385,8 @@ Example file: base: config_file_project.base, enabled: true, schema_extensions: config_file_project.schema_extensions, + extra_artifacts_config: None, + extra: config_file_project.extra, output: config_file_project.output, extra_artifacts_output: config_file_project.extra_artifacts_output, shard_output: config_file_project.shard_output, @@ -355,19 +396,18 @@ Example file: typegen_config: config_file_project.typegen_config, persist: config_file_project.persist, variable_names_comment: config_file_project.variable_names_comment, - extra: config_file_project.extra, test_path_regex, feature_flags: Arc::new( config_file_project .feature_flags .unwrap_or_else(|| config_file_feature_flags.clone()), ), - filename_for_artifact: None, - skip_types_for_artifact: None, rollout: config_file_project.rollout, js_module_format: config_file_project.js_module_format, module_import_config: config_file_project.module_import_config, diagnostic_report_config: config_file_project.diagnostic_report_config, + resolvers_schema_module: config_file_project.resolvers_schema_module, + codegen_command: config_file_project.codegen_command, }; Ok((project_name, project_config)) }) @@ -383,7 +423,10 @@ Example file: let config = Self { name: config_file.name, - artifact_writer: Box::new(ArtifactFileWriter::new(None, root_dir.clone())), + artifact_writer: Box::new(ArtifactFileWriter::new( + source_control_for_root(&root_dir), + root_dir.clone(), + )), status_reporter: Box::new(ConsoleStatusReporter::new( root_dir.clone(), is_multi_project, @@ -391,12 +434,14 @@ Example file: root_dir, sources: config_file.sources, excludes: 
config_file.excludes, + generated_sources: config_file.generated_sources, projects, header: config_file.header, codegen_command: config_file.codegen_command, load_saved_state_file: None, generate_extra_artifacts: None, generate_virtual_id_file_name: None, + get_artifacts_file_hash_map: None, saved_state_config: config_file.saved_state_config, saved_state_loader: None, saved_state_version: hex::encode(hash.finalize()), @@ -408,8 +453,13 @@ Example file: is_dev_variable_name: config_file.is_dev_variable_name, file_source_config: FileSourceKind::Watchman, custom_transforms: None, + custom_override_schema_determinator: None, export_persisted_query_ids_to_file: None, initialize_resources: None, + update_compiler_state_from_saved_state: None, + has_schema_change_incremental_build: false, + custom_extract_relay_resolvers: None, + should_extract_full_source: None, }; let mut validation_errors = Vec::new(); @@ -543,6 +593,7 @@ impl fmt::Debug for Config { root_dir, sources, excludes, + generated_sources, compile_everything, repersist_operations, projects, @@ -567,6 +618,7 @@ impl fmt::Debug for Config { .field("root_dir", root_dir) .field("sources", sources) .field("excludes", excludes) + .field("generated_sources", generated_sources) .field("compile_everything", compile_everything) .field("repersist_operations", repersist_operations) .field("projects", projects) @@ -626,13 +678,17 @@ struct MultiProjectConfigFile { /// A mapping from directory paths (relative to the root) to a source set. /// If a path is a subdirectory of another path, the more specific path /// wins. - sources: IndexMap, + sources: FnvIndexMap, /// Glob patterns that should not be part of the sources even if they are /// in the source set directories. #[serde(default = "get_default_excludes")] excludes: Vec, + /// Similar to sources but not affected by excludes. + #[serde(default)] + generated_sources: FnvIndexMap, + /// Configuration of projects to compile. 
projects: FnvIndexMap, @@ -650,7 +706,7 @@ struct MultiProjectConfigFile { #[serde(deny_unknown_fields, rename_all = "camelCase", default)] pub struct SingleProjectConfigFile { #[serde(skip)] - pub project_name: StringKey, + pub project_name: ProjectName, /// Path to schema.graphql pub schema: PathBuf, @@ -662,11 +718,11 @@ pub struct SingleProjectConfigFile { /// the babel plugin needs `artifactDirectory` set as well. pub artifact_directory: Option, - /// [DEPRECATED] This is deprecated field, we're not using it in the V13. + /// \[DEPRECATED\] This is deprecated field, we're not using it in the V13. /// Adding to the config, to show the warning, and not a parse error. pub include: Vec, - /// [DEPRECATED] This is deprecated field, we're not using it in the V13. + /// \[DEPRECATED\] This is deprecated field, we're not using it in the V13. /// Adding to the config, to show the warning, and not a parse error. pub extensions: Vec, @@ -678,21 +734,8 @@ pub struct SingleProjectConfigFile { /// List of directories with schema extensions. pub schema_extensions: Vec, - /// This option controls whether or not a catch-all entry is added to enum type definitions - /// for values that may be added in the future. Enabling this means you will have to update - /// your application whenever the GraphQL server schema adds new enum values to prevent it - /// from breaking. - pub no_future_proof_enums: bool, - - /// The name of the language plugin (?) used for input files and artifacts - pub language: Option, - - /// Mappings from custom scalars in your schema to built-in GraphQL - /// types, for type emission purposes. - pub custom_scalars: FnvIndexMap, - - /// This option enables emitting es modules artifacts. 
- pub eager_es_modules: bool, + #[serde(flatten)] + pub typegen_config: TypegenConfig, /// Query Persist Configuration /// It contains URL and addition parameters that will be included @@ -723,12 +766,15 @@ pub struct SingleProjectConfigFile { #[serde(default)] pub feature_flags: Option, + + #[serde(default)] + pub resolvers_schema_module: Option, } impl Default for SingleProjectConfigFile { fn default() -> Self { Self { - project_name: "default".intern(), + project_name: ProjectName::default(), schema: Default::default(), src: Default::default(), artifact_directory: Default::default(), @@ -736,11 +782,8 @@ impl Default for SingleProjectConfigFile { extensions: vec![], excludes: get_default_excludes(), schema_extensions: vec![], - no_future_proof_enums: false, - language: None, - custom_scalars: Default::default(), schema_config: Default::default(), - eager_es_modules: false, + typegen_config: Default::default(), persist_config: None, is_dev_variable_name: None, codegen_command: None, @@ -748,6 +791,7 @@ impl Default for SingleProjectConfigFile { typegen_phase: None, feature_flags: None, module_import_config: Default::default(), + resolvers_schema_module: Default::default(), } } } @@ -826,18 +870,6 @@ impl SingleProjectConfigFile { } })?; - let language = self.language.ok_or_else(|| { - let mut variants = vec![]; - for lang in TypegenLanguage::get_variants_as_string() { - variants.push(format!(r#" "language": "{}""#, lang)); - } - - Error::ConfigError { - details: format!("The `language` option is missing in the Relay configuration file. 
Please, specify one of the following options:\n{}", variants.join("\n")), - } - } - )?; - let project_config = ConfigFileProject { output: self.artifact_directory.map(|dir| { normalize_path_from_config(current_dir.clone(), common_root_dir.clone(), dir) @@ -860,19 +892,11 @@ impl SingleProjectConfigFile { }) .collect(), persist: self.persist_config, - typegen_config: TypegenConfig { - language, - custom_scalar_types: self.custom_scalars.clone(), - eager_es_modules: self.eager_es_modules, - flow_typegen: FlowTypegenConfig { - no_future_proof_enums: self.no_future_proof_enums, - ..Default::default() - }, - ..Default::default() - }, + typegen_config: self.typegen_config, js_module_format: self.js_module_format, feature_flags: self.feature_flags, module_import_config: self.module_import_config, + resolvers_schema_module: self.resolvers_schema_module, ..Default::default() }; @@ -918,12 +942,12 @@ impl<'de> Deserialize<'de> for ConfigFile { Ok(single_project_config) => Ok(ConfigFile::SingleProject(single_project_config)), Err(single_project_error) => { let error_message = format!( - r#"The config file cannot be parsed as a multi-project config file due to: -- {:?}. + r#"The config file cannot be parsed as a single-project config file due to: + - {:?}. 
-It also cannot be a single project config file due to: -- {:?}."#, - multi_project_error, single_project_error + It also cannot be a multi-project config file due to: + - {:?}."#, + single_project_error, multi_project_error, ); Err(DeError::custom(error_message)) @@ -1013,6 +1037,12 @@ pub struct ConfigFileProject { #[serde(default)] pub diagnostic_report_config: DiagnosticReportConfig, + + #[serde(default)] + pub resolvers_schema_module: Option, + + #[serde(default)] + pub codegen_command: Option, } pub type PersistId = String; @@ -1023,6 +1053,7 @@ pub type PersistResult = std::result::Result; pub struct ArtifactForPersister { pub text: String, pub relative_path: PathBuf, + pub override_schema: Option, } #[async_trait] diff --git a/compiler/crates/relay-compiler/src/docblocks.rs b/compiler/crates/relay-compiler/src/docblocks.rs index 2a4127629c22f..95fcf1cd772f6 100644 --- a/compiler/crates/relay-compiler/src/docblocks.rs +++ b/compiler/crates/relay-compiler/src/docblocks.rs @@ -12,13 +12,7 @@ use common::SourceLocationKey; use docblock_syntax::parse_docblock; use docblock_syntax::DocblockAST; use errors::try_all; -use graphql_syntax::ExecutableDefinition; -use graphql_syntax::SchemaDocument; -use relay_config::ProjectConfig; -use relay_docblock::parse_docblock_ast; use relay_docblock::resolver_maybe_defining_type; -use relay_docblock::ParseOptions; -use schema::SDLSchema; use crate::file_source::LocatedDocblockSource; pub struct DocblockASTs { @@ -41,17 +35,6 @@ pub fn parse_docblock_asts_from_sources( Ok(DocblockASTs { types, fields }) } -pub fn build_schema_documents_from_docblocks( - docblocks: &[DocblockAST], - project_config: &ProjectConfig, - schema: &SDLSchema, - definitions: Option<&Vec>, -) -> DiagnosticsResult> { - try_all(docblocks.iter().filter_map(|ast: &DocblockAST| { - parse_source(ast, project_config, schema, definitions).transpose() - })) -} - fn parse_source_to_ast( file_path: &Path, docblock_source: &LocatedDocblockSource, @@ -64,23 +47,3 @@ 
fn parse_source_to_ast( source_location, ) } - -fn parse_source( - ast: &DocblockAST, - project_config: &ProjectConfig, - schema: &SDLSchema, - definitions: Option<&Vec>, -) -> DiagnosticsResult> { - let maybe_ir = parse_docblock_ast( - &ast, - definitions, - ParseOptions { - enable_output_type: &project_config - .feature_flags - .relay_resolver_enable_output_type, - }, - )?; - maybe_ir - .map(|ir| ir.to_graphql_schema_ast(schema, &project_config.schema_config)) - .transpose() -} diff --git a/compiler/crates/relay-compiler/src/errors.rs b/compiler/crates/relay-compiler/src/errors.rs index 658e19ae0ff89..67aca8747be48 100644 --- a/compiler/crates/relay-compiler/src/errors.rs +++ b/compiler/crates/relay-compiler/src/errors.rs @@ -11,13 +11,14 @@ use std::path::PathBuf; use common::Diagnostic; use glob::PatternError; use persist_query::PersistError; +use relay_config::ProjectName; +use serde::Serialize; use thiserror::Error; -use crate::compiler_state::ProjectName; - pub type Result = std::result::Result; -#[derive(Debug, Error)] +#[derive(Debug, Error, Serialize)] +#[serde(tag = "type")] pub enum Error { #[error("Unable to initialize relay compiler configuration. 
Error details: \n{details}")] ConfigError { details: String }, @@ -32,6 +33,7 @@ pub enum Error { )] ConfigFileValidation { config_path: PathBuf, + #[serde(skip_serializing)] validation_errors: Vec, }, @@ -56,31 +58,43 @@ pub enum Error { BuildProjectsErrors { errors: Vec }, #[error("Failed to read file `{file}`: {source}")] - ReadFileError { file: PathBuf, source: io::Error }, + ReadFileError { + file: PathBuf, + #[serde(skip_serializing)] + source: io::Error, + }, #[error("Failed to write file `{file}`: {source}")] - WriteFileError { file: PathBuf, source: io::Error }, + WriteFileError { + file: PathBuf, + #[serde(skip_serializing)] + source: io::Error, + }, #[error("Unable to serialize state to file: `{file}`, because of `{source}`.")] SerializationError { file: PathBuf, + #[serde(skip_serializing)] source: Box, }, #[error("Unable to deserialize state from file: `{file}`, because of `{source}`.")] DeserializationError { file: PathBuf, + #[serde(skip_serializing)] source: Box, }, #[error("Failed to canonicalize root: `{root}`.")] CanonicalizeRoot { root: PathBuf, + #[serde(skip_serializing)] source: std::io::Error, }, #[error("Watchman error: {source}")] Watchman { + #[serde(skip_serializing)] #[from] source: watchman_client::Error, }, @@ -91,6 +105,7 @@ pub enum Error { #[error("Failed to read file: `{file}`.")] FileRead { file: PathBuf, + #[serde(skip_serializing)] source: std::io::Error, }, @@ -102,12 +117,14 @@ pub enum Error { #[error("Error in post artifact writer: {error}")] PostArtifactsError { + #[serde(skip_serializing)] error: Box, }, #[error("Compilation cancelled due to new changes")] Cancelled, + #[serde(skip_serializing)] #[error("IO error {0}")] IOError(std::io::Error), @@ -117,9 +134,11 @@ pub enum Error { #[error("JSON parse error in `{file}`: {source}")] SerdeError { file: PathBuf, + #[serde(skip_serializing)] source: serde_json::Error, }, + #[serde(skip_serializing)] #[error("glob pattern error: {0}")] PatternError(PatternError), @@ -227,15 
+246,13 @@ pub enum ConfigValidationError { }, } -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] pub enum BuildProjectError { #[error( - "Validation errors:{}", + "Validation errors: {} error(s) encountered above.", errors - .iter() - .map(|err| format!("\n - {}", err.print_without_source())) - .collect::>() - .join("") + .len() )] ValidationErrors { errors: Vec, @@ -250,10 +267,15 @@ pub enum BuildProjectError { .join("") )] PersistErrors { + #[serde(skip_serializing)] errors: Vec, project_name: ProjectName, }, #[error("Failed to write file `{file}`: {source}")] - WriteFileError { file: PathBuf, source: io::Error }, + WriteFileError { + file: PathBuf, + #[serde(skip_serializing)] + source: io::Error, + }, } diff --git a/compiler/crates/relay-compiler/src/file_source.rs b/compiler/crates/relay-compiler/src/file_source.rs new file mode 100644 index 0000000000000..d76f9029e943f --- /dev/null +++ b/compiler/crates/relay-compiler/src/file_source.rs @@ -0,0 +1,209 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod external_file_source; +mod extract_graphql; +mod file_categorizer; +mod file_filter; +mod file_group; +mod read_file_to_string; +mod source_control_update_status; +mod walk_dir_file_source; +mod watchman_file_source; +mod watchman_query_builder; + +use std::path::PathBuf; +use std::sync::Arc; + +use common::PerfLogEvent; +use common::PerfLogger; +use external_file_source::ExternalFileSource; +pub use file_categorizer::categorize_files; +pub use file_categorizer::FileCategorizer; +pub use file_group::FileGroup; +use graphql_watchman::WatchmanFileSourceResult; +use graphql_watchman::WatchmanFileSourceSubscription; +use graphql_watchman::WatchmanFileSourceSubscriptionNextChange; +use log::warn; +pub use read_file_to_string::read_file_to_string; +use serde::Deserialize; +use serde_bser::value::Value; +pub use source_control_update_status::SourceControlUpdateStatus; +pub use watchman_client::prelude::Clock; +use watchman_file_source::WatchmanFileSource; + +pub use self::external_file_source::ExternalFileSourceResult; +pub use self::extract_graphql::extract_javascript_features_from_file; +pub use self::extract_graphql::source_for_location; +pub use self::extract_graphql::FsSourceReader; +pub use self::extract_graphql::LocatedDocblockSource; +pub use self::extract_graphql::LocatedGraphQLSource; +pub use self::extract_graphql::LocatedJavascriptSourceFeatures; +pub use self::extract_graphql::SourceReader; +use self::walk_dir_file_source::WalkDirFileSource; +use self::walk_dir_file_source::WalkDirFileSourceResult; +use crate::compiler_state::CompilerState; +use crate::config::Config; +use crate::config::FileSourceKind; +use crate::errors::Error; +use crate::errors::Result; + +pub enum FileSource { + Watchman(WatchmanFileSource), + External(ExternalFileSource), + WalkDir(WalkDirFileSource), +} + +impl FileSource { + pub async fn connect( + config: &Arc, + perf_logger_event: &impl PerfLogEvent, + ) -> Result { + match &config.file_source_config { + 
FileSourceKind::Watchman => Ok(Self::Watchman( + WatchmanFileSource::connect(config, perf_logger_event).await?, + )), + FileSourceKind::External(changed_files_list) => Ok(Self::External( + ExternalFileSource::new(changed_files_list.to_path_buf(), Arc::clone(config)), + )), + FileSourceKind::WalkDir => { + Ok(Self::WalkDir(WalkDirFileSource::new(Arc::clone(config)))) + } + } + } + + pub async fn query( + &self, + perf_logger_event: &impl PerfLogEvent, + perf_logger: &impl PerfLogger, + ) -> Result { + match self { + Self::Watchman(file_source) => file_source.query(perf_logger_event, perf_logger).await, + Self::External(file_source) => { + let result = file_source.create_compiler_state(perf_logger); + if let Err(err) = &result { + perf_logger_event.string( + "external_file_source_create_compiler_state_error", + format!("{:?}", err), + ); + warn!( + "Unable to create state from external source: {:?}. Sending a full watchman query...", + err + ); + let watchman_file_source = + WatchmanFileSource::connect(&file_source.config, perf_logger_event).await?; + watchman_file_source + .full_query(perf_logger_event, perf_logger) + .await + } else { + result + } + } + Self::WalkDir(file_source) => file_source.create_compiler_state(perf_logger), + } + } + + pub async fn subscribe( + self, + perf_logger_event: &impl PerfLogEvent, + perf_logger: &impl PerfLogger, + ) -> Result<(CompilerState, FileSourceSubscription)> { + match self { + Self::Watchman(file_source) => { + let (compiler_state, watchman_subscription) = file_source + .subscribe(perf_logger_event, perf_logger) + .await?; + Ok(( + compiler_state, + FileSourceSubscription::Watchman(watchman_subscription), + )) + } + Self::External(_) | Self::WalkDir(_) => { + unimplemented!( + "watch-mode (subscribe) is not available for non-watchman file sources." 
+ ) + } + } + } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct File { + pub name: PathBuf, + pub exists: bool, +} + +impl File { + pub fn absolute_path(&self, resolved_root: PathBuf) -> PathBuf { + let mut absolute_path = resolved_root; + absolute_path.push(&self.name); + absolute_path + } +} + +#[derive(Debug)] +pub enum FileSourceResult { + Watchman(WatchmanFileSourceResult), + External(ExternalFileSourceResult), + WalkDir(WalkDirFileSourceResult), +} + +impl FileSourceResult { + pub fn clock(&self) -> Option { + match self { + Self::Watchman(file_source) => Some(file_source.clock.clone()), + Self::External(_) => None, + Self::WalkDir(_) => None, + } + } + + pub fn resolved_root(&self) -> PathBuf { + match self { + Self::Watchman(file_source_result) => file_source_result.resolved_root.path(), + Self::External(file_source_result) => file_source_result.resolved_root.clone(), + Self::WalkDir(file_source_result) => file_source_result.resolved_root.clone(), + } + } + + pub fn saved_state_info(&self) -> &Option { + match self { + Self::Watchman(file_source_result) => &file_source_result.saved_state_info, + Self::External(_) => unimplemented!(), + Self::WalkDir(_) => unimplemented!(), + } + } + + pub fn size(&self) -> usize { + match self { + Self::Watchman(file_source_result) => file_source_result.files.len(), + Self::External(file_source_result) => file_source_result.files.len(), + Self::WalkDir(file_source_result) => file_source_result.files.len(), + } + } +} + +pub enum FileSourceSubscription { + Watchman(WatchmanFileSourceSubscription), // Oss(OssFileSourceSubscription) +} + +impl FileSourceSubscription { + pub async fn next_change(&mut self) -> Result { + match self { + Self::Watchman(file_source_subscription) => { + file_source_subscription.next_change().await.map_or_else( + |err| Err(Error::from(err)), + |next_change| Ok(FileSourceSubscriptionNextChange::Watchman(next_change)), + ) + } + } + } +} + +#[derive(Debug)] +pub enum 
FileSourceSubscriptionNextChange { + Watchman(WatchmanFileSourceSubscriptionNextChange), +} diff --git a/compiler/crates/relay-compiler/src/file_source/external_file_source.rs b/compiler/crates/relay-compiler/src/file_source/external_file_source.rs index 432a5f77f0b5c..9c9d22348aced 100644 --- a/compiler/crates/relay-compiler/src/file_source/external_file_source.rs +++ b/compiler/crates/relay-compiler/src/file_source/external_file_source.rs @@ -8,6 +8,7 @@ use std::fs::File as FsFile; use std::io::BufReader; use std::path::PathBuf; +use std::sync::Arc; use common::PerfLogger; @@ -20,9 +21,9 @@ use crate::FileSourceResult; /// The purpose of this module is to handle saved state and list of changed files /// from the external source, and not from the watchman -pub struct ExternalFileSource<'config> { +pub struct ExternalFileSource { changed_files_list: PathBuf, - pub config: &'config Config, + pub config: Arc, } #[derive(Debug)] @@ -50,8 +51,8 @@ impl ExternalFileSourceResult { } } -impl<'config> ExternalFileSource<'config> { - pub fn new(changed_files_list: PathBuf, config: &'config Config) -> Self { +impl ExternalFileSource { + pub fn new(changed_files_list: PathBuf, config: Arc) -> Self { Self { config, changed_files_list, @@ -79,7 +80,7 @@ impl<'config> ExternalFileSource<'config> { ExternalFileSourceResult::read_from_fs(&self.changed_files_list, root_dir.clone())?, )); - compiler_state.merge_file_source_changes(self.config, perf_logger, true)?; + compiler_state.merge_file_source_changes(&self.config, perf_logger, true)?; Ok(compiler_state) } diff --git a/compiler/crates/relay-compiler/src/file_source/extract_graphql.rs b/compiler/crates/relay-compiler/src/file_source/extract_graphql.rs index 3d35fe6cc104d..db9a6c501a289 100644 --- a/compiler/crates/relay-compiler/src/file_source/extract_graphql.rs +++ b/compiler/crates/relay-compiler/src/file_source/extract_graphql.rs @@ -7,7 +7,6 @@ use std::fs; use std::path::Path; -use std::path::PathBuf; use 
common::SourceLocationKey; use docblock_syntax::DocblockSource; @@ -21,6 +20,7 @@ use super::read_file_to_string; use super::File; use super::FileSourceResult; use crate::errors::Result; +use crate::file_source::Config; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct LocatedGraphQLSource { @@ -38,10 +38,11 @@ pub struct LocatedDocblockSource { pub struct LocatedJavascriptSourceFeatures { pub graphql_sources: Vec, pub docblock_sources: Vec, + pub full_source: String, } pub trait SourceReader { - fn read_file_to_string(&self, path: &PathBuf) -> std::io::Result; + fn read_file_to_string(&self, path: &Path) -> std::io::Result; } /// Default implementation of the file source reader @@ -49,7 +50,7 @@ pub trait SourceReader { pub struct FsSourceReader; impl SourceReader for FsSourceReader { - fn read_file_to_string(&self, path: &PathBuf) -> std::io::Result { + fn read_file_to_string(&self, path: &Path) -> std::io::Result { fs::read_to_string(path) } } @@ -59,11 +60,17 @@ impl SourceReader for FsSourceReader { pub fn extract_javascript_features_from_file( file_source_result: &FileSourceResult, file: &File, + config: &Config, ) -> Result { let contents = read_file_to_string(file_source_result, file)?; let features = extract_graphql::extract(&contents); let mut graphql_sources = Vec::new(); let mut docblock_sources = Vec::new(); + let extract_full_source_for_docblock = match &config.should_extract_full_source { + Some(f) => f(&contents), + None => false, + }; + for (index, feature) in features.into_iter().enumerate() { match feature { JavaScriptSourceFeature::GraphQL(graphql_source) => { @@ -73,10 +80,12 @@ pub fn extract_javascript_features_from_file( }) } JavaScriptSourceFeature::Docblock(docblock_source) => { - docblock_sources.push(LocatedDocblockSource { - docblock_source, - index, - }) + if !extract_full_source_for_docblock { + docblock_sources.push(LocatedDocblockSource { + docblock_source, + index, + }) + } } } } @@ -84,6 +93,11 @@ pub fn 
extract_javascript_features_from_file( Ok(LocatedJavascriptSourceFeatures { graphql_sources, docblock_sources, + full_source: if extract_full_source_for_docblock { + contents + } else { + String::new() + }, }) } diff --git a/compiler/crates/relay-compiler/src/file_source/file_categorizer.rs b/compiler/crates/relay-compiler/src/file_source/file_categorizer.rs index a920232c62d4e..25c678b04d5ea 100644 --- a/compiler/crates/relay-compiler/src/file_source/file_categorizer.rs +++ b/compiler/crates/relay-compiler/src/file_source/file_categorizer.rs @@ -18,12 +18,12 @@ use common::sync::ParallelIterator; use fnv::FnvHashSet; use log::warn; use rayon::iter::IntoParallelRefIterator; +use relay_config::ProjectName; use relay_typegen::TypegenLanguage; use super::file_filter::FileFilter; use super::File; use super::FileGroup; -use crate::compiler_state::ProjectName; use crate::compiler_state::ProjectSet; use crate::config::Config; use crate::config::SchemaLocation; @@ -137,6 +137,7 @@ pub struct FileCategorizer { extensions_mapping: PathMapping, default_generated_dir: &'static OsStr, generated_dir_mapping: PathMapping, + generated_sources: Vec, source_mapping: PathMapping, schema_file_mapping: HashMap, schema_dir_mapping: PathMapping, @@ -148,6 +149,17 @@ impl FileCategorizer { for (path, project_set) in &config.sources { source_mapping.push((path.clone(), project_set.clone())); } + for (path, project_set) in &config.generated_sources { + source_mapping.push((path.clone(), project_set.clone())); + } + + let default_generated_dir = OsStr::new("__generated__"); + let mut generated_sources = vec![]; + for (path, _project_set) in &config.generated_sources { + if in_relative_generated_dir(default_generated_dir, path) { + generated_sources.push(path.clone()); + } + } let mut extensions_map: HashMap = Default::default(); for (&project_name, project_config) in &config.projects { @@ -216,8 +228,9 @@ impl FileCategorizer { Self { source_language, extensions_mapping: 
PathMapping::new(extensions_map.into_iter().collect()), - default_generated_dir: OsStr::new("__generated__"), + default_generated_dir, generated_dir_mapping: PathMapping::new(generated_dir_mapping), + generated_sources, schema_file_mapping, schema_dir_mapping: PathMapping::new(schema_dir_mapping), source_mapping: PathMapping::new(source_mapping), @@ -230,15 +243,24 @@ impl FileCategorizer { pub fn categorize(&self, path: &Path) -> Result> { let extension = path.extension(); + let in_generated_sources = self + .generated_sources + .iter() + .any(|generated_root| path.starts_with(generated_root)); + if let Some(project_name) = self.generated_dir_mapping.find(path) { - return if let Some(extension) = extension { - if is_source_code_extension(extension) || is_extra_extensions(extension) { - Ok(FileGroup::Generated { project_name }) + if let Some(extension) = extension { + if is_source_code_extension(extension) { + if !in_generated_sources { + return Ok(FileGroup::Generated { project_name }); + } + } else if is_extra_extensions(extension) { + return Ok(FileGroup::Generated { project_name }); } else { - Ok(FileGroup::Ignore) + return Ok(FileGroup::Ignore); } } else { - Ok(FileGroup::Ignore) + return Ok(FileGroup::Ignore); }; } @@ -249,7 +271,11 @@ impl FileCategorizer { .source_mapping .find(path) .ok_or(Cow::Borrowed("File is not in any source set."))?; - if self.in_relative_generated_dir(path) { + + let in_generated_dir = in_relative_generated_dir(self.default_generated_dir, path); + // If the path is in a generated directory and is not a generated source + // Some generated files can be treated as sources files. For example, resolver codegen. 
+ if in_generated_dir && !in_generated_sources { if project_set.has_multiple_projects() { Err(Cow::Owned(format!( "Overlapping input sources are incompatible with relative generated \ @@ -290,13 +316,6 @@ impl FileCategorizer { } } - fn in_relative_generated_dir(&self, path: &Path) -> bool { - path.components().any(|comp| match comp { - Component::Normal(comp) => comp == self.default_generated_dir, - _ => false, - }) - } - fn is_valid_extension_for_project_set( &self, project_set: &ProjectSet, @@ -304,7 +323,7 @@ impl FileCategorizer { path: &Path, ) -> bool { for project_name in project_set.iter() { - if let Some(language) = self.source_language.get(&project_name) { + if let Some(language) = self.source_language.get(project_name) { if !is_valid_source_code_extension(language, extension) { warn!( "Unexpected file `{:?}` for language `{:?}`.", @@ -340,6 +359,13 @@ impl PathMapping { } } +fn in_relative_generated_dir(default_generated_dir: &OsStr, path: &Path) -> bool { + path.components().any(|comp| match comp { + Component::Normal(comp) => comp == default_generated_dir, + _ => false, + }) +} + fn is_source_code_extension(extension: &OsStr) -> bool { extension == "js" || extension == "jsx" || extension == "ts" || extension == "tsx" } @@ -379,6 +405,9 @@ mod tests { "src/typescript": "typescript", "src/custom_overlapping": ["with_custom_generated_dir", "overlapping_generated_dir"] }, + "generatedSources": { + "src/resolver_codegen/__generated__": "public" + }, "projects": { "public": { "schema": "graphql/public.graphql", @@ -418,7 +447,25 @@ mod tests { .categorize(&PathBuf::from("src/js/a.js")) .unwrap(), FileGroup::Source { - project_set: ProjectSet::of("public".intern()), + project_set: ProjectSet::of("public".intern().into()), + }, + ); + assert_eq!( + categorizer + .categorize(&PathBuf::from("src/js/__generated__/a.js")) + .unwrap(), + FileGroup::Generated { + project_name: "public".intern().into(), + }, + ); + assert_eq!( + categorizer + 
.categorize(&PathBuf::from( + "src/resolver_codegen/__generated__/resolvers.js" + )) + .unwrap(), + FileGroup::Source { + project_set: ProjectSet::of("public".intern().into()), }, ); assert_eq!( @@ -426,7 +473,7 @@ mod tests { .categorize(&PathBuf::from("src/js/nested/b.js")) .unwrap(), FileGroup::Source { - project_set: ProjectSet::of("public".intern()), + project_set: ProjectSet::of("public".intern().into()), }, ); assert_eq!( @@ -434,7 +481,7 @@ mod tests { .categorize(&PathBuf::from("src/js/internal/nested/c.js")) .unwrap(), FileGroup::Source { - project_set: ProjectSet::of("internal".intern()), + project_set: ProjectSet::of("internal".intern().into()), }, ); assert_eq!( @@ -446,7 +493,7 @@ mod tests { .categorize(&PathBuf::from("src/custom/custom-generated/c.js")) .unwrap(), FileGroup::Source { - project_set: ProjectSet::of("with_custom_generated_dir".intern()), + project_set: ProjectSet::of("with_custom_generated_dir".intern().into()), }, ); assert_eq!( @@ -454,7 +501,7 @@ mod tests { .categorize(&PathBuf::from("src/js/internal/nested/__generated__/c.js")) .unwrap(), FileGroup::Generated { - project_name: "internal".intern() + project_name: "internal".intern().into() }, ); assert_eq!( @@ -462,7 +509,7 @@ mod tests { .categorize(&PathBuf::from("graphql/custom-generated/c.js")) .unwrap(), FileGroup::Generated { - project_name: "with_custom_generated_dir".intern() + project_name: "with_custom_generated_dir".intern().into() }, ); assert_eq!( @@ -470,7 +517,7 @@ mod tests { .categorize(&PathBuf::from("graphql/public.graphql")) .unwrap(), FileGroup::Schema { - project_set: ProjectSet::of("public".intern()) + project_set: ProjectSet::of("public".intern().into()) }, ); assert_eq!( @@ -478,7 +525,7 @@ mod tests { .categorize(&PathBuf::from("graphql/__generated__/internal.graphql")) .unwrap(), FileGroup::Schema { - project_set: ProjectSet::of("internal".intern()) + project_set: ProjectSet::of("internal".intern().into()) }, ); assert_eq!( @@ -486,7 +533,7 @@ mod tests { 
.categorize(&PathBuf::from("src/typescript/a.ts")) .unwrap(), FileGroup::Source { - project_set: ProjectSet::of("typescript".intern()), + project_set: ProjectSet::of("typescript".intern().into()), }, ); } @@ -540,7 +587,7 @@ mod tests { assert_eq!( categorizer.categorize(&PathBuf::from("src/custom_overlapping/__generated__/c.js")), Err(Cow::Borrowed( - "Overlapping input sources are incompatible with relative generated directories. Got file in a relative generated directory with source set ProjectSet([\"with_custom_generated_dir\", \"overlapping_generated_dir\"])." + "Overlapping input sources are incompatible with relative generated directories. Got file in a relative generated directory with source set ProjectSet([Named(\"with_custom_generated_dir\"), Named(\"overlapping_generated_dir\")])." )), ); } diff --git a/compiler/crates/relay-compiler/src/file_source/file_filter.rs b/compiler/crates/relay-compiler/src/file_source/file_filter.rs index da791645df3be..e39871e7bb3da 100644 --- a/compiler/crates/relay-compiler/src/file_source/file_filter.rs +++ b/compiler/crates/relay-compiler/src/file_source/file_filter.rs @@ -10,7 +10,7 @@ use std::path::PathBuf; use fnv::FnvHashSet; use glob::Pattern; -use intern::string_key::StringKey; +use relay_config::ProjectName; use crate::config::Config; use crate::config::SchemaLocation; @@ -58,8 +58,11 @@ impl FileFilter { } // Get roots for extensions, schemas and output dirs -fn get_extra_roots(config: &Config, enabled_projects: &FnvHashSet) -> Vec { +fn get_extra_roots(config: &Config, enabled_projects: &FnvHashSet) -> Vec { let mut roots = vec![]; + for (path, _project_set) in &config.generated_sources { + roots.push(path); + } for project_config in config.projects.values() { if !enabled_projects.contains(&project_config.name) { continue; @@ -78,7 +81,7 @@ fn get_extra_roots(config: &Config, enabled_projects: &FnvHashSet) -> unify_roots(roots) } -fn get_sources_root(config: &Config, enabled_projects: &FnvHashSet) -> Vec { +fn 
get_sources_root(config: &Config, enabled_projects: &FnvHashSet) -> Vec { unify_roots( config .sources @@ -87,7 +90,7 @@ fn get_sources_root(config: &Config, enabled_projects: &FnvHashSet) - let is_enabled = project_set .iter() .any(|name| enabled_projects.contains(name)); - is_enabled.then(|| path) + is_enabled.then_some(path) }) .collect(), ) @@ -99,7 +102,7 @@ fn unify_roots(mut paths: Vec<&PathBuf>) -> Vec { let mut roots = Vec::new(); for path in paths { match roots.last() { - Some(prev) if path.starts_with(&prev) => { + Some(prev) if path.starts_with(prev) => { // skip } _ => { @@ -109,3 +112,77 @@ fn unify_roots(mut paths: Vec<&PathBuf>) -> Vec { } roots } + +#[cfg(test)] +mod tests { + use super::*; + + fn create_test_config() -> Config { + Config::from_string_for_test( + r#" + { + "sources": { + "src/js": "public", + "src/js/internal": "internal", + "src/vendor": "public", + "src/custom": "with_custom_generated_dir", + "src/typescript": "typescript", + "src/custom_overlapping": ["with_custom_generated_dir", "overlapping_generated_dir"] + }, + "generatedSources": { + "src/resolver_codegen/__generated__": "public" + }, + "projects": { + "public": { + "schema": "graphql/public.graphql", + "language": "flow" + }, + "internal": { + "schema": "graphql/__generated__/internal.graphql", + "language": "flow" + }, + "with_custom_generated_dir": { + "schema": "graphql/__generated__/custom.graphql", + "output": "graphql/custom-generated", + "language": "flow" + }, + "typescript": { + "schema": "graphql/ts_schema.graphql", + "language": "typescript" + }, + "overlapping_generated_dir": { + "schema": "graphql/__generated__/custom.graphql", + "language": "flow" + } + } + } + "#, + ) + .unwrap() + } + + #[test] + fn test_file_filter() { + let config = create_test_config(); + let file_filter = FileFilter::from_config(&config); + + assert!(file_filter.is_file_relevant(&PathBuf::from("src/js/a.js")),); + 
assert!(!file_filter.is_file_relevant(&PathBuf::from("src/js/__generated__/a.js")),); + assert!(file_filter.is_file_relevant(&PathBuf::from( + "src/resolver_codegen/__generated__/resolvers.js" + )),); + assert!(file_filter.is_file_relevant(&PathBuf::from("src/js/nested/b.js")),); + assert!(file_filter.is_file_relevant(&PathBuf::from("src/js/internal/nested/c.js")),); + assert!(file_filter.is_file_relevant(&PathBuf::from("src/custom/custom-generated/c.js")),); + assert!( + !file_filter + .is_file_relevant(&PathBuf::from("src/js/internal/nested/__generated__/c.js")), + ); + assert!(file_filter.is_file_relevant(&PathBuf::from("graphql/custom-generated/c.js")),); + assert!(file_filter.is_file_relevant(&PathBuf::from("graphql/public.graphql")),); + assert!( + file_filter.is_file_relevant(&PathBuf::from("graphql/__generated__/internal.graphql")), + ); + assert!(file_filter.is_file_relevant(&PathBuf::from("src/typescript/a.ts")),); + } +} diff --git a/compiler/crates/relay-compiler/src/file_source/file_group.rs b/compiler/crates/relay-compiler/src/file_source/file_group.rs index 1d194d8ef16b5..14c135062d570 100644 --- a/compiler/crates/relay-compiler/src/file_source/file_group.rs +++ b/compiler/crates/relay-compiler/src/file_source/file_group.rs @@ -5,7 +5,8 @@ * LICENSE file in the root directory of this source tree. */ -use crate::compiler_state::ProjectName; +use relay_config::ProjectName; + use crate::compiler_state::ProjectSet; #[derive(Debug, PartialEq, Eq, Hash)] diff --git a/compiler/crates/relay-compiler/src/file_source/mod.rs b/compiler/crates/relay-compiler/src/file_source/mod.rs deleted file mode 100644 index 7d5b0f2b659be..0000000000000 --- a/compiler/crates/relay-compiler/src/file_source/mod.rs +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -mod external_file_source; -mod extract_graphql; -mod file_categorizer; -mod file_filter; -mod file_group; -mod read_file_to_string; -mod source_control_update_status; -mod walk_dir_file_source; -mod watchman_file_source; -mod watchman_query_builder; - -use std::path::PathBuf; - -use common::PerfLogEvent; -use common::PerfLogger; -use external_file_source::ExternalFileSource; -pub use file_categorizer::categorize_files; -pub use file_categorizer::FileCategorizer; -pub use file_group::FileGroup; -use graphql_watchman::WatchmanFileSourceResult; -use graphql_watchman::WatchmanFileSourceSubscription; -use graphql_watchman::WatchmanFileSourceSubscriptionNextChange; -use log::warn; -pub use read_file_to_string::read_file_to_string; -use serde::Deserialize; -use serde_bser::value::Value; -pub use source_control_update_status::SourceControlUpdateStatus; -pub use watchman_client::prelude::Clock; -use watchman_file_source::WatchmanFileSource; - -use self::external_file_source::ExternalFileSourceResult; -pub use self::extract_graphql::extract_javascript_features_from_file; -pub use self::extract_graphql::source_for_location; -pub use self::extract_graphql::FsSourceReader; -pub use self::extract_graphql::LocatedDocblockSource; -pub use self::extract_graphql::LocatedGraphQLSource; -pub use self::extract_graphql::LocatedJavascriptSourceFeatures; -pub use self::extract_graphql::SourceReader; -use self::walk_dir_file_source::WalkDirFileSource; -use self::walk_dir_file_source::WalkDirFileSourceResult; -use crate::compiler_state::CompilerState; -use crate::config::Config; -use crate::config::FileSourceKind; -use crate::errors::Error; -use crate::errors::Result; - -pub enum FileSource<'config> { - Watchman(WatchmanFileSource<'config>), - External(ExternalFileSource<'config>), - WalkDir(WalkDirFileSource<'config>), -} - -impl<'config> FileSource<'config> { - pub async fn connect( - config: &'config Config, - perf_logger_event: &impl PerfLogEvent, - ) -> Result> { - match 
&config.file_source_config { - FileSourceKind::Watchman => Ok(Self::Watchman( - WatchmanFileSource::connect(config, perf_logger_event).await?, - )), - FileSourceKind::External(changed_files_list) => Ok(Self::External( - ExternalFileSource::new(changed_files_list.to_path_buf(), config), - )), - FileSourceKind::WalkDir => Ok(Self::WalkDir(WalkDirFileSource::new(config))), - } - } - - pub async fn query( - &self, - perf_logger_event: &impl PerfLogEvent, - perf_logger: &impl PerfLogger, - ) -> Result { - match self { - Self::Watchman(file_source) => file_source.query(perf_logger_event, perf_logger).await, - Self::External(file_source) => { - let result = file_source.create_compiler_state(perf_logger); - if let Err(err) = &result { - perf_logger_event.string( - "external_file_source_create_compiler_state_error", - format!("{:?}", err), - ); - warn!( - "Unable to create state from external source: {:?}. Sending a full watchman query...", - err - ); - let watchman_file_source = - WatchmanFileSource::connect(file_source.config, perf_logger_event).await?; - watchman_file_source - .full_query(perf_logger_event, perf_logger) - .await - } else { - result - } - } - Self::WalkDir(file_source) => file_source.create_compiler_state(perf_logger), - } - } - - pub async fn subscribe( - self, - perf_logger_event: &impl PerfLogEvent, - perf_logger: &impl PerfLogger, - ) -> Result<(CompilerState, FileSourceSubscription)> { - match self { - Self::Watchman(file_source) => { - let (compiler_state, watchman_subscription) = file_source - .subscribe(perf_logger_event, perf_logger) - .await?; - Ok(( - compiler_state, - FileSourceSubscription::Watchman(watchman_subscription), - )) - } - Self::External(_) | Self::WalkDir(_) => { - unimplemented!( - "watch-mode (subscribe) is not available for non-watchman file sources." 
- ) - } - } - } -} - -#[derive(Debug, Clone, Deserialize)] -pub struct File { - pub name: PathBuf, - pub exists: bool, -} - -impl File { - pub fn absolute_path(&self, resolved_root: PathBuf) -> PathBuf { - let mut absolute_path = resolved_root; - absolute_path.push(&self.name); - absolute_path - } -} - -#[derive(Debug)] -pub enum FileSourceResult { - Watchman(WatchmanFileSourceResult), - External(ExternalFileSourceResult), - WalkDir(WalkDirFileSourceResult), -} - -impl FileSourceResult { - pub fn clock(&self) -> Option { - match self { - Self::Watchman(file_source) => Some(file_source.clock.clone()), - Self::External(_) => None, - Self::WalkDir(_) => None, - } - } - - pub fn resolved_root(&self) -> PathBuf { - match self { - Self::Watchman(file_source_result) => file_source_result.resolved_root.path(), - Self::External(file_source_result) => file_source_result.resolved_root.clone(), - Self::WalkDir(file_source_result) => file_source_result.resolved_root.clone(), - } - } - - pub fn saved_state_info(&self) -> &Option { - match self { - Self::Watchman(file_source_result) => &file_source_result.saved_state_info, - Self::External(_) => unimplemented!(), - Self::WalkDir(_) => unimplemented!(), - } - } - - pub fn size(&self) -> usize { - match self { - Self::Watchman(file_source_result) => file_source_result.files.len(), - Self::External(file_source_result) => file_source_result.files.len(), - Self::WalkDir(file_source_result) => file_source_result.files.len(), - } - } -} - -pub enum FileSourceSubscription { - Watchman(WatchmanFileSourceSubscription), // Oss(OssFileSourceSubscription) -} - -impl FileSourceSubscription { - pub async fn next_change(&mut self) -> Result { - match self { - Self::Watchman(file_source_subscription) => { - file_source_subscription.next_change().await.map_or_else( - |err| Err(Error::from(err)), - |next_change| Ok(FileSourceSubscriptionNextChange::Watchman(next_change)), - ) - } - } - } -} - -#[derive(Debug)] -pub enum 
FileSourceSubscriptionNextChange { - Watchman(WatchmanFileSourceSubscriptionNextChange), -} diff --git a/compiler/crates/relay-compiler/src/file_source/walk_dir_file_source.rs b/compiler/crates/relay-compiler/src/file_source/walk_dir_file_source.rs index cc6636d3c07ee..cc67b3c412abf 100644 --- a/compiler/crates/relay-compiler/src/file_source/walk_dir_file_source.rs +++ b/compiler/crates/relay-compiler/src/file_source/walk_dir_file_source.rs @@ -8,6 +8,7 @@ use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; use common::PerfLogEvent; use common::PerfLogger; @@ -27,12 +28,12 @@ pub struct WalkDirFileSourceResult { pub resolved_root: PathBuf, } -pub struct WalkDirFileSource<'config> { - pub config: &'config Config, - expected_file_extensions: HashSet<&'config str>, +pub struct WalkDirFileSource { + pub config: Arc, + expected_file_extensions: HashSet<&'static str>, } -fn get_expected_file_extensions(config: &Config) -> HashSet<&str> { +fn get_expected_file_extensions(config: &Config) -> HashSet<&'static str> { let mut file_extensions = HashSet::<&str>::with_capacity(6); file_extensions.insert("graphql"); file_extensions.insert("gql"); @@ -54,14 +55,15 @@ fn get_expected_file_extensions(config: &Config) -> HashSet<&str> { file_extensions } -impl<'config> WalkDirFileSource<'config> { - pub fn new(config: &'config Config) -> Self { +impl WalkDirFileSource { + pub fn new(config: Arc) -> Self { debug!( "Watchman server is disabled, or not available. Using GlobFileSource to find files." ); + let expected_file_extensions = get_expected_file_extensions(&config); Self { config, - expected_file_extensions: get_expected_file_extensions(config), + expected_file_extensions, } } @@ -85,7 +87,7 @@ impl<'config> WalkDirFileSource<'config> { .ok()? 
.to_path_buf(); - self.should_include_file(&relative_path).then(|| File { + self.should_include_file(&relative_path).then_some(File { name: relative_path, exists: true, }) @@ -102,7 +104,7 @@ impl<'config> WalkDirFileSource<'config> { }); setup_event.stop(timer); let compiler_state = CompilerState::from_file_source_changes( - self.config, + &self.config, &file_source_changes, &setup_event, perf_logger, diff --git a/compiler/crates/relay-compiler/src/file_source/watchman_file_source.rs b/compiler/crates/relay-compiler/src/file_source/watchman_file_source.rs index 6954e4958d00e..4fbb717b29e53 100644 --- a/compiler/crates/relay-compiler/src/file_source/watchman_file_source.rs +++ b/compiler/crates/relay-compiler/src/file_source/watchman_file_source.rs @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +use std::sync::Arc; + use common::PerfLogEvent; use common::PerfLogger; use graphql_watchman::WatchmanFile; @@ -25,17 +27,17 @@ use crate::errors::Error; use crate::errors::Result; use crate::saved_state::SavedStateLoader; -pub struct WatchmanFileSource<'config> { - client: Client, - config: &'config Config, +pub struct WatchmanFileSource { + client: Arc, + config: Arc, resolved_root: ResolvedRoot, } -impl<'config> WatchmanFileSource<'config> { +impl WatchmanFileSource { pub async fn connect( - config: &'config Config, + config: &Arc, perf_logger_event: &impl PerfLogEvent, - ) -> Result> { + ) -> Result { let connect_timer = perf_logger_event.start("file_source_connect_time"); let client = Connector::new().connect().await?; let canonical_root = CanonicalPath::canonicalize(&config.root_dir).map_err(|err| { @@ -51,8 +53,8 @@ impl<'config> WatchmanFileSource<'config> { resolved_root ); Ok(Self { - client, - config, + client: Arc::new(client), + config: config.clone(), resolved_root, }) } @@ -64,7 +66,7 @@ impl<'config> WatchmanFileSource<'config> { perf_logger_event: &impl PerfLogEvent, perf_logger: &impl PerfLogger, ) -> Result { - 
info!("querying files to compile..."); + info!("Querying files to compile..."); let query_time = perf_logger_event.start("file_source_query_time"); // If the saved state flag is passed, load from it or fail. if let Some(saved_state_path) = &self.config.load_saved_state_file { @@ -72,14 +74,21 @@ impl<'config> WatchmanFileSource<'config> { CompilerState::deserialize_from_file(saved_state_path) })?; let query_timer = perf_logger_event.start("watchman_query_time"); - let file_source_result = self.query_file_result(compiler_state.clock.clone()).await?; + let file_source_result = query_file_result( + &self.config, + &self.client, + &self.resolved_root.clone(), + compiler_state.clock.clone(), + false, + ) + .await?; perf_logger_event.stop(query_timer); compiler_state .pending_file_source_changes .write() .unwrap() .push(file_source_result); - compiler_state.merge_file_source_changes(self.config, perf_logger, true)?; + compiler_state.merge_file_source_changes(&self.config, perf_logger, true)?; perf_logger_event.stop(query_time); return Ok(compiler_state); } @@ -92,7 +101,7 @@ impl<'config> WatchmanFileSource<'config> { saved_state_loader: Some(saved_state_loader), saved_state_version, .. 
- } = self.config + } = self.config.as_ref() { match self .try_saved_state( @@ -106,9 +115,12 @@ impl<'config> WatchmanFileSource<'config> { { Ok(load_result) => { perf_logger_event.stop(query_time); + perf_logger_event.string("try_saved_state_result", "success".to_owned()); return load_result; } Err(saved_state_failure) => { + perf_logger_event + .string("try_saved_state_result", saved_state_failure.to_owned()); warn!( "Unable to load saved state, falling back to full build: {}", saved_state_failure @@ -129,10 +141,17 @@ impl<'config> WatchmanFileSource<'config> { perf_logger_event: &impl PerfLogEvent, perf_logger: &impl PerfLogger, ) -> Result { - let file_source_result = self.query_file_result(None).await?; + let file_source_result = query_file_result( + &self.config, + &self.client, + &self.resolved_root.clone(), + None, + false, + ) + .await?; let compiler_state = perf_logger_event.time("from_file_source_changes", || { CompilerState::from_file_source_changes( - self.config, + &self.config, &file_source_result, perf_logger_event, perf_logger, @@ -150,10 +169,17 @@ impl<'config> WatchmanFileSource<'config> { let timer = perf_logger_event.start("file_source_subscribe_time"); let compiler_state = self.query(perf_logger_event, perf_logger).await?; - let expression = get_watchman_expr(self.config); + let expression = get_watchman_expr(&self.config); let query_timer = perf_logger_event.start("watchman_query_time_before_subscribe"); - let file_source_result = self.query_file_result(compiler_state.clock.clone()).await?; + let file_source_result = query_file_result( + &self.config, + &self.client, + &self.resolved_root.clone(), + compiler_state.clock.clone(), + true, + ) + .await?; perf_logger_event.stop(query_timer); let query_timer = perf_logger_event.start("watchman_query_time_subscribe"); @@ -179,57 +205,6 @@ impl<'config> WatchmanFileSource<'config> { )) } - /// Internal method to issue a watchman query, returning a raw - /// WatchmanFileSourceResult. 
- async fn query_file_result(&self, since_clock: Option) -> Result { - let expression = get_watchman_expr(self.config); - debug!( - "WatchmanFileSource::query_file_result(...) get_watchman_expr = {:?}", - &expression - ); - - // If `since` is available, we should not pass the `path` parameter. - // Watchman ignores `since` parameter if both `path` and `since` are - // passed as the request params - let request = if since_clock.is_some() { - QueryRequestCommon { - expression: Some(expression), - since: since_clock, - ..Default::default() - } - } else { - let query_roots = get_all_roots(self.config) - .into_iter() - .map(PathGeneratorElement::RecursivePath) - .collect(); - QueryRequestCommon { - expression: Some(expression), - path: Some(query_roots), - ..Default::default() - } - }; - debug!( - "WatchmanFileSource::query_file_result(...) request = {:?}", - &request - ); - let query_result = self - .client - .query::(&self.resolved_root, request) - .await?; - - // print debug information for this result - // (file list will include only files with specified extension) - debug_query_results(&query_result, "graphql"); - - let files = query_result.files.ok_or(Error::EmptyQueryResult)?; - Ok(FileSourceResult::Watchman(WatchmanFileSourceResult { - files, - resolved_root: self.resolved_root.clone(), - clock: query_result.clock, - saved_state_info: query_result.saved_state_info, - })) - } - /// Tries to load saved state with a watchman query. /// The return value is a nested Result: /// The outer Result indicates the result of a possible saved state infrastructure failure. 
@@ -242,6 +217,7 @@ impl<'config> WatchmanFileSource<'config> { saved_state_loader: &'_ (dyn SavedStateLoader + Send + Sync), saved_state_version: &str, ) -> std::result::Result, &'static str> { + let try_saved_state_event = perf_logger_event.start("try_saved_state_time"); let scm_since = Clock::ScmAware(FatClockData { clock: ClockSpec::null(), scm: Some(saved_state_config), @@ -250,18 +226,32 @@ impl<'config> WatchmanFileSource<'config> { "WatchmanFileSource::try_saved_state(...) scm_since = {:?}", &scm_since ); - let query_timer = perf_logger_event.start("watchman_query_time_try_saved_state"); - let file_source_result = self - .query_file_result(Some(scm_since)) + + // Issue two watchman queries: One to get the saved state info, and one to get the changed files. + // We'll download and deserialize saved state from manifold while the second watchman query executes. + + let since = Some(scm_since.clone()); + let root = self.resolved_root.clone(); + let saved_state_result = query_file_result(&self.config, &self.client, &root, since, true) .await .map_err(|_| "query failed")?; - perf_logger_event.stop(query_timer); + let since = Some(scm_since.clone()); + let config = Arc::clone(&self.config); + let client = Arc::clone(&self.client); + let root = self.resolved_root.clone(); + let changed_files_result_future = tokio::task::spawn(async move { + query_file_result(&config, &client, &root, since, false) + .await + .map_err(|_| "query failed") + }); + + // First, use saved state query to download saved state from manifold. debug!( - "WatchmanFileSource::try_saved_state(...) file_source_result = {:?}", - &file_source_result + "WatchmanFileSource::try_saved_state(...) 
saved_state_result = {:?}", + &saved_state_result ); - let saved_state_info = file_source_result + let saved_state_info = saved_state_result .saved_state_info() .as_ref() .ok_or("no saved state in watchman response")?; @@ -271,7 +261,7 @@ impl<'config> WatchmanFileSource<'config> { ); let saved_state_path = perf_logger_event.time("saved_state_loading_time", || { saved_state_loader - .load(saved_state_info, self.config) + .load(saved_state_info, &self.config) .ok_or("unable to load") })?; let mut compiler_state = perf_logger_event @@ -291,13 +281,28 @@ impl<'config> WatchmanFileSource<'config> { { return Err("Saved state version doesn't match."); } + + // Then await the changed files query. + let file_source_result = changed_files_result_future + .await + .map_err(|_| "query failed")??; + compiler_state .pending_file_source_changes .write() .unwrap() .push(file_source_result); + + if let Some(update_compiler_state_from_saved_state) = + &self.config.update_compiler_state_from_saved_state + { + update_compiler_state_from_saved_state(&mut compiler_state, &self.config); + } + if let Err(parse_error) = perf_logger_event.time("merge_file_source_changes", || { - compiler_state.merge_file_source_changes(self.config, perf_logger, true) + let result = compiler_state.merge_file_source_changes(&self.config, perf_logger, true); + perf_logger_event.stop(try_saved_state_event); + result }) { Ok(Err(parse_error)) } else { @@ -306,6 +311,62 @@ impl<'config> WatchmanFileSource<'config> { } } +async fn query_file_result( + config: &Config, + client: &Client, + resolved_root: &ResolvedRoot, + since_clock: Option, + omit_changed_files: bool, +) -> Result { + let expression = get_watchman_expr(config); + debug!( + "WatchmanFileSource::query_file_result(...) get_watchman_expr = {:?}", + &expression + ); + + // If `since` is available, we should not pass the `path` parameter. 
+ // Watchman ignores `since` parameter if both `path` and `since` are + // passed as the request params + let request = if since_clock.is_some() { + QueryRequestCommon { + omit_changed_files, + empty_on_fresh_instance: omit_changed_files, + expression: Some(expression), + since: since_clock, + ..Default::default() + } + } else { + let query_roots = get_all_roots(config) + .into_iter() + .map(PathGeneratorElement::RecursivePath) + .collect(); + QueryRequestCommon { + omit_changed_files, + empty_on_fresh_instance: omit_changed_files, + expression: Some(expression), + path: Some(query_roots), + ..Default::default() + } + }; + debug!( + "WatchmanFileSource::query_file_result(...) request = {:?}", + &request + ); + let query_result = client.query::(resolved_root, request).await?; + + // print debug information for this result + // (file list will include only files with specified extension) + debug_query_results(&query_result, "graphql"); + + let files = query_result.files.ok_or(Error::EmptyQueryResult)?; + Ok(FileSourceResult::Watchman(WatchmanFileSourceResult { + files, + resolved_root: resolved_root.clone(), + clock: query_result.clock, + saved_state_info: query_result.saved_state_info, + })) +} + fn debug_query_results(query_result: &QueryResult, extension_filter: &str) { if let Ok(rust_log) = std::env::var("RUST_LOG") { if rust_log == *"debug" { diff --git a/compiler/crates/relay-compiler/src/file_source/watchman_query_builder.rs b/compiler/crates/relay-compiler/src/file_source/watchman_query_builder.rs index 151d3117af87d..89dc9bd239364 100644 --- a/compiler/crates/relay-compiler/src/file_source/watchman_query_builder.rs +++ b/compiler/crates/relay-compiler/src/file_source/watchman_query_builder.rs @@ -7,48 +7,19 @@ use std::path::PathBuf; +use fnv::FnvBuildHasher; +use indexmap::IndexMap; use relay_typegen::TypegenLanguage; use watchman_client::prelude::*; +use crate::compiler_state::ProjectSet; use crate::config::Config; use crate::config::SchemaLocation; +type 
FnvIndexMap = IndexMap; + pub fn get_watchman_expr(config: &Config) -> Expr { - let mut sources_conditions = vec![expr_any( - config - .sources - .iter() - .flat_map(|(path, project_set)| { - project_set - .iter() - .map(|name| (path, &config.projects[name])) - .collect::>() - }) - .map(|(path, project)| { - Expr::All(vec![ - // Ending in *.js(x) or *.ts(x) depending on the project language. - Expr::Suffix(match &project.typegen_config.language { - TypegenLanguage::Flow | TypegenLanguage::JavaScript => { - vec![PathBuf::from("js"), PathBuf::from("jsx")] - } - TypegenLanguage::TypeScript => { - vec![ - PathBuf::from("js"), - PathBuf::from("jsx"), - PathBuf::from("ts"), - PathBuf::from("tsx"), - ] - } - }), - // In the related source root. - Expr::DirName(DirNameTerm { - path: path.clone(), - depth: None, - }), - ]) - }) - .collect(), - )]; + let mut sources_conditions = vec![expr_any(get_sources_dir_exprs(config, &config.sources))]; // not excluded by any glob if !config.excludes.is_empty() { sources_conditions.push(Expr::Not(Box::new(expr_any( @@ -69,6 +40,11 @@ pub fn get_watchman_expr(config: &Config) -> Expr { let mut expressions = vec![sources_expr]; + let generated_sources_dir_exprs = get_sources_dir_exprs(config, &config.generated_sources); + if !generated_sources_dir_exprs.is_empty() { + expressions.push(expr_any(generated_sources_dir_exprs)); + } + let output_dir_paths = get_output_dir_paths(config); if !output_dir_paths.is_empty() { let output_dir_expr = expr_files_in_dirs(output_dir_paths); @@ -103,10 +79,54 @@ pub fn get_watchman_expr(config: &Config) -> Expr { ]) } +fn get_sources_dir_exprs( + config: &Config, + paths_to_project: &FnvIndexMap, +) -> Vec { + paths_to_project + .iter() + .flat_map(|(path, project_set)| { + project_set + .iter() + .map(|name| (path, &config.projects[name])) + .collect::>() + }) + .map(|(path, project)| { + Expr::All(vec![ + // In the related source root. 
+ Expr::DirName(DirNameTerm { + path: path.clone(), + depth: None, + }), + // Match file extensions + get_project_file_ext_expr(project.typegen_config.language), + ]) + }) + .collect() +} + +fn get_project_file_ext_expr(typegen_language: TypegenLanguage) -> Expr { + // Ending in *.js(x) or *.ts(x) depending on the project language. + Expr::Suffix(match &typegen_language { + TypegenLanguage::Flow | TypegenLanguage::JavaScript => { + vec![PathBuf::from("js"), PathBuf::from("jsx")] + } + TypegenLanguage::TypeScript => { + vec![ + PathBuf::from("js"), + PathBuf::from("jsx"), + PathBuf::from("ts"), + PathBuf::from("tsx"), + ] + } + }) +} + /// Compute all root paths that we need to query Watchman with. All files /// relevant to the compiler should be in these directories. pub fn get_all_roots(config: &Config) -> Vec { let source_roots = get_source_roots(config); + let extra_sources_roots = get_generated_sources_roots(config); let output_roots = get_output_dir_paths(config); let extension_roots = get_extension_roots(config); let schema_file_roots = get_schema_file_roots(config); @@ -114,6 +134,7 @@ pub fn get_all_roots(config: &Config) -> Vec { unify_roots( source_roots .into_iter() + .chain(extra_sources_roots) .chain(output_roots) .chain(extension_roots) .chain(schema_file_roots) @@ -127,6 +148,11 @@ fn get_source_roots(config: &Config) -> Vec { config.sources.keys().cloned().collect() } +/// Returns all root directories of JS source files for the config. +fn get_generated_sources_roots(config: &Config) -> Vec { + config.generated_sources.keys().cloned().collect() +} + /// Returns all root directories of GraphQL schema extension files for the /// config. 
fn get_extension_roots(config: &Config) -> Vec { @@ -226,7 +252,7 @@ fn unify_roots(mut paths: Vec) -> Vec { let mut roots = Vec::new(); for path in paths { match roots.last() { - Some(prev) if path.starts_with(&prev) => { + Some(prev) if path.starts_with(prev) => { // skip } _ => { diff --git a/compiler/crates/relay-compiler/src/graphql_asts.rs b/compiler/crates/relay-compiler/src/graphql_asts.rs index 82e43c752111a..4431e54a9be3f 100644 --- a/compiler/crates/relay-compiler/src/graphql_asts.rs +++ b/compiler/crates/relay-compiler/src/graphql_asts.rs @@ -9,22 +9,29 @@ use std::collections::hash_map::Entry; use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; +use common::sync::ParallelIterator; use common::Diagnostic; use common::SourceLocationKey; use dependency_analyzer::ExecutableDefinitionNameSet; -use dependency_analyzer::ExecutableDefinitionNameVec; use fnv::FnvHashMap; use graphql_ir::ExecutableDefinitionName; use graphql_ir::FragmentDefinitionName; use graphql_ir::OperationDefinitionName; use graphql_syntax::ExecutableDefinition; +use graphql_syntax::ParserFeatures; +use rayon::iter::IntoParallelRefIterator; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use crate::artifact_map::ArtifactSourceKey; use crate::compiler_state::GraphQLSources; -use crate::compiler_state::ProjectName; +use crate::config::Config; use crate::errors::Error; use crate::errors::Result; use crate::file_source::LocatedGraphQLSource; +use crate::utils::get_parser_features; #[derive(Debug)] pub struct GraphQLAsts { @@ -32,7 +39,7 @@ pub struct GraphQLAsts { /// Names of fragments and operations that are updated or created pub pending_definition_names: ExecutableDefinitionNameSet, /// Names of fragments and operations that are deleted - pub removed_definition_names: ExecutableDefinitionNameVec, + pub removed_definition_names: Vec, } impl GraphQLAsts { @@ -49,14 +56,34 @@ impl GraphQLAsts { pub fn from_graphql_sources_map( 
graphql_sources_map: &FnvHashMap, - dirty_definitions_map: &FnvHashMap>, + dirty_artifact_sources: &FnvHashMap>, + config: &Arc, ) -> Result> { graphql_sources_map - .iter() + .par_iter() .map(|(&project_name, sources)| { + let project_config = &config.projects[&project_name]; + let asts = GraphQLAsts::from_graphql_sources( sources, - dirty_definitions_map.get(&project_name), + dirty_artifact_sources + .get(&project_name) + .map(|dirty_artifacts| { + dirty_artifacts + .iter() + .filter_map(|artifact_source_key| match artifact_source_key { + ArtifactSourceKey::ExecutableDefinition(def_name) => { + Some(def_name) + } + ArtifactSourceKey::Schema() + | ArtifactSourceKey::ResolverHash(_) => { + // We're only concerned with collecting ExecutableDefinitionNames + None + } + }) + .collect() + }), + project_config, )?; Ok((project_name, asts)) }) @@ -68,8 +95,11 @@ impl GraphQLAsts { /// Additionally collects the set of definition names that updated, given the compiler state pub fn from_graphql_sources( graphql_sources: &GraphQLSources, - dirty_definitions: Option<&Vec>, + dirty_definitions: Option>, + project_config: &ProjectConfig, ) -> Result { + let parser_features = get_parser_features(project_config); + let mut syntax_errors = Vec::new(); let mut asts: FnvHashMap> = Default::default(); @@ -80,108 +110,77 @@ impl GraphQLAsts { pending_definition_names.extend(dirty_definitions); } - // Iterate over all pending sources, and parse each graphql string. - // Prefer the entry from the pending source set, which contains the - // latest values for the graphql strings in the file. 
- for (file_name, pending_graphql_sources) in graphql_sources.pending.iter() { - let mut definitions_for_file = Vec::new(); - for LocatedGraphQLSource { - index, - graphql_source, - } in pending_graphql_sources.iter() - { - let source_location = - SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); - match graphql_syntax::parse_executable( - &graphql_source.text_source().text, - source_location, - ) { - Ok(document) => { - for def in &document.definitions { - if let Some(name) = def.name() { - match def { - ExecutableDefinition::Operation(_) => pending_definition_names - .insert(OperationDefinitionName(name).into()), - ExecutableDefinition::Fragment(_) => pending_definition_names - .insert(FragmentDefinitionName(name).into()), - }; - } else { - syntax_errors.push(Diagnostic::error( - "Expected operation to have a name (e.g. 'query ')", - def.location(), - )) - } - } - definitions_for_file.extend(document.definitions); - } - Err(errors) => syntax_errors.extend(errors), - } - } - // Parse the processed source to get all processed definition names - // and collect definition names that are removed from that file. 
- // (A definition moved to another file is considered as a deletion and a new source) - if let Some(processed_graphql_sources) = graphql_sources.processed.get(file_name) { - for LocatedGraphQLSource { - index, - graphql_source, - } in processed_graphql_sources.iter() - { - // TODO: parse name instead of the whole graphql text - let source_location = - SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); - if let Ok(document) = graphql_syntax::parse_executable( - &graphql_source.text_source().text, - source_location, - ) { - for def in document.definitions { - let name = def.name(); - if let Some(def_name) = name { - if !definitions_for_file.iter().any(|def| def.name() == name) { - match def { - ExecutableDefinition::Operation(_) => { - removed_definition_names - .push(OperationDefinitionName(def_name).into()) - } - ExecutableDefinition::Fragment(_) => { - removed_definition_names - .push(FragmentDefinitionName(def_name).into()) - } - } + let (pending_results, processed_results) = rayon::join( + // Iterate over all pending sources, and parse each graphql string. + // Prefer the entry from the pending source set, which contains the + // latest values for the graphql strings in the file. + || { + graphql_sources + .pending + .par_iter() + .map(|(file_name, pending_graphql_sources)| { + parse_pending_graphql_source_and_collect_removed_definitions( + graphql_sources, + parser_features, + file_name, + pending_graphql_sources, + ) + }) + .collect::>() + }, + // Iterate over processed sources that aren't in the pending source set, + // and parse each graphql string. 
+ || { + graphql_sources + .processed + .par_iter() + .filter( + // Only parse the file if it isn't in the pending sources + |(file_name, _)| !graphql_sources.pending.contains_key(*file_name), + ) + .map(|(file_name, processed_graphql_sources)| { + let mut definitions_for_file = Vec::new(); + let mut local_syntax_errors = vec![]; + for LocatedGraphQLSource { + index, + graphql_source, + } in processed_graphql_sources.iter() + { + let source_location = + SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); + match graphql_syntax::parse_executable_with_features( + &graphql_source.text_source().text, + source_location, + parser_features, + ) { + Ok(document) => { + definitions_for_file.extend(document.definitions); } + Err(errors) => local_syntax_errors.extend(errors), } } - } - } - } + (file_name, definitions_for_file, local_syntax_errors) + }) + .collect::>() + }, + ); + + for PendingSourceResult { + file_name, + definitions_for_file, + local_pending_definition_names, + local_syntax_errors, + local_removed_definition_names, + } in pending_results + { + pending_definition_names.extend(local_pending_definition_names); + syntax_errors.extend(local_syntax_errors); + removed_definition_names.extend(local_removed_definition_names); asts.insert(file_name.clone(), definitions_for_file); } - // Iterate over processed sources that aren't in the pending source set, - // and parse each graphql string. 
- for (file_name, processed_graphql_sources) in &graphql_sources.processed { - // Only parse the file if it isn't in the pending sources - if graphql_sources.pending.contains_key(file_name) { - continue; - } - - let mut definitions_for_file = Vec::new(); - for LocatedGraphQLSource { - index, - graphql_source, - } in processed_graphql_sources.iter() - { - let source_location = - SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); - match graphql_syntax::parse_executable( - &graphql_source.text_source().text, - source_location, - ) { - Ok(document) => { - definitions_for_file.extend(document.definitions); - } - Err(errors) => syntax_errors.extend(errors), - } - } + for (file_name, definitions_for_file, local_syntax_errors) in processed_results { + syntax_errors.extend(local_syntax_errors); match asts.entry(file_name.clone()) { Entry::Vacant(entry) => { entry.insert(definitions_for_file); @@ -206,3 +205,121 @@ impl GraphQLAsts { } } } + +struct PendingSourceResult<'a> { + file_name: &'a PathBuf, + definitions_for_file: Vec, + local_pending_definition_names: Vec, + local_syntax_errors: Vec, + local_removed_definition_names: Vec, +} + +fn parse_pending_graphql_source_and_collect_removed_definitions<'a>( + graphql_sources: &GraphQLSources, + parser_features: ParserFeatures, + file_name: &'a PathBuf, + pending_graphql_sources: &[LocatedGraphQLSource], +) -> PendingSourceResult<'a> { + let mut definitions_for_file = Vec::new(); + let mut local_pending_definition_names: Vec = vec![]; + let mut local_syntax_errors = vec![]; + let mut local_removed_definition_names = vec![]; + for LocatedGraphQLSource { + index, + graphql_source, + } in pending_graphql_sources.iter() + { + let source_location = SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); + match graphql_syntax::parse_executable_with_features( + &graphql_source.text_source().text, + source_location, + parser_features, + ) { + Ok(document) => { + for def in &document.definitions { + if let 
Some(name) = def.name() { + match def { + ExecutableDefinition::Operation(_) => local_pending_definition_names + .push(OperationDefinitionName(name).into()), + ExecutableDefinition::Fragment(_) => local_pending_definition_names + .push(FragmentDefinitionName(name).into()), + }; + } else { + local_syntax_errors.push(Diagnostic::error( + "Expected operation to have a name (e.g. 'query ')", + def.location(), + )) + } + } + definitions_for_file.extend(document.definitions); + } + Err(errors) => local_syntax_errors.extend(errors), + } + } + // Parse the processed source to get all processed definition names + // and collect definition names that are removed from that file. + // (A definition moved to another file is considered as a deletion and a new source) + if let Some(processed_graphql_sources) = graphql_sources.processed.get(file_name) { + for LocatedGraphQLSource { + index, + graphql_source, + } in processed_graphql_sources.iter() + { + // TODO: parse name instead of the whole graphql text + let source_location = SourceLocationKey::embedded(&file_name.to_string_lossy(), *index); + if let Ok(document) = graphql_syntax::parse_executable_with_features( + &graphql_source.text_source().text, + source_location, + parser_features, + ) { + for def in document.definitions { + match def { + ExecutableDefinition::Operation(operation) => { + if !(definitions_for_file.iter().any(|def| { + if let ExecutableDefinition::Operation(op) = def { + op.name == operation.name + } else { + false + } + })) { + if let Some(operation_name) = operation.name { + local_removed_definition_names.push( + ArtifactSourceKey::ExecutableDefinition( + ExecutableDefinitionName::OperationDefinitionName( + OperationDefinitionName(operation_name.value), + ), + ), + ); + } + } + } + ExecutableDefinition::Fragment(fragment) => { + if !(definitions_for_file.iter().any(|def| { + if let ExecutableDefinition::Fragment(frag) = def { + frag.name == fragment.name + } else { + false + } + })) { + 
local_removed_definition_names.push( + ArtifactSourceKey::ExecutableDefinition( + ExecutableDefinitionName::FragmentDefinitionName( + FragmentDefinitionName(fragment.name.value), + ), + ), + ); + } + } + } + } + } + } + } + PendingSourceResult { + file_name, + definitions_for_file, + local_pending_definition_names, + local_syntax_errors, + local_removed_definition_names, + } +} diff --git a/compiler/crates/relay-compiler/src/lib.rs b/compiler/crates/relay-compiler/src/lib.rs index 040e0d3ff0bd7..0764ed576809b 100644 --- a/compiler/crates/relay-compiler/src/lib.rs +++ b/compiler/crates/relay-compiler/src/lib.rs @@ -17,14 +17,15 @@ pub mod compiler_state; pub mod config; mod docblocks; pub mod errors; -mod file_source; +pub mod file_source; mod graphql_asts; mod operation_persister; mod red_to_green; pub mod saved_state; pub mod status_reporter; +mod utils; -pub use build_project::add_to_mercurial; +pub use artifact_map::ArtifactSourceKey; pub use build_project::artifact_writer::ArtifactDifferenceShardedWriter; pub use build_project::artifact_writer::ArtifactDifferenceWriter; pub use build_project::artifact_writer::ArtifactFileWriter; @@ -37,6 +38,8 @@ pub use build_project::build_schema; pub use build_project::find_duplicates; pub use build_project::generate_artifacts; pub use build_project::generate_extra_artifacts::GenerateExtraArtifactsFn; +pub use build_project::get_artifacts_file_hash_map::GetArtifactsFileHashMapFn; +pub use build_project::source_control_for_root; pub use build_project::transform_program; pub use build_project::validate; pub use build_project::validate_program; @@ -55,6 +58,8 @@ pub use config::ProjectConfig; pub use config::RemotePersistConfig; pub use config::SchemaLocation; pub use file_source::source_for_location; +pub use file_source::ExternalFileSourceResult; +pub use file_source::File; pub use file_source::FileCategorizer; pub use file_source::FileGroup; pub use file_source::FileSource; @@ -67,3 +72,5 @@ pub use 
file_source::SourceReader; pub use graphql_asts::GraphQLAsts; pub use operation_persister::LocalPersister; pub use operation_persister::RemotePersister; +pub use relay_config::ProjectName; +pub use utils::get_parser_features; diff --git a/compiler/crates/relay-compiler/src/operation_persister/mod.rs b/compiler/crates/relay-compiler/src/operation_persister.rs similarity index 100% rename from compiler/crates/relay-compiler/src/operation_persister/mod.rs rename to compiler/crates/relay-compiler/src/operation_persister.rs diff --git a/compiler/crates/relay-compiler/src/status_reporter.rs b/compiler/crates/relay-compiler/src/status_reporter.rs index e22389f861cf1..9d96b01999198 100644 --- a/compiler/crates/relay-compiler/src/status_reporter.rs +++ b/compiler/crates/relay-compiler/src/status_reporter.rs @@ -151,3 +151,23 @@ impl StatusReporter for ConsoleStatusReporter { } } } + +pub struct JSONStatusReporter; + +impl StatusReporter for JSONStatusReporter { + fn build_starts(&self) {} + + fn build_completes(&self, diagnostics: &[Diagnostic]) { + println!( + "{{\"completed\":true,\"diagnostics\":{}}}", + serde_json::to_string(diagnostics).unwrap() + ); + } + + fn build_errors(&self, error: &Error) { + println!( + "{{\"completed\":false,\"error\":{}}}", + serde_json::to_string(error).unwrap() + ); + } +} diff --git a/compiler/crates/relay-compiler/src/utils.rs b/compiler/crates/relay-compiler/src/utils.rs new file mode 100644 index 0000000000000..bd4516938e52b --- /dev/null +++ b/compiler/crates/relay-compiler/src/utils.rs @@ -0,0 +1,23 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use graphql_syntax::FragmentArgumentSyntaxKind; +use graphql_syntax::ParserFeatures; +use relay_config::ProjectConfig; + +pub fn get_parser_features(project_config: &ProjectConfig) -> ParserFeatures { + ParserFeatures { + fragment_argument_capability: if project_config + .feature_flags + .enable_fragment_argument_transform + { + FragmentArgumentSyntaxKind::SpreadArgumentsAndFragmentVariableDefinitions + } else { + FragmentArgumentSyntaxKind::None + }, + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts.rs new file mode 100644 index 0000000000000..320f2915815d0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts.rs @@ -0,0 +1,293 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::collections::HashMap; +use std::sync::Arc; + +use common::ConsoleLogger; +use common::FeatureFlag; +use common::FeatureFlags; +use common::NamedItem; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentVariablesSemantic; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::RelayMode; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_full_operation; +use intern::string_key::Intern; +use relay_codegen::build_request_params; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::print_request; +use relay_codegen::JsModuleFormat; +use relay_compiler::find_duplicates; +use relay_compiler::validate; +use relay_compiler::ConfigFileProject; 
+use relay_compiler::ProjectConfig; +use relay_config::NonNodeIdFieldsConfig; +use relay_config::ProjectName; +use relay_config::SchemaConfig; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::apply_transforms; +use relay_transforms::DIRECTIVE_SPLIT_OPERATION; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + if fixture.content.contains("%TODO%") { + if fixture.content.contains("expected-to-throw") { + return Err("TODO".to_string()); + } + return Ok("TODO".to_string()); + } + let no_inline_allowlist = vec![ + "autoFilledArgumentOnMatchPlainUserNameRenderer_name".intern(), + "autoFilledArgumentOnMatchMarkdownUserNameRenderer_name".intern(), + "fragmentWithMatchDirective_PlainUserNameRenderer_name".intern(), + "fragmentWithMatchDirective_MarkdownUserNameRenderer_name".intern(), + "matchFieldOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), + "matchOnChildOfPlural_PlainUserNameRenderer_name".intern(), + "matchOnChildOfPlural_MarkdownUserNameRenderer_name".intern(), + "moduleDeduping_frag".intern(), + "moduleInInlineFragment_MarkdownUserNameRenderer_name".intern(), + "moduleOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), + "moduleOverlapAcrossDocuments_PlainUserNameRenderer_name".intern(), + "moduleOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), + "moduleOverlapWithinDocument_MarkdownUserNameRenderer_name".intern(), + "moduleOverlapWithinDocument_PlainUserNameRenderer_name".intern(), + "moduleOverlapWithinDocument_MarkdownUserNameRenderer_name".intern(), + "moduleWithDefer_MarkdownUserNameRenderer_name".intern(), + "multipleModulesDifferentComponent_MarkdownUserNameRenderer_name".intern(), + "multipleModulesDifferentFragment_MarkdownUserNameRenderer_name".intern(), + "multipleModulesDifferentFragment_OtherMarkdownUserNameRenderer_name".intern(), + 
"multipleModulesSameSelections_MarkdownUserNameRenderer_name".intern(), + "multipleModulesWithKey_PlainUserNameRenderer_name".intern(), + "multipleModulesWithKey_MarkdownUserNameRenderer_name".intern(), + "multipleModulesWithoutKey_PlainUserNameRenderer_name".intern(), + "multipleModulesWithoutKey_MarkdownUserNameRenderer_name".intern(), + "noInlineFragmentAndModule_parent".intern(), + "queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name".intern(), + "queryWithConditionalModule_MarkdownUserNameRenderer_name".intern(), + "queryWithMatchDirective_PlainUserNameRenderer_name".intern(), + "queryWithMatchDirective_MarkdownUserNameRenderer_name".intern(), + "queryWithMatchDirectiveNoInlineExperimental_PlainUserNameRenderer_name".intern(), + "queryWithMatchDirectiveNoInlineExperimental_MarkdownUserNameRenderer_name".intern(), + "queryWithMatchDirectiveWithExtraArgument_PlainUserNameRenderer_name".intern(), + "queryWithMatchDirectiveWithExtraArgument_MarkdownUserNameRenderer_name".intern(), + "queryWithMatchDirectiveWithTypename_PlainUserNameRenderer_name".intern(), + "queryWithMatchDirectiveWithTypename_MarkdownUserNameRenderer_name".intern(), + "queryWithModuleDirective_MarkdownUserNameRenderer_name".intern(), + "queryWithModuleDirectiveAndArguments_MarkdownUserNameRenderer_name".intern(), + "queryWithModuleDirectiveAndArguments_PlainUserNameRenderer_name".intern(), + "conflictingSelectionsWithNoInline_fragment".intern(), + "providedVariableNoInlineFragment".intern(), + "noInlineFragment_parent".intern(), + "noInlineAbstractFragment_parent".intern(), + "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment".intern(), + "queryWithRelayClientComponent_ClientComponentFragment".intern(), + ]; + + let feature_flags = FeatureFlags { + // test SplitOperations that do not use @no-inline D28460294 + no_inline: FeatureFlag::Limited { + allowlist: no_inline_allowlist.into_iter().collect(), + }, + enable_relay_resolver_transform: true, + 
enable_catch_directive_transform: FeatureFlag::Disabled, + enable_3d_branch_arg_generation: true, + actor_change_support: FeatureFlag::Enabled, + text_artifacts: FeatureFlag::Disabled, + skip_printing_nulls: FeatureFlag::Disabled, + enable_fragment_aliases: FeatureFlag::Enabled, + compact_query_text: FeatureFlag::Disabled, + emit_normalization_nodes_for_client_edges: true, + relay_resolver_enable_interface_output_type: if fixture + .content + .contains("# relay-resolver-enable-interface-output-type") + { + FeatureFlag::Enabled + } else { + FeatureFlag::Disabled + }, + enable_resolver_normalization_ast: fixture + .content + .contains("# enable_resolver_normalization_ast"), + prefer_fetchable_in_refetch_queries: fixture + .content + .contains("# prefer_fetchable_in_refetch_queries"), + ..Default::default() + }; + + let default_project_config = ProjectConfig { + name: ProjectName::default(), + feature_flags: Arc::new(feature_flags), + js_module_format: JsModuleFormat::Haste, + schema_config: SchemaConfig { + non_node_id_fields: Some(NonNodeIdFieldsConfig { + allowed_id_types: { + let mut mappings = HashMap::new(); + + mappings.insert("NonNode".intern(), "String".intern()); + + mappings + }, + }), + ..Default::default() + }, + ..Default::default() + }; + + // Adding %project_config section on top of the fixture will allow + // us to validate output changes with different configurations + let parts: Vec<_> = fixture.content.split("%project_config%").collect(); + let (project_config, other_parts) = match parts.as_slice() { + [fixture_content, project_config_str] => ( + { + let config_file_project: ConfigFileProject = + serde_json::from_str(project_config_str).unwrap(); + ProjectConfig { + schema_config: config_file_project.schema_config, + typegen_config: config_file_project.typegen_config, + module_import_config: config_file_project.module_import_config, + feature_flags: config_file_project + .feature_flags + .map_or(default_project_config.feature_flags, |flags| { + 
Arc::new(flags) + }), + js_module_format: config_file_project.js_module_format, + ..default_project_config + } + }, + fixture_content.split("%extensions%").collect::>(), + ), + [fixture_content] => ( + default_project_config, + fixture_content.split("%extensions%").collect::>(), + ), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let (base, schema) = match other_parts.as_slice() { + [base, extensions] => (base, get_test_schema_with_extensions(extensions)), + [base] => (base, get_test_schema()), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let ast = parse_executable(base, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + find_duplicates(&ast.definitions, &[]) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let ir_result = build_ir_with_extra_features( + &schema, + &ast.definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: false, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(RelayMode), + default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility + }, + ); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + validate(&program, &project_config, &None) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + // TODO pass base fragment names + let programs = apply_transforms( + &project_config, + Arc::new(program), + Default::default(), + Arc::new(ConsoleLogger), + None, + None, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let mut operations: Vec<&std::sync::Arc> = + programs.normalization.operations().collect(); + operations.sort_by_key(|operation| operation.name.item.0); + let result = operations + .into_iter() + 
.map(|operation| { + if operation + .directives + .named(*DIRECTIVE_SPLIT_OPERATION) + .is_some() + { + let mut import_statements = Default::default(); + let operation = + print_operation(&schema, operation, &project_config, &mut import_statements); + format!("{}{}", import_statements, operation) + } else { + let name = operation.name.item.0; + let print_operation_node = programs + .operation_text + .operation(OperationDefinitionName(name)); + let text = print_operation_node.map_or_else( + || "Query Text is Empty.".to_string(), + |print_operation_node| { + print_full_operation( + &programs.operation_text, + print_operation_node, + Default::default(), + ) + }, + ); + + let reader_operation = programs + .reader + .operation(OperationDefinitionName(name)) + .expect("a reader fragment should be generated for this operation"); + let operation_fragment = FragmentDefinition { + name: reader_operation.name.map(|x| FragmentDefinitionName(x.0)), + variable_definitions: reader_operation.variable_definitions.clone(), + selections: reader_operation.selections.clone(), + used_global_variables: Default::default(), + directives: reader_operation.directives.clone(), + type_condition: reader_operation.type_, + }; + let mut import_statements = Default::default(); + let request_parameters = build_request_params(operation); + let request = print_request( + &schema, + operation, + &operation_fragment, + request_parameters, + &project_config, + &mut import_statements, + ); + format!("{}{}\n\nQUERY:\n\n{}", import_statements, request, text) + } + }) + .chain({ + let mut fragments: Vec<&std::sync::Arc> = + programs.reader.fragments().collect(); + fragments.sort_by_key(|fragment| fragment.name.item); + fragments.into_iter().map(|fragment| { + let mut import_statements = Default::default(); + let fragment = + print_fragment(&schema, fragment, &project_config, &mut import_statements); + format!("{}{}", import_statements, fragment) + }) + }) + .collect::>(); + Ok(result.join("\n\n")) +} diff 
--git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.expected new file mode 100644 index 0000000000000..4f9284ceb23ea --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +# expected-to-throw + +mutation appendNodeLiteralEdgeTypeNameInvalidCommentCreateMutation( + $connections: [ID!]! + $input: CommentCreateInput +) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + id + } + } +} +==================================== ERROR ==================================== +✖︎ Expected the 'edgeTypeName' argument value on @appendNode to be the name of an object type. 'CommentEdge' does not refer to a known object type. Did you mean `CommentsEdge`, `Comment`, or `SegmentsEdge`? + + append-node-literal-edge-type-name-invalid.graphql:9:60 + 8 │ comment + 9 │ @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + │ ^^^^^^^^^^^^^ + 10 │ id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.graphql new file mode 100644 index 0000000000000..f85913bbfad10 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.graphql @@ -0,0 +1,13 @@ +# expected-to-throw + +mutation appendNodeLiteralEdgeTypeNameInvalidCommentCreateMutation( + $connections: [ID!]! 
+ $input: CommentCreateInput +) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + id + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.expected new file mode 100644 index 0000000000000..8b2f144f53135 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +# expected-to-throw + +mutation appendNodeLiteralEdgeTypeNameNotObjectTypeMutation( + $connections: [ID!]! + $input: CommentCreateInput +) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: "Node") { + id + } + } +} +==================================== ERROR ==================================== +✖︎ Expected the 'edgeTypeName' argument value on @appendNode to be the name of an object type. 'Node' does not refer to a known object type. 
+ + append-node-literal-edge-type-name-not-object-type.graphql:9:60 + 8 │ comment + 9 │ @appendNode(connections: $connections, edgeTypeName: "Node") { + │ ^^^^^^ + 10 │ id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.graphql new file mode 100644 index 0000000000000..d7133946040be --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.graphql @@ -0,0 +1,13 @@ +# expected-to-throw + +mutation appendNodeLiteralEdgeTypeNameNotObjectTypeMutation( + $connections: [ID!]! + $input: CommentCreateInput +) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: "Node") { + id + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.expected new file mode 100644 index 0000000000000..5a4446f1790c8 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.expected @@ -0,0 +1,177 @@ +==================================== INPUT ==================================== +mutation appendNodeLiteralEdgeTypeNameVariableMutation( + $connections: [ID!]! + $input: CommentCreateInput + $edgeTypeName: String! 
+) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: $edgeTypeName) { + id + } + } +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "connections" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "edgeTypeName" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "input" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "appendNodeLiteralEdgeTypeNameVariableMutation", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "input", + "variableName": "input" + } + ], + "concreteType": "CommentCreateResponsePayload", + "kind": "LinkedField", + "name": "commentCreate", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": "LinkedField", + "name": "comment", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Mutation", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "connections" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "input" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "edgeTypeName" + } + ], + "kind": "Operation", + "name": "appendNodeLiteralEdgeTypeNameVariableMutation", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "input", + "variableName": "input" + } + ], + "concreteType": "CommentCreateResponsePayload", + "kind": "LinkedField", + "name": "commentCreate", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": 
"LinkedField", + "name": "comment", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "filters": null, + "handle": "appendNode", + "key": "", + "kind": "LinkedHandle", + "name": "comment", + "handleArgs": [ + { + "kind": "Variable", + "name": "connections", + "variableName": "connections" + }, + { + "kind": "Variable", + "name": "edgeTypeName", + "variableName": "edgeTypeName" + } + ] + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "c6c4a56d7ea1db74c1935491b7a2e10a", + "id": null, + "metadata": {}, + "name": "appendNodeLiteralEdgeTypeNameVariableMutation", + "operationKind": "mutation", + "text": null + } +} + +QUERY: + +mutation appendNodeLiteralEdgeTypeNameVariableMutation( + $input: CommentCreateInput +) { + commentCreate(input: $input) { + comment { + id + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.graphql new file mode 100644 index 0000000000000..6c06d48b4c2c7 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.graphql @@ -0,0 +1,12 @@ +mutation appendNodeLiteralEdgeTypeNameVariableMutation( + $connections: [ID!]! + $input: CommentCreateInput + $edgeTypeName: String! 
+) { + commentCreate(input: $input) { + comment + @appendNode(connections: $connections, edgeTypeName: $edgeTypeName) { + id + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected index 03c7c146be85a..fc3abffdbb9c9 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected @@ -5,7 +5,7 @@ mutation appendNodeLiteralEdgeTypeNameCommentCreateMutation( ) { commentCreate(input: $input) { comment - @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + @appendNode(connections: $connections, edgeTypeName: "CommentsEdge") { id } } @@ -134,7 +134,7 @@ mutation appendNodeLiteralEdgeTypeNameCommentCreateMutation( { "kind": "Literal", "name": "edgeTypeName", - "value": "CommentEdge" + "value": "CommentsEdge" } ] } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.graphql index b18e012e4031b..843eb0f80625b 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.graphql @@ -4,7 +4,7 @@ mutation appendNodeLiteralEdgeTypeNameCommentCreateMutation( ) { commentCreate(input: $input) { comment - @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + @appendNode(connections: $connections, edgeTypeName: "CommentsEdge") { id } } diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.expected new file mode 100644 index 0000000000000..6a2292acf4bf5 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.expected @@ -0,0 +1,32 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query circularFragmentQuery { + story { + feedback { + ...circularFragment_feedback + } + } +} + +fragment circularFragment_feedback on Feedback { + feedback { + ...circularFragment_feedback + } +} +==================================== ERROR ==================================== +✖︎ Found a circular reference from fragment 'circularFragment_feedback'. + + circular-fragment.graphql:13:8 + 12 │ feedback { + 13 │ ...circularFragment_feedback + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ + 14 │ } + + ℹ︎ other member of the cycle + + circular-fragment.graphql:6:10 + 5 │ feedback { + 6 │ ...circularFragment_feedback + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ + 7 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.graphql new file mode 100644 index 0000000000000..311ecde9bf4ff --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/circular-fragment.graphql @@ -0,0 +1,15 @@ +# expected-to-throw + +query circularFragmentQuery { + story { + feedback { + ...circularFragment_feedback + } + } +} + +fragment circularFragment_feedback on Feedback { + feedback { + ...circularFragment_feedback + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.expected 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.expected new file mode 100644 index 0000000000000..e117d0595dc52 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query clientFieldsWithUndefinedGlobalVariablesQuery($id: ID!) { + node(id: $id) { + ...clientFieldsWithUndefinedGlobalVariables_user + } +} + +fragment clientFieldsWithUndefinedGlobalVariables_user on User { + pop_star_name(scale: $scale) +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String +} +==================================== ERROR ==================================== +✖︎ Operation 'clientFieldsWithUndefinedGlobalVariablesQuery' references undefined variable: '$scale'. + + client-fields-with-undefined-global-variables.invalid.graphql:10:24 + 9 │ fragment clientFieldsWithUndefinedGlobalVariables_user on User { + 10 │ pop_star_name(scale: $scale) + │ ^^^^^^ + 11 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.graphql new file mode 100644 index 0000000000000..685277b35f78e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.graphql @@ -0,0 +1,17 @@ +# expected-to-throw + +query clientFieldsWithUndefinedGlobalVariablesQuery($id: ID!) 
{ + node(id: $id) { + ...clientFieldsWithUndefinedGlobalVariables_user + } +} + +fragment clientFieldsWithUndefinedGlobalVariables_user on User { + pop_star_name(scale: $scale) +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.expected new file mode 100644 index 0000000000000..67e73b5fb9e60 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.expected @@ -0,0 +1,141 @@ +==================================== INPUT ==================================== +query clientInlineFragmentsDuplicateQuery { + cat { + description + description + ... on Tabby { + greeting + } + ... on Tabby { + greeting + } + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientInlineFragmentsDuplicateQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "type": "Tabby", + "abstractKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + 
"argumentDefinitions": [], + "kind": "Operation", + "name": "clientInlineFragmentsDuplicateQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "type": "Tabby", + "abstractKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "a22161d66129f4ea54b4be1a1b16caa8", + "id": null, + "metadata": {}, + "name": "clientInlineFragmentsDuplicateQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.graphql new file mode 100644 index 0000000000000..e784bb6c83f20 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.graphql @@ -0,0 +1,26 @@ +query clientInlineFragmentsDuplicateQuery { + cat { + description + description + ... on Tabby { + greeting + } + ... 
on Tabby { + greeting + } + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.expected new file mode 100644 index 0000000000000..6f5dfb5988882 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.expected @@ -0,0 +1,107 @@ +==================================== INPUT ==================================== +# TODO T174533887 expected to throw + +query clientInterfacesImplementedWrongTypeInvalidQuery { + cat { + description + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: Text +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientInterfacesImplementedWrongTypeInvalidQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientInterfacesImplementedWrongTypeInvalidQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": 
"LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "99252e455d9eb567db049b05603a8d92", + "id": null, + "metadata": {}, + "name": "clientInterfacesImplementedWrongTypeInvalidQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.graphql new file mode 100644 index 0000000000000..322846b4d6ab2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.graphql @@ -0,0 +1,20 @@ +# TODO T174533887 expected to throw + +query clientInterfacesImplementedWrongTypeInvalidQuery { + cat { + description + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: Text +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.expected new file mode 100644 index 0000000000000..683d9156ddff5 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.expected @@ -0,0 +1,111 @@ +==================================== INPUT ==================================== +query clientInterfacesNoInlineQuery { + cat { + description + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby 
implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientInterfacesNoInlineQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientInterfacesNoInlineQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "230ba4fd5e4507dad3c08d149cd68d69", + "id": null, + "metadata": {}, + "name": "clientInterfacesNoInlineQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.graphql new file mode 100644 index 0000000000000..d9da93576ded7 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.graphql @@ -0,0 +1,24 @@ +query clientInterfacesNoInlineQuery { + cat { + description + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.expected new file mode 100644 index 0000000000000..e68521a828558 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.expected @@ -0,0 +1,36 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query clientInterfacesNoInlineInvalidQuery { + cat { + greeting + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} +==================================== ERROR ==================================== +✖︎ The type `Cat` has no field `greeting`. 
+See https://relay.dev/docs/error-reference/unknown-field/ + + client-interfaces-no-inline.invalid.graphql:5:5 + 4 │ cat { + 5 │ greeting + │ ^^^^^^^^ + 6 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.graphql new file mode 100644 index 0000000000000..9fdc4179e1b21 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.graphql @@ -0,0 +1,26 @@ +# expected-to-throw + +query clientInterfacesNoInlineInvalidQuery { + cat { + greeting + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.expected new file mode 100644 index 0000000000000..bff5a77b4dfd0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.expected @@ -0,0 +1,173 @@ +==================================== INPUT ==================================== +query clientInterfacesQuery { + cat { + description + ... on Tabby { + greeting + } + ... 
on Persian { + adopted + } + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientInterfacesQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "adopted", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientInterfacesQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + 
"name": "greeting", + "storageKey": null + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "adopted", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "c4729eeba1c7c1cb95704f980c666f70", + "id": null, + "metadata": {}, + "name": "clientInterfacesQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.graphql new file mode 100644 index 0000000000000..fc8aedf84dcfb --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.graphql @@ -0,0 +1,30 @@ +query clientInterfacesQuery { + cat { + description + ... on Tabby { + greeting + } + ... 
on Persian { + adopted + } + } +} + +# %extensions% +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + greeting: String +} + +type Persian implements Cat { + description: String + adopted: Boolean +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.expected new file mode 100644 index 0000000000000..112b9fce48ba7 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.expected @@ -0,0 +1,107 @@ +==================================== INPUT ==================================== +# TODO T174533887 expected to throw + +query clientInterfacesInvalidQuery { + cat { + greeting + } +} + +# %extensions% +interface Cat { + greeting: String +} + +type Tabby implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientInterfacesInvalidQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientInterfacesInvalidQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": 
[ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "bff40d074712569f213acce4cc45cb3b", + "id": null, + "metadata": {}, + "name": "clientInterfacesInvalidQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.graphql new file mode 100644 index 0000000000000..e2d5b4c199f6b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client-interfaces.invalid.graphql @@ -0,0 +1,20 @@ +# TODO T174533887 expected to throw + +query clientInterfacesInvalidQuery { + cat { + greeting + } +} + +# %extensions% +interface Cat { + greeting: String +} + +type Tabby implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected index 5932a1108880f..86b36ce128732 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected @@ -56,6 +56,7 @@ extend type ClientViewer { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.expected new file mode 100644 index 0000000000000..9ddd4936a9368 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.expected @@ -0,0 +1,277 @@ +==================================== INPUT ==================================== +fragment clientEdgeFromClientTypeToClientTypeTerse_fragment on ClientViewer { + some_field +} + +query clientEdgeFromClientTypeToClientTypeTerse_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! + name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType + @relay_resolver( + fragment_name: "clientEdgeFromClientTypeToClientTypeTerse_fragment" + import_path: "./path/to/Resolver.js", + ) +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "clientEdgeFromClientTypeToClientTypeTerse_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientType", + "modelResolvers": { + "ClientType": { + "alias": null, + "args": null, + "fragment": { + "args": 
null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "client_edge", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('').ClientType, 'id', true), + "path": "client_viewer.client_edge.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "clientEdgeFromClientTypeToClientTypeTerse_fragment" + }, + "kind": "RelayResolver", + "name": "client_edge", + "resolverModule": require('Resolver'), + "path": "client_viewer.client_edge" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientEdgeFromClientTypeToClientTypeTerse_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_edge", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "some_field", + "storageKey": null + } + ], + "type": "ClientViewer", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, 
+ "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "e6057d52710bc19ef7c8002823bcd8da", + "id": null, + "metadata": {}, + "name": "clientEdgeFromClientTypeToClientTypeTerse_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('UserModelResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientEdgeFromClientTypeToClientTypeTerse_fragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "some_field", + "storageKey": null + } + ] + } + ], + "type": "ClientViewer", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.graphql 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.graphql new file mode 100644 index 0000000000000..8b9e2194f2690 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.graphql @@ -0,0 +1,40 @@ +fragment clientEdgeFromClientTypeToClientTypeTerse_fragment on ClientViewer { + some_field +} + +query clientEdgeFromClientTypeToClientTypeTerse_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! + name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType + @relay_resolver( + fragment_name: "clientEdgeFromClientTypeToClientTypeTerse_fragment" + import_path: "./path/to/Resolver.js", + ) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.expected new file mode 100644 index 0000000000000..68ef6ab876478 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.expected @@ -0,0 +1,230 @@ +==================================== INPUT ==================================== +query clientEdgeFromClientTypeToClientTypeTerseLive_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! 
+ name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id", + live: true + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "clientEdgeFromClientTypeToClientTypeTerseLive_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientType", + "modelResolvers": { + "ClientType": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayLiveResolver", + "name": "client_edge", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('').ClientType, 'id', true), + "path": "client_viewer.client_edge.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "client_edge", + "resolverModule": require('Resolver'), + "path": "client_viewer.client_edge" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + 
"kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientEdgeFromClientTypeToClientTypeTerseLive_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_edge", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "de3c2295804293b99b45794d4bf6b3b4", + "id": null, + "metadata": {}, + "name": "clientEdgeFromClientTypeToClientTypeTerseLive_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
+ +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('UserModelResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "ClientType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.graphql new file mode 100644 index 0000000000000..8aa0fe1974649 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.graphql @@ -0,0 +1,33 @@ +query clientEdgeFromClientTypeToClientTypeTerseLive_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! 
+ name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id", + live: true + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.expected new file mode 100644 index 0000000000000..92fb7aa085419 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.expected @@ -0,0 +1,277 @@ +==================================== INPUT ==================================== +fragment clientEdgeFromClientTypeToClientTypeTersePlural_fragment on ClientViewer { + some_field +} + +query clientEdgeFromClientTypeToClientTypeTersePlural_Query { + client_viewer { + client_edges { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! 
+ name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edges: [ClientType] + @relay_resolver( + fragment_name: "clientEdgeFromClientTypeToClientTypeTersePlural_fragment" + import_path: "./path/to/Resolver.js", + ) +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "clientEdgeFromClientTypeToClientTypeTersePlural_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientType", + "modelResolvers": { + "ClientType": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "client_edges", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('').ClientType, 'id', true), + "path": "client_viewer.client_edges.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "clientEdgeFromClientTypeToClientTypeTersePlural_fragment" + }, + "kind": "RelayResolver", + "name": "client_edges", + "resolverModule": require('Resolver'), + "path": "client_viewer.client_edges" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + 
"kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientEdgeFromClientTypeToClientTypeTersePlural_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_edges", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "some_field", + "storageKey": null + } + ], + "type": "ClientViewer", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "b9ff4b1737e14c4178c4744a4cda9347", + "id": null, + "metadata": {}, + "name": "clientEdgeFromClientTypeToClientTypeTersePlural_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
+ +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('UserModelResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "clientEdgeFromClientTypeToClientTypeTersePlural_fragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "some_field", + "storageKey": null + } + ] + } + ], + "type": "ClientViewer", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.graphql new file mode 100644 index 0000000000000..161a32806bc4d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.graphql @@ -0,0 +1,40 @@ +fragment clientEdgeFromClientTypeToClientTypeTersePlural_fragment on ClientViewer { + some_field +} + +query clientEdgeFromClientTypeToClientTypeTersePlural_Query { + 
client_viewer { + client_edges { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! + name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edges: [ClientType] + @relay_resolver( + fragment_name: "clientEdgeFromClientTypeToClientTypeTersePlural_fragment" + import_path: "./path/to/Resolver.js", + ) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.expected new file mode 100644 index 0000000000000..c98c039017972 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.expected @@ -0,0 +1,229 @@ +==================================== INPUT ==================================== +query clientEdgeFromClientTypeToClientTypeTerseScalar_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! 
+ name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "clientEdgeFromClientTypeToClientTypeTerseScalar_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientType", + "modelResolvers": { + "ClientType": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "client_edge", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('').ClientType, 'id', true), + "path": "client_viewer.client_edge.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "client_edge", + "resolverModule": require('Resolver'), + "path": "client_viewer.client_edge" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", 
+ "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "clientEdgeFromClientTypeToClientTypeTerseScalar_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "ClientViewer", + "kind": "LinkedField", + "name": "client_viewer", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_edge", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientType", + "kind": "LinkedField", + "name": "client_edge", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "deffb175fdad447c0ede3a99a95c61c4", + "id": null, + "metadata": {}, + "name": "clientEdgeFromClientTypeToClientTypeTerseScalar_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
+ +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ClientType__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('ClientType__id.graphql'), require('UserModelResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "ClientType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ClientType__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "ClientType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.graphql new file mode 100644 index 0000000000000..6432f2feeecd2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.graphql @@ -0,0 +1,32 @@ +query clientEdgeFromClientTypeToClientTypeTerseScalar_Query { + client_viewer { + client_edge { + name + } + } +} + +# %extensions% + +type ClientViewer { + some_field: String +} + +type ClientType @__RelayResolverModel { + id: ID! 
+ name: String + __relay_model_instance: RelayResolverValue + @relay_resolver( + fragment_name: "ClientType__id" + import_path: "./path/to/UserModelResolver.js" + inject_fragment_data: "id" + ) +} + +extend type Query { + client_viewer: ClientViewer +} + +extend type ClientViewer { + client_edge: ClientType @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected index 4a3d51544c618..8796ca30f3086 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected @@ -37,6 +37,7 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected index 3c8ee0cd43bb5..61051f7ac0560 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected @@ -41,6 +41,7 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.expected new file mode 100644 index 0000000000000..951b799258c10 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +# expected-to-throw +query connectionNameMatchesFragmentQuery { + ...connectionNameMatchesFragmentFragment +} + +fragment connectionNameMatchesFragmentFragment on Query +@argumentDefinitions(first: { type: "Int", defaultValue: 2 }, after: { type: "ID" }) + # This name matches the parent fragment, which violates an invariant that every + # query/fragment have a unique name. + @refetchable(queryName: "connectionNameMatchesFragmentFragment") { + node(id: "4") { + id + ... on Story { + comments(first: $first, after: $after) @connection(key: "NodeQuery_comments") { + edges { + node { + id + } + } + } + } + } +} +==================================== ERROR ==================================== +✖︎ The `queryName` specified in `@refetchable` must be unique, a definition with the name `connectionNameMatchesFragmentFragment` already exists. + + connection-name-matches-fragment.invalid.graphql:10:27 + 9 │ # query/fragment have a unique name. 
+ 10 │ @refetchable(queryName: "connectionNameMatchesFragmentFragment") { + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 11 │ node(id: "4") { + + ℹ︎ a fragment with that name is already defined here + + connection-name-matches-fragment.invalid.graphql:6:10 + 5 │ + 6 │ fragment connectionNameMatchesFragmentFragment on Query + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 7 │ @argumentDefinitions(first: { type: "Int", defaultValue: 2 }, after: { type: "ID" }) diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.graphql new file mode 100644 index 0000000000000..47bad4018db70 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.graphql @@ -0,0 +1,23 @@ +# expected-to-throw +query connectionNameMatchesFragmentQuery { + ...connectionNameMatchesFragmentFragment +} + +fragment connectionNameMatchesFragmentFragment on Query +@argumentDefinitions(first: { type: "Int", defaultValue: 2 }, after: { type: "ID" }) + # This name matches the parent fragment, which violates an invariant that every + # query/fragment have a unique name. + @refetchable(queryName: "connectionNameMatchesFragmentFragment") { + node(id: "4") { + id + ... 
on Story { + comments(first: $first, after: $after) @connection(key: "NodeQuery_comments") { + edges { + node { + id + } + } + } + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.expected deleted file mode 100644 index 05c017743d405..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.expected +++ /dev/null @@ -1,1023 +0,0 @@ -==================================== INPUT ==================================== -query flightPropsTransform_StoryQuery( - $condition: Boolean! - $count: Int! - $id: ID! -) { - node(id: $id) { - ...flightPropsTransform_story - } -} - -fragment flightPropsTransform_story on Story { - ...flightPropsTransform_storyHeader - StoryAttachments - StoryComments(condition: $condition, count: $count) - ...flightPropsTransform_storyFooter -} - -query flightPropsTransform_StoryHeaderQuery( - $id: ID! -) { - node(id: $id) { - ...flightPropsTransform_storyHeader - } -} - -fragment flightPropsTransform_storyHeader on Story { - ...flightPropsTransform_storyTitle - ...flightPropsTransform_storyAuthor -} - -query flightPropsTransform_StoryTitleQuery( - $id: ID! -) { - node(id: $id) { - ...flightPropsTransform_storyTitle - } -} - -fragment flightPropsTransform_storyTitle on Story { - StoryTitle -} - -fragment flightPropsTransform_storyAuthor on Story { - StoryAuthor -} - -fragment flightPropsTransform_storyFooter on Story { - StoryFooter -} - -query flightPropsTransform_StoryCommentsQuery( - $condition: Boolean! - $count: Int! - $id: ID! -) { - node(id: $id) { - ... on Story { - StoryComments(condition: $condition, count: $count) - } - } -} - -# %extensions% # -directive @react_flight_component(name: String!) on FIELD_DEFINITION - -extend type Story { - StoryComments( - condition: Boolean! - count: Int! 
- ): ReactFlightComponent @react_flight_component(name: "StoryComments.server") - - StoryTitle: ReactFlightComponent @react_flight_component(name: "StoryTitle.server") - - StoryAuthor: ReactFlightComponent @react_flight_component(name: "StoryAuthor.server") - - StoryFooter: ReactFlightComponent @react_flight_component(name: "StoryFooter.server") - - StoryAttachments: ReactFlightComponent @react_flight_component(name: "StoryAttachments.server") -} -==================================== OUTPUT =================================== -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "condition" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_StoryCommentsQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "kind": "InlineFragment", - "selections": [ - { - "alias": "StoryComments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryComments.server" - }, - { - "fields": [ - { - "kind": "Variable", - "name": "condition", - "variableName": "condition" - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "condition" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" - }, - { - 
"defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "flightPropsTransform_StoryCommentsQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "kind": "InlineFragment", - "selections": [ - { - "alias": "StoryComments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryComments.server" - }, - { - "fields": [ - { - "kind": "Variable", - "name": "condition", - "variableName": "condition" - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "36c624e95960b57bae298acee0fc2325", - "id": null, - "metadata": {}, - "name": "flightPropsTransform_StoryCommentsQuery", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query flightPropsTransform_StoryCommentsQuery( - $condition: Boolean! - $count: Int! - $id: ID! -) @react_flight(components: ["StoryComments.server"]) { - node(id: $id) { - __typename - ... 
on Story { - StoryComments: flight(component: "StoryComments.server", props: {condition: $condition, count: $count}) - } - id - } -} - - -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_StoryHeaderQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyHeader" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "flightPropsTransform_StoryHeaderQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "kind": "InlineFragment", - "selections": [ - { - "alias": "StoryTitle", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryTitle.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryTitle.server\",props:{})" - }, - { - "alias": "StoryAuthor", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryAuthor.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryAuthor.server\",props:{})" - 
} - ], - "type": "Story", - "abstractKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "1a9ce6cbc21aa0877299be1d6d47b441", - "id": null, - "metadata": {}, - "name": "flightPropsTransform_StoryHeaderQuery", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query flightPropsTransform_StoryHeaderQuery( - $id: ID! -) @react_flight(components: ["StoryAuthor.server", "StoryTitle.server"]) { - node(id: $id) { - __typename - ...flightPropsTransform_storyHeader - id - } -} - -fragment flightPropsTransform_storyAuthor on Story { - StoryAuthor: flight(component: "StoryAuthor.server", props: {}) -} - -fragment flightPropsTransform_storyHeader on Story { - ...flightPropsTransform_storyTitle - ...flightPropsTransform_storyAuthor -} - -fragment flightPropsTransform_storyTitle on Story { - StoryTitle: flight(component: "StoryTitle.server", props: {}) -} - - -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "condition" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_StoryQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_story" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "condition" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" - }, 
- { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "flightPropsTransform_StoryQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "kind": "InlineFragment", - "selections": [ - { - "alias": "StoryTitle", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryTitle.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryTitle.server\",props:{})" - }, - { - "alias": "StoryAuthor", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryAuthor.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryAuthor.server\",props:{})" - }, - { - "alias": "StoryAttachments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryAttachments.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryAttachments.server\",props:{})" - }, - { - "alias": "StoryComments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryComments.server" - }, - { - "fields": [ - { - "kind": "Variable", - "name": "condition", - "variableName": "condition" - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - }, - { - "alias": "StoryFooter", - "args": [ - { - "kind": 
"Literal", - "name": "component", - "value": "StoryFooter.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryFooter.server\",props:{})" - } - ], - "type": "Story", - "abstractKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "69a734ff296b28728d03558e1d566210", - "id": null, - "metadata": {}, - "name": "flightPropsTransform_StoryQuery", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query flightPropsTransform_StoryQuery( - $condition: Boolean! - $count: Int! - $id: ID! -) @react_flight(components: ["StoryAttachments.server", "StoryAuthor.server", "StoryComments.server", "StoryFooter.server", "StoryTitle.server"]) { - node(id: $id) { - __typename - ...flightPropsTransform_story - id - } -} - -fragment flightPropsTransform_story on Story { - ...flightPropsTransform_storyHeader - StoryAttachments: flight(component: "StoryAttachments.server", props: {}) - StoryComments: flight(component: "StoryComments.server", props: {condition: $condition, count: $count}) - ...flightPropsTransform_storyFooter -} - -fragment flightPropsTransform_storyAuthor on Story { - StoryAuthor: flight(component: "StoryAuthor.server", props: {}) -} - -fragment flightPropsTransform_storyFooter on Story { - StoryFooter: flight(component: "StoryFooter.server", props: {}) -} - -fragment flightPropsTransform_storyHeader on Story { - ...flightPropsTransform_storyTitle - ...flightPropsTransform_storyAuthor -} - -fragment flightPropsTransform_storyTitle on Story { - StoryTitle: flight(component: "StoryTitle.server", props: {}) -} - - -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Fragment", - "metadata": null, - "name": 
"flightPropsTransform_StoryTitleQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyTitle" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "flightPropsTransform_StoryTitleQuery", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "kind": "InlineFragment", - "selections": [ - { - "alias": "StoryTitle", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryTitle.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryTitle.server\",props:{})" - } - ], - "type": "Story", - "abstractKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "c7147649e8202d8f1b699b187fc3fd2c", - "id": null, - "metadata": {}, - "name": "flightPropsTransform_StoryTitleQuery", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query flightPropsTransform_StoryTitleQuery( - $id: ID! 
-) @react_flight(components: ["StoryTitle.server"]) { - node(id: $id) { - __typename - ...flightPropsTransform_storyTitle - id - } -} - -fragment flightPropsTransform_storyTitle on Story { - StoryTitle: flight(component: "StoryTitle.server", props: {}) -} - - -{ - "argumentDefinitions": [ - { - "kind": "RootArgument", - "name": "condition" - }, - { - "kind": "RootArgument", - "name": "count" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_story", - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyHeader" - }, - { - "alias": "StoryAttachments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryAttachments.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryAttachments.server\",props:{})" - }, - { - "alias": "StoryComments", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryComments.server" - }, - { - "fields": [ - { - "kind": "Variable", - "name": "condition", - "variableName": "condition" - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - }, - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyFooter" - } - ], - "type": "Story", - "abstractKey": null -} - -{ - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_storyAuthor", - "selections": [ - { - "alias": "StoryAuthor", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryAuthor.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryAuthor.server\",props:{})" - } - ], - "type": "Story", - 
"abstractKey": null -} - -{ - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_storyFooter", - "selections": [ - { - "alias": "StoryFooter", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryFooter.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryFooter.server\",props:{})" - } - ], - "type": "Story", - "abstractKey": null -} - -{ - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_storyHeader", - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyTitle" - }, - { - "args": null, - "kind": "FragmentSpread", - "name": "flightPropsTransform_storyAuthor" - } - ], - "type": "Story", - "abstractKey": null -} - -{ - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "flightPropsTransform_storyTitle", - "selections": [ - { - "alias": "StoryTitle", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "StoryTitle.server" - }, - { - "fields": [], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": "flight(component:\"StoryTitle.server\",props:{})" - } - ], - "type": "Story", - "abstractKey": null -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.graphql deleted file mode 100644 index c31935666683d..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/flight-props-transform.graphql +++ /dev/null @@ -1,79 +0,0 @@ -query flightPropsTransform_StoryQuery( - $condition: Boolean! - $count: Int! - $id: ID! 
-) { - node(id: $id) { - ...flightPropsTransform_story - } -} - -fragment flightPropsTransform_story on Story { - ...flightPropsTransform_storyHeader - StoryAttachments - StoryComments(condition: $condition, count: $count) - ...flightPropsTransform_storyFooter -} - -query flightPropsTransform_StoryHeaderQuery( - $id: ID! -) { - node(id: $id) { - ...flightPropsTransform_storyHeader - } -} - -fragment flightPropsTransform_storyHeader on Story { - ...flightPropsTransform_storyTitle - ...flightPropsTransform_storyAuthor -} - -query flightPropsTransform_StoryTitleQuery( - $id: ID! -) { - node(id: $id) { - ...flightPropsTransform_storyTitle - } -} - -fragment flightPropsTransform_storyTitle on Story { - StoryTitle -} - -fragment flightPropsTransform_storyAuthor on Story { - StoryAuthor -} - -fragment flightPropsTransform_storyFooter on Story { - StoryFooter -} - -query flightPropsTransform_StoryCommentsQuery( - $condition: Boolean! - $count: Int! - $id: ID! -) { - node(id: $id) { - ... on Story { - StoryComments(condition: $condition, count: $count) - } - } -} - -# %extensions% # -directive @react_flight_component(name: String!) on FIELD_DEFINITION - -extend type Story { - StoryComments( - condition: Boolean! - count: Int! 
- ): ReactFlightComponent @react_flight_component(name: "StoryComments.server") - - StoryTitle: ReactFlightComponent @react_flight_component(name: "StoryTitle.server") - - StoryAuthor: ReactFlightComponent @react_flight_component(name: "StoryAuthor.server") - - StoryFooter: ReactFlightComponent @react_flight_component(name: "StoryFooter.server") - - StoryAttachments: ReactFlightComponent @react_flight_component(name: "StoryAttachments.server") -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.expected new file mode 100644 index 0000000000000..4d8d63b34bb57 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.expected @@ -0,0 +1,129 @@ +==================================== INPUT ==================================== +query fragmentArgPassedToResolverRutimeArg_MeQuery($globalArg: String!) 
{ + ...fragmentArgPassedToResolverRutimeArgFragment + @arguments(fragmentArg: $globalArg) +} + +fragment fragmentArgPassedToResolverRutimeArgFragment on Query +@argumentDefinitions(fragmentArg: {type: "String!"}) { + resolver_field(resolverArg: $fragmentArg) +} + +# %extensions% + +extend type Query { + resolver_field(resolverArg: String!): String + @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "globalArg" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "fragmentArgPassedToResolverRutimeArg_MeQuery", + "selections": [ + { + "args": [ + { + "kind": "Variable", + "name": "fragmentArg", + "variableName": "globalArg" + } + ], + "kind": "FragmentSpread", + "name": "fragmentArgPassedToResolverRutimeArgFragment" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "globalArg" + } + ], + "kind": "Operation", + "name": "fragmentArgPassedToResolverRutimeArg_MeQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "resolver_field", + "args": [ + { + "kind": "Variable", + "name": "resolverArg", + "variableName": "globalArg" + } + ], + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + } + ] + }, + "params": { + "cacheID": "e6fab15386443f5a4c89366a5d006c7b", + "id": null, + "metadata": {}, + "name": "fragmentArgPassedToResolverRutimeArg_MeQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
+ +{ + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "fragmentArg" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "fragmentArgPassedToResolverRutimeArgFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "resolverArg", + "variableName": "fragmentArg" + } + ], + "fragment": null, + "kind": "RelayResolver", + "name": "resolver_field", + "resolverModule": require('Resolver'), + "path": "resolver_field" + } + ] + } + ], + "type": "Query", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.graphql new file mode 100644 index 0000000000000..c780ca679d6e6 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.graphql @@ -0,0 +1,16 @@ +query fragmentArgPassedToResolverRutimeArg_MeQuery($globalArg: String!) 
{ + ...fragmentArgPassedToResolverRutimeArgFragment + @arguments(fragmentArg: $globalArg) +} + +fragment fragmentArgPassedToResolverRutimeArgFragment on Query +@argumentDefinitions(fragmentArg: {type: "String!"}) { + resolver_field(resolverArg: $fragmentArg) +} + +# %extensions% + +extend type Query { + resolver_field(resolverArg: String!): String + @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-node-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-node-interface.expected index 78e19b4e9d517..48d8aa2572a26 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-node-interface.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-node-interface.expected @@ -244,7 +244,10 @@ fragment fragmentOnNodeInterface_RefetchableFragment on Node { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "fragmentOnNodeInterface_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected index 9d0cf6bcdcc5d..94c3498c75c31 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected @@ -263,7 +263,10 @@ fragment fragmentOnNonNodeFetchableType_RefetchableFragment on NonNodeStory { "fetch__NonNodeStory" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "fetch_id" + "identifierInfo": { + 
"identifierField": "fetch_id", + "identifierQueryVariableName": "id" + } } }, "name": "fragmentOnNonNodeFetchableType_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected index 84bfa1bd84746..893a400d33ef8 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected @@ -235,7 +235,10 @@ fragment fragmentOnObjectImplementingNodeInterface_RefetchableFragment on User { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "fragmentOnObjectImplementingNodeInterface_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected index 3e4e33feb3ed8..c401b33139ad1 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected @@ -8,7 +8,7 @@ query fragmentWithDeferInStream_QueryWithFragmentWithStreamQuery($id: ID!) 
{ fragment fragmentWithDeferInStream_FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { ...fragmentWithDeferInStream_ActorFragment @defer } } @@ -194,7 +194,7 @@ fragment fragmentWithDeferInStream_ActorFragment on Actor { fragment fragmentWithDeferInStream_FeedbackFragment on Feedback { id - actors @stream(label: "fragmentWithDeferInStream_FeedbackFragment$stream$StreamedActorsLabel", initial_count: 1) { + actors @stream(label: "fragmentWithDeferInStream_FeedbackFragment$stream$StreamedActorsLabel", initialCount: 1) { __typename ...fragmentWithDeferInStream_ActorFragment @defer(label: "fragmentWithDeferInStream_FeedbackFragment$defer$fragmentWithDeferInStream_ActorFragment") id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.graphql index 7886bae9ac370..d551db5e514b2 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.graphql @@ -7,7 +7,7 @@ query fragmentWithDeferInStream_QueryWithFragmentWithStreamQuery($id: ID!) 
{ fragment fragmentWithDeferInStream_FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { ...fragmentWithDeferInStream_ActorFragment @defer } } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.expected index 6d08b8070d456..1f7a7bf9222e4 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.expected @@ -8,7 +8,7 @@ query fragmentWithStream_QueryWithFragmentWithStreamQuery($id: ID!) { fragment fragmentWithStream_FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } @@ -174,7 +174,7 @@ query fragmentWithStream_QueryWithFragmentWithStreamQuery( fragment fragmentWithStream_FeedbackFragment on Feedback { id - actors @stream(label: "fragmentWithStream_FeedbackFragment$stream$StreamedActorsLabel", initial_count: 1) { + actors @stream(label: "fragmentWithStream_FeedbackFragment$stream$StreamedActorsLabel", initialCount: 1) { __typename name id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.graphql index e60f16cdf1a39..c9020d678df83 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/fragment-with-stream.graphql @@ -7,7 +7,7 @@ query fragmentWithStream_QueryWithFragmentWithStreamQuery($id: ID!) 
{ fragment fragmentWithStream_FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected index d0593a0e84907..a47a592060c54 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected @@ -44,6 +44,7 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientUser", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/multiple-client-edges.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/multiple-client-edges.expected index ccc6db5181088..105961cdf8ef5 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/multiple-client-edges.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/multiple-client-edges.expected @@ -60,6 +60,7 @@ extend type User { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientUser", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -95,6 +96,7 @@ extend type User { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientUser", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.expected new file mode 100644 index 0000000000000..c914e94e3801a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.expected @@ -0,0 +1,32 @@ +==================================== INPUT ==================================== +# expected-to-throw +query providedVariablePassedInArgumentRefetchableFragmentInvalid_Query { + node(id: 4) { + ...providedVariablePassedInArgumentRefetchableFragmentInvalid_Fragment @arguments(includeName: true) + } +} + +fragment providedVariablePassedInArgumentRefetchableFragmentInvalid_Fragment on Node +@refetchable(queryName: "refetchableQuery") +@argumentDefinitions( + includeName: {type: "Boolean!", provider: "includeName_RelayProvider"} +) { + id + name @include(if: $includeName) +} +==================================== ERROR ==================================== +✖︎ Passing a value to 'includeName' (a provided variable) through @arguments is not supported. 
+ + provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql:4:87 + 3 │ node(id: 4) { + 4 │ ...providedVariablePassedInArgumentRefetchableFragmentInvalid_Fragment @arguments(includeName: true) + │ ^^^^^^^^^^^ + 5 │ } + + ℹ︎ Provided variable defined here + + provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql:11:3 + 10 │ @argumentDefinitions( + 11 │ includeName: {type: "Boolean!", provider: "includeName_RelayProvider"} + │ ^^^^^^^^^^^ + 12 │ ) { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql new file mode 100644 index 0000000000000..d8c8a56191022 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql @@ -0,0 +1,15 @@ +# expected-to-throw +query providedVariablePassedInArgumentRefetchableFragmentInvalid_Query { + node(id: 4) { + ...providedVariablePassedInArgumentRefetchableFragmentInvalid_Fragment @arguments(includeName: true) + } +} + +fragment providedVariablePassedInArgumentRefetchableFragmentInvalid_Fragment on Node +@refetchable(queryName: "refetchableQuery") +@argumentDefinitions( + includeName: {type: "Boolean!", provider: "includeName_RelayProvider"} +) { + id + name @include(if: $includeName) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.expected new file mode 100644 index 0000000000000..1597de4d8400e --- /dev/null +++ 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.expected @@ -0,0 +1,343 @@ +==================================== INPUT ==================================== +query providedVariableRefetchableFragmentCombination_Query { + node(id: 4) { + ...providedVariableRefetchableFragmentCombination_Fragment + } +} + +fragment providedVariableRefetchableFragmentCombination_Fragment on Node +@refetchable(queryName: "refetchableQuery") +@argumentDefinitions( + includeName: {type: "Boolean!", provider: "includeName_RelayProvider"} +) { + id + name @include(if: $includeName) +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "providedVariableRefetchableFragmentCombination_Query", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Literal", + "name": "id", + "value": 4 + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "providedVariableRefetchableFragmentCombination_Fragment" + } + ], + "storageKey": "node(id:4)" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "__relay_internal__pv__includeName_RelayProvider" + } + ], + "kind": "Operation", + "name": "providedVariableRefetchableFragmentCombination_Query", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Literal", + "name": "id", + "value": 4 + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "TypeDiscriminator", + "abstractKey": "__isNode" + }, + { + "alias": 
null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "__relay_internal__pv__includeName_RelayProvider", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "storageKey": "node(id:4)" + } + ] + }, + "params": { + "cacheID": "4fde8992106a76990bb849df8c53c930", + "id": null, + "metadata": {}, + "name": "providedVariableRefetchableFragmentCombination_Query", + "operationKind": "query", + "text": null, + "providedVariables": { + "__relay_internal__pv__includeName_RelayProvider": require('includeName_RelayProvider') + } + } +} + +QUERY: + +query providedVariableRefetchableFragmentCombination_Query( + $__relay_internal__pv__includeName_RelayProvider: Boolean! +) { + node(id: 4) { + __typename + ...providedVariableRefetchableFragmentCombination_Fragment + id + } +} + +fragment providedVariableRefetchableFragmentCombination_Fragment on Node { + __isNode: __typename + id + name @include(if: $__relay_internal__pv__includeName_RelayProvider) +} + + +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "includeName" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "refetchableQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "providedVariableRefetchableFragmentCombination_Fragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "includeName" + }, 
+ { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "__relay_internal__pv__includeName_RelayProvider" + } + ], + "kind": "Operation", + "name": "refetchableQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "TypeDiscriminator", + "abstractKey": "__isNode" + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "__relay_internal__pv__includeName_RelayProvider", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "b8a7a02a0ee5dcbac23246959bda7f5a", + "id": null, + "metadata": {}, + "name": "refetchableQuery", + "operationKind": "query", + "text": null, + "providedVariables": { + "includeName": require('includeName_RelayProvider'), + "__relay_internal__pv__includeName_RelayProvider": require('includeName_RelayProvider') + } + } +} + +QUERY: + +query refetchableQuery( + $id: ID! + $__relay_internal__pv__includeName_RelayProvider: Boolean! 
+) { + node(id: $id) { + __typename + ...providedVariableRefetchableFragmentCombination_Fragment + id + } +} + +fragment providedVariableRefetchableFragmentCombination_Fragment on Node { + __isNode: __typename + id + name @include(if: $__relay_internal__pv__includeName_RelayProvider) +} + + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "__relay_internal__pv__includeName_RelayProvider" + } + ], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('refetchableQuery.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "providedVariableRefetchableFragmentCombination_Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "__relay_internal__pv__includeName_RelayProvider", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.graphql new file mode 100644 index 0000000000000..43df7525f3529 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.graphql @@ -0,0 +1,14 @@ +query providedVariableRefetchableFragmentCombination_Query { + node(id: 4) { + ...providedVariableRefetchableFragmentCombination_Fragment + } +} + +fragment providedVariableRefetchableFragmentCombination_Fragment on Node +@refetchable(queryName: "refetchableQuery") +@argumentDefinitions( + includeName: {type: 
"Boolean!", provider: "includeName_RelayProvider"} +) { + id + name @include(if: $includeName) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected index b4cc718ccffa0..63c7df4a05bea 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected @@ -321,7 +321,10 @@ fragment providedVariableRefetchableFragment_providedVariableFragment on User { "node" ], "operation": require('refetchableQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "providedVariableRefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected index 788e9df8a620e..fb0d16eb943df 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected @@ -24,7 +24,7 @@ fragment queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name on Mar } } ==================================== ERROR ==================================== -✖︎ The 'queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name' is transformed to use @no_inline implictly by `@module` or `@relay_client_component`, but it's also used in a regular fragment spread. 
It's required to explicitly add `@no_inline` to the definition of 'queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name'. +✖︎ The 'queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name' is transformed to use @no_inline implicitly by `@module`, but it's also used in a regular fragment spread. It's required to explicitly add `@no_inline` to the definition of 'queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name'. query-with-and-without-module-directive.graphql:15:8 14 │ without_module: nameRenderer { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.expected deleted file mode 100644 index d189cb6244efe..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.expected +++ /dev/null @@ -1,240 +0,0 @@ -==================================== INPUT ==================================== -query queryWithRelayClientComponentWithArgumentDefinitions_Query($id: ID!) 
{ - node(id: $id) { - id - ...queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment - @relay_client_component - } -} - -fragment queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment on User -@argumentDefinitions(size: {type: "[Int]", defaultValue: [32]}) { - name - profilePicture(size: $size) { - uri - } -} -==================================== OUTPUT =================================== -{ - "argumentDefinitions": [ - { - "defaultValue": [ - 32 - ], - "kind": "LocalArgument", - "name": "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment$size" - } - ], - "kind": "SplitOperation", - "metadata": {}, - "name": "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment$normalization", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "size", - "variableName": "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment$size" - } - ], - "concreteType": "Image", - "kind": "LinkedField", - "name": "profilePicture", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "uri", - "storageKey": null - } - ], - "storageKey": null - } - ] -} - -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "queryWithRelayClientComponentWithArgumentDefinitions_Query", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "args": null, - "kind": "FragmentSpread", - "name": 
"queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } - ], - "kind": "Operation", - "name": "queryWithRelayClientComponentWithArgumentDefinitions_Query", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "args": null, - "fragment": require('queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment$normalization.graphql'), - "kind": "ClientComponent" - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "6ad82143df29774a4c9152ad549f165a", - "id": null, - "metadata": {}, - "name": "queryWithRelayClientComponentWithArgumentDefinitions_Query", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query queryWithRelayClientComponentWithArgumentDefinitions_Query( - $id: ID! 
-) { - node(id: $id) { - __typename - id - ...queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment @relay_client_component_server(module_id: "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment$normalization.graphql") - } -} - -fragment queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment on User { - name - profilePicture(size: [32]) { - uri - } -} - - -{ - "argumentDefinitions": [ - { - "defaultValue": [ - 32 - ], - "kind": "LocalArgument", - "name": "size" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "size", - "variableName": "size" - } - ], - "concreteType": "Image", - "kind": "LinkedField", - "name": "profilePicture", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "uri", - "storageKey": null - } - ], - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.graphql deleted file mode 100644 index 0795fb9ff733a..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.graphql +++ /dev/null @@ -1,15 +0,0 @@ -query queryWithRelayClientComponentWithArgumentDefinitions_Query($id: ID!) 
{ - node(id: $id) { - id - ...queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment - @relay_client_component - } -} - -fragment queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment on User -@argumentDefinitions(size: {type: "[Int]", defaultValue: [32]}) { - name - profilePicture(size: $size) { - uri - } -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.expected deleted file mode 100644 index 95b3ccbe643b4..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.expected +++ /dev/null @@ -1,238 +0,0 @@ -==================================== INPUT ==================================== -query queryWithRelayClientComponent_Query($id: ID!, $size: [Int]) { - node(id: $id) { - id - ...queryWithRelayClientComponent_ClientComponentFragment - @relay_client_component - } -} - -fragment queryWithRelayClientComponent_ClientComponentFragment on User { - name - profilePicture(size: $size) { - uri - } -} -==================================== OUTPUT =================================== -{ - "kind": "SplitOperation", - "metadata": {}, - "name": "queryWithRelayClientComponent_ClientComponentFragment$normalization", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "size", - "variableName": "size" - } - ], - "concreteType": "Image", - "kind": "LinkedField", - "name": "profilePicture", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "uri", - "storageKey": null - } - ], - "storageKey": null - } - ] -} - -{ - "fragment": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": 
"LocalArgument", - "name": "id" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "size" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "queryWithRelayClientComponent_Query", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "args": null, - "kind": "FragmentSpread", - "name": "queryWithRelayClientComponent_ClientComponentFragment" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - }, - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "size" - } - ], - "kind": "Operation", - "name": "queryWithRelayClientComponent_Query", - "selections": [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } - ], - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "args": null, - "fragment": require('queryWithRelayClientComponent_ClientComponentFragment$normalization.graphql'), - "kind": "ClientComponent" - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "acfdaa661773ccf675ea2c3179ce215b", - "id": null, - "metadata": {}, - "name": "queryWithRelayClientComponent_Query", - "operationKind": "query", - "text": null - } -} - -QUERY: - -query queryWithRelayClientComponent_Query( - $id: ID! 
- $size: [Int] -) { - node(id: $id) { - __typename - id - ...queryWithRelayClientComponent_ClientComponentFragment @relay_client_component_server(module_id: "queryWithRelayClientComponent_ClientComponentFragment$normalization.graphql") - } -} - -fragment queryWithRelayClientComponent_ClientComponentFragment on User { - name - profilePicture(size: $size) { - uri - } -} - - -{ - "argumentDefinitions": [ - { - "kind": "RootArgument", - "name": "size" - } - ], - "kind": "Fragment", - "metadata": null, - "name": "queryWithRelayClientComponent_ClientComponentFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "size", - "variableName": "size" - } - ], - "concreteType": "Image", - "kind": "LinkedField", - "name": "profilePicture", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "uri", - "storageKey": null - } - ], - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.graphql deleted file mode 100644 index cb300da813820..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/query-with-relay-client-component.graphql +++ /dev/null @@ -1,14 +0,0 @@ -query queryWithRelayClientComponent_Query($id: ID!, $size: [Int]) { - node(id: $id) { - id - ...queryWithRelayClientComponent_ClientComponentFragment - @relay_client_component - } -} - -fragment queryWithRelayClientComponent_ClientComponentFragment on User { - name - profilePicture(size: $size) { - uri - } -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected index 35b9c79c1e5a3..1e84b772d2697 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected @@ -318,7 +318,10 @@ fragment refetchableConnectionCustomHandler_RefetchableConnection_feedback on Fe "node" ], "operation": require('RefetchableConnectionQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableConnectionCustomHandler_RefetchableConnection_feedback", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection.expected index fb16f42a15e28..016d3a6a9622d 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-connection.expected @@ -318,7 +318,10 @@ fragment refetchableConnection_RefetchableConnection_feedback on Feedback { "node" ], "operation": require('RefetchableConnectionQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableConnection_RefetchableConnection_feedback", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.expected new file mode 100644 index 0000000000000..4e8fe2c933413 --- /dev/null +++ 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.expected @@ -0,0 +1,168 @@ +==================================== INPUT ==================================== +fragment refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery", preferFetchable: true) { + id +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "fetch_id", + "variableName": "id" + } + ], + "concreteType": "FetchableType", + "kind": "LinkedField", + "name": "fetch__FetchableType", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Operation", + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "fetch_id", + "variableName": "id" + } + ], + "concreteType": "FetchableType", + "kind": "LinkedField", + "name": "fetch__FetchableType", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "fetch_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__token", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + 
"cacheID": "31dcd57f42128b8d975e6224202a17c9", + "id": null, + "metadata": {}, + "name": "RefetchableFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableFragmentQuery( + $id: ID! +) { + fetch__FetchableType(fetch_id: $id) { + ...refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment + id + } +} + +fragment refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment on FetchableType { + id + fetch_id + __token +} + + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "fetch__FetchableType" + ], + "operation": require('RefetchableFragmentQuery.graphql'), + "identifierInfo": { + "identifierField": "fetch_id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "fetch_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__token", + "storageKey": null + } + ], + "type": "FetchableType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.graphql new file mode 100644 index 0000000000000..fb6c9528df2aa --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.graphql @@ -0,0 +1,4 @@ +fragment refetchableFragmentOnNodeAndFetchableArg_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery", preferFetchable: true) { + id +} diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.expected new file mode 100644 index 0000000000000..fb437a79d6c2e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.expected @@ -0,0 +1,146 @@ +==================================== INPUT ==================================== +fragment refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery") { + id +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Operation", + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + 
"alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "31dcd57f42128b8d975e6224202a17c9", + "id": null, + "metadata": {}, + "name": "RefetchableFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableFragmentQuery( + $id: ID! +) { + node(id: $id) { + __typename + ...refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment + id + } +} + +fragment refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment on FetchableType { + id +} + + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('RefetchableFragmentQuery.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "FetchableType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.graphql new file mode 100644 index 0000000000000..c3c23a3c49d3c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.graphql @@ -0,0 +1,4 @@ +fragment refetchableFragmentOnNodeAndFetchableNoFlag_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery") { + id +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.expected 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.expected new file mode 100644 index 0000000000000..460c33e7373dc --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.expected @@ -0,0 +1,170 @@ +==================================== INPUT ==================================== +# prefer_fetchable_in_refetch_queries + +fragment refetchableFragmentOnNodeAndFetchable_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery") { + id +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "fetch_id", + "variableName": "id" + } + ], + "concreteType": "FetchableType", + "kind": "LinkedField", + "name": "fetch__FetchableType", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "refetchableFragmentOnNodeAndFetchable_RefetchableFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } + ], + "kind": "Operation", + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "fetch_id", + "variableName": "id" + } + ], + "concreteType": "FetchableType", + "kind": "LinkedField", + "name": "fetch__FetchableType", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + 
"name": "fetch_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__token", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "31dcd57f42128b8d975e6224202a17c9", + "id": null, + "metadata": {}, + "name": "RefetchableFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableFragmentQuery( + $id: ID! +) { + fetch__FetchableType(fetch_id: $id) { + ...refetchableFragmentOnNodeAndFetchable_RefetchableFragment + id + } +} + +fragment refetchableFragmentOnNodeAndFetchable_RefetchableFragment on FetchableType { + id + fetch_id + __token +} + + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "fetch__FetchableType" + ], + "operation": require('RefetchableFragmentQuery.graphql'), + "identifierInfo": { + "identifierField": "fetch_id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "refetchableFragmentOnNodeAndFetchable_RefetchableFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "fetch_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__token", + "storageKey": null + } + ], + "type": "FetchableType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.graphql new file mode 100644 index 0000000000000..d4985a97a0700 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.graphql @@ -0,0 +1,6 @@ +# prefer_fetchable_in_refetch_queries + +fragment 
refetchableFragmentOnNodeAndFetchable_RefetchableFragment on FetchableType + @refetchable(queryName: "RefetchableFragmentQuery") { + id +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected index e91b8492174b1..49dfbbdc91fb0 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected @@ -243,7 +243,10 @@ fragment refetchableFragmentOnNodeWithMissingId_RefetchableFragment on Node { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableFragmentOnNodeWithMissingId_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected index a26bdf4b5f4f2..742e445add423 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected @@ -421,7 +421,10 @@ fragment refetchableFragmentWithConnectionBidirectional_PaginationFragment_1G22u "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": 
"refetchableFragmentWithConnectionBidirectional_PaginationFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected index 9616b19185e26..e6e18c31fcfff 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected @@ -355,7 +355,10 @@ import RefetchableFragmentQuery_graphql from './RefetchableFragmentQuery.graphql "node" ], "operation": RefetchableFragmentQuery_graphql, - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableFragmentWithConnectionEsModules_PaginationFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected index 84b1abf3b93f3..9c7f8d0277482 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected @@ -306,7 +306,7 @@ fragment refetchableFragmentWithConnectionWithStream_PaginationFragment_1G22uz o ... 
on User { name friends(after: $cursor, first: $count) { - edges @stream(label: "refetchableFragmentWithConnectionWithStream_PaginationFragment$stream$PaginationFragment_friends", initial_count: 1) { + edges @stream(label: "refetchableFragmentWithConnectionWithStream_PaginationFragment$stream$PaginationFragment_friends", initialCount: 1) { node { id __typename @@ -365,7 +365,10 @@ fragment refetchableFragmentWithConnectionWithStream_PaginationFragment_1G22uz o "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableFragmentWithConnectionWithStream_PaginationFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected index aef310842b6a4..11d9eaf09b5e4 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected @@ -348,7 +348,10 @@ fragment refetchableFragmentWithConnection_PaginationFragment_1G22uz on Node { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableFragmentWithConnection_PaginationFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected index 106fc73604fb3..01187b911f42a 100644 --- 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected @@ -13,7 +13,7 @@ query refetchableConflictWithOperationQuery { } } ==================================== ERROR ==================================== -✖︎ A unique query name has to be specified in `@refetchable`, an operation `refetchableConflictWithOperationQuery` already exists. +✖︎ The `queryName` specified in `@refetchable` must be unique, a definition with the name `refetchableConflictWithOperationQuery` already exists. refetchable_conflict_with_operation.invalid.graphql:4:27 3 │ fragment refetchableConflictWithOperationF on Node diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-client-id-field.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-client-id-field.expected index 98b9afe482213..a3d9b544c2ecf 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-client-id-field.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-client-id-field.expected @@ -114,9 +114,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { { "kind": "Literal", "name": "supported", - "value": [ - "PlainCommentBody" - ] + "value": "2Rll6p" } ], "concreteType": null, @@ -200,7 +198,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { ] } ], - "storageKey": "commentBody(supported:[\"PlainCommentBody\"])" + "storageKey": "commentBody(supported:\"2Rll6p\")" } ], "type": "Comment", @@ -323,9 +321,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { { "kind": "Literal", "name": "supported", - "value": [ - "PlainCommentBody" - ] + "value": "2Rll6p" } ], "concreteType": null, @@ -409,7 +405,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) 
{ ] } ], - "storageKey": "commentBody(supported:[\"PlainCommentBody\"])" + "storageKey": "commentBody(supported:\"2Rll6p\")" } ], "type": "Comment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected index 2e3e9c3c9efa2..147abf640235a 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected @@ -287,7 +287,10 @@ fragment relayResolverBackingClientEdge_best_friend_resolver on User { "node" ], "operation": require('ClientEdgeQuery_relayResolverBackingClientEdgeQuery_me__best_friend.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_relayResolverBackingClientEdgeQuery_me__best_friend", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.expected new file mode 100644 index 0000000000000..71cb70653bb5b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.expected @@ -0,0 +1,43 @@ +==================================== INPUT ==================================== +# expected-to-throw +query relayResolverEdgeToInterfaceWithChildInterfaceAndNoImplementorsQuery { + resolver_field { + name + } +} + +# %extensions% + +""" +An interface with no concrete implementors +""" +interface SomeInterface { + name: String +} + 
+interface ChildInterface implements SomeInterface { + name: String + age: Int +} + +extend type Query { + resolver_field: SomeInterface + @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== ERROR ==================================== +✖︎ Client Edges that reference client-defined interface types are not currently supported in Relay. + + relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql:3:3 + 2 │ query relayResolverEdgeToInterfaceWithChildInterfaceAndNoImplementorsQuery { + 3 │ resolver_field { + │ ^^^^^^^^^^^^^^ + 4 │ name + + +✖︎ No types implement the client interface SomeInterface. Interfaces returned by a @RelayResolver must have at least one concrete implementation. + + :2:44 + 1 │ # expected-to-throw + 2 │ query relayResolverEdgeToInterfaceWithChildInterfaceAndNoImplementorsQuery { + │ ^^^^^^^^^^^^^ + 3 │ resolver_field { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql new file mode 100644 index 0000000000000..996b9886d6989 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql @@ -0,0 +1,25 @@ +# expected-to-throw +query relayResolverEdgeToInterfaceWithChildInterfaceAndNoImplementorsQuery { + resolver_field { + name + } +} + +# %extensions% + +""" +An interface with no concrete implementors +""" +interface SomeInterface { + name: String +} + +interface ChildInterface implements SomeInterface { + name: String + age: Int +} + +extend type Query { + resolver_field: SomeInterface + @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.expected new file mode 100644 index 0000000000000..52481f5e54510 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.expected @@ -0,0 +1,38 @@ +==================================== INPUT ==================================== +# expected-to-throw +query relayResolverEdgeToInterfaceWithNoImplementorsQuery { + resolver_field { + name + } +} + +# %extensions% + +""" +An interface with no implementors +""" +interface SomeInterface { + name: String +} + +extend type Query { + resolver_field: SomeInterface + @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== ERROR ==================================== +✖︎ Client Edges that reference client-defined interface types are not currently supported in Relay. + + relay-resolver-edge-to-interface-with-no-implementors.graphql:3:3 + 2 │ query relayResolverEdgeToInterfaceWithNoImplementorsQuery { + 3 │ resolver_field { + │ ^^^^^^^^^^^^^^ + 4 │ name + + +✖︎ No types implement the client interface SomeInterface. Interfaces returned by a @RelayResolver must have at least one concrete implementation. 
+ + :2:35 + 1 │ # expected-to-throw + 2 │ query relayResolverEdgeToInterfaceWithNoImplementorsQuery { + │ ^^^^^^^^^^^^^ + 3 │ resolver_field { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.graphql new file mode 100644 index 0000000000000..358021deb4e42 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.graphql @@ -0,0 +1,20 @@ +# expected-to-throw +query relayResolverEdgeToInterfaceWithNoImplementorsQuery { + resolver_field { + name + } +} + +# %extensions% + +""" +An interface with no implementors +""" +interface SomeInterface { + name: String +} + +extend type Query { + resolver_field: SomeInterface + @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.expected new file mode 100644 index 0000000000000..270884cfb4166 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.expected @@ -0,0 +1,263 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query relayResolverEdgeToInterfaceQuery { + cat { + description + } +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! 
+ description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "relayResolverEdgeToInterfaceQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('TabbyResolver'), + "path": "cat.description" + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "relayResolverEdgeToInterfaceQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": 
"InlineFragment", + "selections": [ + { + "name": "description", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "2f16188f4e1df75d26e540d9144bd2f2", + "id": null, + "metadata": {}, + "name": "relayResolverEdgeToInterfaceQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Persian__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Persian__id.graphql'), require('PersianResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Tabby__id" + }, + "kind": 
"RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Tabby__id.graphql'), require('TabbyResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Tabby", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Tabby", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.graphql new file mode 100644 index 0000000000000..17ca411380282 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.graphql @@ -0,0 +1,30 @@ +# relay-resolver-enable-interface-output-type + +query relayResolverEdgeToInterfaceQuery { + cat { + description + } +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! 
+ description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.expected new file mode 100644 index 0000000000000..12cb4e911206c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.expected @@ -0,0 +1,165 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment relayResolverFragmentOnInterfaceFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! 
+ description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Persian__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Persian__id.graphql'), require('PersianResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Tabby__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Tabby__id.graphql'), require('TabbyResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Tabby", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + 
], + "type": "Tabby", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "relayResolverFragmentOnInterfaceFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('TabbyResolver'), + "path": "description" + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + } + ] + } + ], + "type": "Cat", + "abstractKey": "__isCat" +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.graphql new file mode 100644 index 0000000000000..af1e2215ce87c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.graphql @@ -0,0 +1,24 @@ +# relay-resolver-enable-interface-output-type + +fragment relayResolverFragmentOnInterfaceFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! 
+ description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected index 393146f2fa571..0b3d47cad5377 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected @@ -21,26 +21,6 @@ extend type Query { ) } ==================================== OUTPUT =================================== -{ - "kind": "SplitOperation", - "metadata": {}, - "name": "Query__client_types$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -} - { "fragment": { "argumentDefinitions": [], @@ -53,18 +33,19 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "MyClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, "fragment": null, "kind": "RelayLiveResolver", "name": "client_types", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapperLive(require('ClientTypeResolver'), '__relay_model_instance', false), + "resolverModule": require('ClientTypeResolver'), "path": "client_types", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "MyClientType", - "plural": false, - "normalizationNode": require('Query__client_types$normalization.graphql') + "plural": false } }, "linkedField": { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.expected 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.expected new file mode 100644 index 0000000000000..2177a6f9330fb --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.expected @@ -0,0 +1,167 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment relayResolverPluralFragmentOnInterfaceFragment on Cat @relay(plural: true) { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Persian__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Persian__id.graphql'), require('PersianResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Persian__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + 
"alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Persian", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Tabby__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Tabby__id.graphql'), require('TabbyResolver'), 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Tabby", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Tabby__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Tabby", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "plural": true + }, + "name": "relayResolverPluralFragmentOnInterfaceFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('TabbyResolver'), + "path": "description" + } + ], + "type": "Tabby", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "type": "Persian", + "abstractKey": null + } + ] + } + ], + "type": "Cat", + "abstractKey": "__isCat" +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.graphql 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.graphql new file mode 100644 index 0000000000000..f89b6fc815c6d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.graphql @@ -0,0 +1,24 @@ +# relay-resolver-enable-interface-output-type + +fragment relayResolverPluralFragmentOnInterfaceFragment on Cat @relay(plural: true) { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.expected new file mode 100644 index 0000000000000..dc5ca35b4800d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.expected @@ -0,0 +1,150 @@ +==================================== INPUT ==================================== +# enable_resolver_normalization_ast + +query relayResolverWeakObjectNormalizationAstQuery { + client_type { + __typename + } +} + +# %extensions% + +scalar MyClientTypeModel @__RelayCustomScalar(path: "/path/to/test/fixture/weak-type.js", export_name: "MyClientType") + +type MyClientType @__RelayResolverModel @RelayOutputType @__RelayWeakObject { + 
__relay_model_instance: MyClientTypeModel +} + +extend type Query { + client_type: MyClientType @relay_resolver( + import_path: "./path/to/ClientTypeResolver.js" + has_output_type: true + ) +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "relayResolverWeakObjectNormalizationAstQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "MyClientType", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "client_type", + "resolverModule": require('ClientTypeResolver'), + "path": "client_type", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "MyClientType", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "MyClientType", + "kind": "LinkedField", + "name": "client_type", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "relayResolverWeakObjectNormalizationAstQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_type", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('ClientTypeResolver'), + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "MyClientType", + "kind": "LinkedField", + "name": "client_type", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + "storageKey": 
null + } + } + ] + }, + "params": { + "cacheID": "3cf6ca9eab3b4f1d050739c83709c404", + "id": null, + "metadata": {}, + "name": "relayResolverWeakObjectNormalizationAstQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyClientType____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "MyClientType", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.graphql new file mode 100644 index 0000000000000..db608523a29c7 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.graphql @@ -0,0 +1,22 @@ +# enable_resolver_normalization_ast + +query relayResolverWeakObjectNormalizationAstQuery { + client_type { + __typename + } +} + +# %extensions% + +scalar MyClientTypeModel @__RelayCustomScalar(path: "/path/to/test/fixture/weak-type.js", export_name: "MyClientType") + +type MyClientType @__RelayResolverModel @RelayOutputType @__RelayWeakObject { + __relay_model_instance: MyClientTypeModel +} + +extend type Query { + client_type: MyClientType @relay_resolver( + import_path: "./path/to/ClientTypeResolver.js" + has_output_type: true + ) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected index 3f1bd3caf3563..8d57ad1132e2c 100644 --- 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected @@ -20,26 +20,6 @@ extend type Query { ) } ==================================== OUTPUT =================================== -{ - "kind": "SplitOperation", - "metadata": {}, - "name": "Query__client_types$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -} - { "fragment": { "argumentDefinitions": [], @@ -52,18 +32,19 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "MyClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, "fragment": null, "kind": "RelayResolver", "name": "client_types", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapper(require('ClientTypeResolver'), '__relay_model_instance', true), + "resolverModule": require('ClientTypeResolver'), "path": "client_types", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "MyClientType", - "plural": true, - "normalizationNode": require('Query__client_types$normalization.graphql') + "plural": true } }, "linkedField": { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected index b98eaadfd8cca..873248582a798 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected @@ -20,26 +20,6 @@ extend type Query { ) } ==================================== OUTPUT =================================== -{ 
- "kind": "SplitOperation", - "metadata": {}, - "name": "Query__client_type$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -} - { "fragment": { "argumentDefinitions": [], @@ -52,18 +32,19 @@ extend type Query { { "kind": "ClientEdgeToClientObject", "concreteType": "MyClientType", + "modelResolvers": null, "backingField": { "alias": null, "args": null, "fragment": null, "kind": "RelayResolver", "name": "client_type", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapper(require('ClientTypeResolver'), '__relay_model_instance', false), + "resolverModule": require('ClientTypeResolver'), "path": "client_type", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "MyClientType", - "plural": false, - "normalizationNode": require('Query__client_type$normalization.graphql') + "plural": false } }, "linkedField": { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.expected new file mode 100644 index 0000000000000..638d83516b3d8 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.expected @@ -0,0 +1,38 @@ +==================================== INPUT ==================================== +# expected-to-throw + +fragment relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_PopStarNameResolverFragment_name on ClientType @argumentDefinitions(scale: {type: "Float!"}) { + name + profile_picture(scale: $scale) +} + +query 
relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Query { + ...relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Fragment +} + +fragment relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Fragment on Query { + pop_star { + pop_star_name(scale: $scale) + } +} + + +# %extensions% + +type ClientType { + name: String + profile_picture(scale: Float!): String + pop_star_name(scale: Float!): String @relay_resolver(fragment_name: "relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js", , import_name: "pop_star_name") +} + +extend type Query { + pop_star: ClientType @relay_resolver(import_path: "./path/to/PopStarNameResolver.js", import_name: "pop_star") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Query' references undefined variable: '$scale'. + + relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql:14:26 + 13 │ pop_star { + 14 │ pop_star_name(scale: $scale) + │ ^^^^^^ + 15 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql new file mode 100644 index 0000000000000..f5091be68b3e0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql @@ -0,0 +1,29 @@ +# expected-to-throw + +fragment relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_PopStarNameResolverFragment_name on ClientType @argumentDefinitions(scale: {type: "Float!"}) { + name + profile_picture(scale: $scale) +} + +query 
relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Query { + ...relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Fragment +} + +fragment relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_Fragment on Query { + pop_star { + pop_star_name(scale: $scale) + } +} + + +# %extensions% + +type ClientType { + name: String + profile_picture(scale: Float!): String + pop_star_name(scale: Float!): String @relay_resolver(fragment_name: "relayResolverWithArgsFragmentSpreadUsingUndefinedGlobalVariable_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js", , import_name: "pop_star_name") +} + +extend type Query { + pop_star: ClientType @relay_resolver(import_path: "./path/to/PopStarNameResolver.js", import_name: "pop_star") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args.expected index 0b360a193cb04..0fc2aacb52ce2 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-args.expected @@ -46,7 +46,7 @@ extend type User { "selections": [ { "alias": null, - "args": null, + "args": [], "fragment": { "args": [ { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected index 9e73ca70c54de..dc7676486e1d1 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected @@ -110,6 +110,7 @@ extend 
type User { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientUser", + "modelResolvers": null, "backingField": { "alias": "poppy", "args": null, @@ -123,6 +124,7 @@ extend type User { "resolverModule": require('PopStarNameResolver'), "path": "poppy", "normalizationInfo": { + "kind": "OutputType", "concreteType": "ClientUser", "plural": false, "normalizationNode": require('User__pop_star_name$normalization.graphql') diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected index ffc0f4c494e0a..7c12d598c5f14 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected @@ -292,7 +292,10 @@ fragment relayResolverWithRequiredClientEdge_best_friend_resolver on User { "node" ], "operation": require('ClientEdgeQuery_relayResolverWithRequiredClientEdgeQuery_me__best_friend.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_relayResolverWithRequiredClientEdgeQuery_me__best_friend", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.expected new file mode 100644 index 0000000000000..82cf473bb3b80 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.expected @@ -0,0 +1,40 @@ +==================================== INPUT 
==================================== +# expected-to-throw + +query relayResolverWithUndefinedFieldAndFragmentArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldAndFragmentArgs_user + } +} + +fragment relayResolverWithUndefinedFieldAndFragmentArgs_user on User { + pop_star_name(field_arg: $undefined_field_arg, includeName: $undefined_fragment_arg) +} + +fragment relayResolverWithUndefinedFieldAndFragmentArgsFragment_name on User + @argumentDefinitions(includeName: {type: "Boolean!"}) { + __typename + name @include(if: $includeName) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int, includeName: Boolean!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFieldAndFragmentArgsFragment_name", import_path: "PopStarNameResolver") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFieldAndFragmentArgsQuery' references undefined variables: '$undefined_field_arg', '$undefined_fragment_arg'. 
+ + relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql:10:28 + 9 │ fragment relayResolverWithUndefinedFieldAndFragmentArgs_user on User { + 10 │ pop_star_name(field_arg: $undefined_field_arg, includeName: $undefined_fragment_arg) + │ ^^^^^^^^^^^^^^^^^^^^ + 11 │ } + + ℹ︎ related location + + relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql:10:63 + 9 │ fragment relayResolverWithUndefinedFieldAndFragmentArgs_user on User { + 10 │ pop_star_name(field_arg: $undefined_field_arg, includeName: $undefined_fragment_arg) + │ ^^^^^^^^^^^^^^^^^^^^^^^ + 11 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql new file mode 100644 index 0000000000000..d8f9e242cbffd --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql @@ -0,0 +1,23 @@ +# expected-to-throw + +query relayResolverWithUndefinedFieldAndFragmentArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldAndFragmentArgs_user + } +} + +fragment relayResolverWithUndefinedFieldAndFragmentArgs_user on User { + pop_star_name(field_arg: $undefined_field_arg, includeName: $undefined_fragment_arg) +} + +fragment relayResolverWithUndefinedFieldAndFragmentArgsFragment_name on User + @argumentDefinitions(includeName: {type: "Boolean!"}) { + __typename + name @include(if: $includeName) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int, includeName: Boolean!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFieldAndFragmentArgsFragment_name", import_path: "PopStarNameResolver") +} diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.expected new file mode 100644 index 0000000000000..763b1502dedb7 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsLinkedFieldQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgsLinkedField_PopStar + } +} + +fragment relayResolverWithUndefinedFieldArgsLinkedField_PopStar on User { + pop_star(name: $undefined) @waterfall { + id + } +} +# %extensions% + +extend type User { + pop_star(name: String): User @relay_resolver(import_path: "./path/to/PopStarResolver.js") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFieldArgsLinkedFieldQuery' references undefined variable: '$undefined'. 
+ + relay-resolver-with-undefined-field-args-linked-field.invalid.graphql:10:18 + 9 │ fragment relayResolverWithUndefinedFieldArgsLinkedField_PopStar on User { + 10 │ pop_star(name: $undefined) @waterfall { + │ ^^^^^^^^^^ + 11 │ id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.graphql new file mode 100644 index 0000000000000..9fe8437b819ce --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.graphql @@ -0,0 +1,18 @@ +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsLinkedFieldQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgsLinkedField_PopStar + } +} + +fragment relayResolverWithUndefinedFieldArgsLinkedField_PopStar on User { + pop_star(name: $undefined) @waterfall { + id + } +} +# %extensions% + +extend type User { + pop_star(name: String): User @relay_resolver(import_path: "./path/to/PopStarResolver.js") +} \ No newline at end of file diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.expected new file mode 100644 index 0000000000000..b5ea6b2957f6b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsScalarQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgsScalar_name + } +} + +fragment 
relayResolverWithUndefinedFieldArgsScalar_name on User { + pop_star_name(scale: $scale) +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String @relay_resolver(import_path: "./path/to/PopStarNameResolver.js") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFieldArgsScalarQuery' references undefined variable: '$scale'. + + relay-resolver-with-undefined-field-args-scalar.invalid.graphql:10:24 + 9 │ fragment relayResolverWithUndefinedFieldArgsScalar_name on User { + 10 │ pop_star_name(scale: $scale) + │ ^^^^^^ + 11 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.graphql new file mode 100644 index 0000000000000..5101c9c35f05e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.graphql @@ -0,0 +1,17 @@ +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsScalarQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgsScalar_name + } +} + +fragment relayResolverWithUndefinedFieldArgsScalar_name on User { + pop_star_name(scale: $scale) +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String @relay_resolver(import_path: "./path/to/PopStarNameResolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.expected new file mode 100644 index 0000000000000..ab6d63a772d7a --- /dev/null +++ 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.expected @@ -0,0 +1,30 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgs_PopStarName + } +} + +fragment relayResolverWithUndefinedFieldArgs_PopStarName on User { + pop_star_name(scale: $scale) +} + +fragment relayResolverWithUndefinedFieldArgs_PopStarNameResolverFragment_name on User { + name +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFieldArgs_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFieldArgsQuery' references undefined variable: '$scale'. 
+ + relay-resolver-with-undefined-field-args.invalid.graphql:10:24 + 9 │ fragment relayResolverWithUndefinedFieldArgs_PopStarName on User { + 10 │ pop_star_name(scale: $scale) + │ ^^^^^^ + 11 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.graphql new file mode 100644 index 0000000000000..de59a5f8deecf --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.graphql @@ -0,0 +1,21 @@ +# expected-to-throw + +query relayResolverWithUndefinedFieldArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFieldArgs_PopStarName + } +} + +fragment relayResolverWithUndefinedFieldArgs_PopStarName on User { + pop_star_name(scale: $scale) +} + +fragment relayResolverWithUndefinedFieldArgs_PopStarNameResolverFragment_name on User { + name +} + +# %extensions% + +extend type User { + pop_star_name(scale: Float!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFieldArgs_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.expected new file mode 100644 index 0000000000000..a6213b748dd20 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.expected @@ -0,0 +1,35 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query relayResolverWithUndefinedFragmentArgsLinkedFieldQuery { + node(id: 
"SOME_ID") { + ...relayResolverWithUndefinedFragmentArgsLinkedField_PopStarName + } +} + +fragment relayResolverWithUndefinedFragmentArgsLinkedField_PopStarName on User { + pop_star(includeName: $undefined) @waterfall { + id + } +} + +fragment relayResolverWithUndefinedFragmentArgsLinkedField_PopStarNameResolverFragment_name on User @argumentDefinitions( + includeName: {type: "Boolean!"} + ) { + name @include(if: $includeName) + id +} + +# %extensions% + +extend type User { + pop_star(includeName: Boolean!): User @relay_resolver(fragment_name: "relayResolverWithUndefinedFragmentArgsLinkedField_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFragmentArgsLinkedFieldQuery' references undefined variable: '$undefined'. + + relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql:10:25 + 9 │ fragment relayResolverWithUndefinedFragmentArgsLinkedField_PopStarName on User { + 10 │ pop_star(includeName: $undefined) @waterfall { + │ ^^^^^^^^^^ + 11 │ id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql new file mode 100644 index 0000000000000..d1385ab07e3df --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql @@ -0,0 +1,26 @@ +# expected-to-throw + +query relayResolverWithUndefinedFragmentArgsLinkedFieldQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFragmentArgsLinkedField_PopStarName + } +} + +fragment relayResolverWithUndefinedFragmentArgsLinkedField_PopStarName on User { + pop_star(includeName: $undefined) @waterfall { + id + } 
+} + +fragment relayResolverWithUndefinedFragmentArgsLinkedField_PopStarNameResolverFragment_name on User @argumentDefinitions( + includeName: {type: "Boolean!"} + ) { + name @include(if: $includeName) + id +} + +# %extensions% + +extend type User { + pop_star(includeName: Boolean!): User @relay_resolver(fragment_name: "relayResolverWithUndefinedFragmentArgsLinkedField_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.expected new file mode 100644 index 0000000000000..85f5eb1e420e5 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.expected @@ -0,0 +1,33 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query relayResolverWithUndefinedFragmentArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFragmentArgs_PopStarName + } +} + +fragment relayResolverWithUndefinedFragmentArgs_PopStarName on User { + pop_star_name(includeName: $undefined) +} + +fragment relayResolverWithUndefinedFragmentArgs_PopStarNameResolverFragment_name on User @argumentDefinitions( + includeName: {type: "Boolean!"} + ) { + name + id @include(if: $includeName) +} + +# %extensions% + +extend type User { + pop_star_name(includeName: Boolean!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFragmentArgs_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} +==================================== ERROR ==================================== +✖︎ Operation 'relayResolverWithUndefinedFragmentArgsQuery' references undefined variable: '$undefined'. 
+ + relay-resolver-with-undefined-fragment-args.invalid.graphql:10:30 + 9 │ fragment relayResolverWithUndefinedFragmentArgs_PopStarName on User { + 10 │ pop_star_name(includeName: $undefined) + │ ^^^^^^^^^^ + 11 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.graphql new file mode 100644 index 0000000000000..886df00e5b476 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.graphql @@ -0,0 +1,24 @@ +# expected-to-throw + +query relayResolverWithUndefinedFragmentArgsQuery { + node(id: "SOME_ID") { + ...relayResolverWithUndefinedFragmentArgs_PopStarName + } +} + +fragment relayResolverWithUndefinedFragmentArgs_PopStarName on User { + pop_star_name(includeName: $undefined) +} + +fragment relayResolverWithUndefinedFragmentArgs_PopStarNameResolverFragment_name on User @argumentDefinitions( + includeName: {type: "Boolean!"} + ) { + name + id @include(if: $includeName) +} + +# %extensions% + +extend type User { + pop_star_name(includeName: Boolean!): String @relay_resolver(fragment_name: "relayResolverWithUndefinedFragmentArgs_PopStarNameResolverFragment_name", import_path: "./path/to/PopStarNameResolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.expected new file mode 100644 index 0000000000000..918639f115ae0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.expected @@ -0,0 +1,69 @@ +==================================== INPUT ==================================== +fragment requiredBubblesToClientEdge_user 
on User { + best_friend { + # This will bubble nullability to the parent which is a client edge. + # Under the hood that adds a metadata directive to that node, which + # this test is designed to assert is allowed. + name @required(action: NONE) + } +} + +# %extensions% + +type ClientUser { + name: String +} + +extend type User { + best_friend: ClientUser @relay_resolver(import_path: "BestFriendResolver") +} +==================================== OUTPUT =================================== +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "requiredBubblesToClientEdge_user", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientUser", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "best_friend", + "resolverModule": require('BestFriendResolver'), + "path": "best_friend" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ClientUser", + "kind": "LinkedField", + "name": "best_friend", + "plural": false, + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "action": "NONE", + "path": "best_friend.name" + } + ], + "storageKey": null + } + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.graphql new file mode 100644 index 0000000000000..39f2e15373915 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.graphql @@ -0,0 +1,18 @@ +fragment requiredBubblesToClientEdge_user on User { + best_friend { + # This will bubble nullability to the parent which is a client edge. 
+ # Under the hood that adds a metadata directive to that node, which + # this test is designed to assert is allowed. + name @required(action: NONE) + } +} + +# %extensions% + +type ClientUser { + name: String +} + +extend type User { + best_friend: ClientUser @relay_resolver(import_path: "BestFriendResolver") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.expected new file mode 100644 index 0000000000000..f7ea97d05d96e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.expected @@ -0,0 +1,97 @@ +==================================== INPUT ==================================== +query requiredBubblesToInlineAliasedFragmentQuery { + ... @alias(as: "myAlias") { + me @required(action: NONE) { + id + } + } +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "requiredBubblesToInlineAliasedFragmentQuery", + "selections": [ + { + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + }, + "action": "NONE", + "path": "myAlias.me" + } + ], + "type": null, + "abstractKey": null + }, + "kind": "AliasedInlineFragmentSpread", + "name": "myAlias" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "requiredBubblesToInlineAliasedFragmentQuery", + 
"selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "7d738e6cb20d93761e1572bd3825d7fa", + "id": null, + "metadata": {}, + "name": "requiredBubblesToInlineAliasedFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query requiredBubblesToInlineAliasedFragmentQuery { + me { + id + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.graphql new file mode 100644 index 0000000000000..ac0f8be693d75 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.graphql @@ -0,0 +1,7 @@ +query requiredBubblesToInlineAliasedFragmentQuery { + ... 
@alias(as: "myAlias") { + me @required(action: NONE) { + id + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.expected new file mode 100644 index 0000000000000..88e2bcfe77dc9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.expected @@ -0,0 +1,122 @@ +==================================== INPUT ==================================== +query resolverFieldWithAllFragmentArgsOmitted_Query { + greeting +} + +fragment resolverFieldWithAllFragmentArgsOmitted_Fragment on Query +@argumentDefinitions(task_number: {type: "Int"}) { + task(number: $task_number) { + __typename + } +} + +# %extensions% + +extend type Query { + greeting: String + @relay_resolver( + import_path: "./path/to/Resolver.js" + root_fragment: "resolverFieldWithAllFragmentArgsOmitted_Fragment" + ) +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "resolverFieldWithAllFragmentArgsOmitted_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": require('Resolver'), + "path": "greeting" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "resolverFieldWithAllFragmentArgsOmitted_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + } + ] + }, + "params": { + 
"cacheID": "19967942cdd7d0f409f239fcaf48f375", + "id": null, + "metadata": {}, + "name": "resolverFieldWithAllFragmentArgsOmitted_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. + +{ + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "task_number" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "resolverFieldWithAllFragmentArgsOmitted_Fragment", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "number", + "variableName": "task_number" + } + ], + "concreteType": "Task", + "kind": "LinkedField", + "name": "task", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.graphql new file mode 100644 index 0000000000000..279d6f17d82e2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.graphql @@ -0,0 +1,20 @@ +query resolverFieldWithAllFragmentArgsOmitted_Query { + greeting +} + +fragment resolverFieldWithAllFragmentArgsOmitted_Fragment on Query +@argumentDefinitions(task_number: {type: "Int"}) { + task(number: $task_number) { + __typename + } +} + +# %extensions% + +extend type Query { + greeting: String + @relay_resolver( + import_path: "./path/to/Resolver.js" + root_fragment: "resolverFieldWithAllFragmentArgsOmitted_Fragment" + ) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.expected 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.expected new file mode 100644 index 0000000000000..c45e226ab0c5d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.expected @@ -0,0 +1,71 @@ +==================================== INPUT ==================================== +query resolverFieldWithAllRuntimeArgsOmitted_Query { + greeting +} + +# %extensions% + +extend type Query { + greeting(salutation: String): String + @relay_resolver(import_path: "./path/to/Resolver.js") +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "resolverFieldWithAllRuntimeArgsOmitted_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": [], + "fragment": null, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": require('Resolver'), + "path": "greeting" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "resolverFieldWithAllRuntimeArgsOmitted_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + } + ] + }, + "params": { + "cacheID": "92bc6f17961b64dccbe679004d425389", + "id": null, + "metadata": {}, + "name": "resolverFieldWithAllRuntimeArgsOmitted_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. 
diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.graphql new file mode 100644 index 0000000000000..4308b8a08c632 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.graphql @@ -0,0 +1,10 @@ +query resolverFieldWithAllRuntimeArgsOmitted_Query { + greeting +} + +# %extensions% + +extend type Query { + greeting(salutation: String): String + @relay_resolver(import_path: "./path/to/Resolver.js") +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.expected new file mode 100644 index 0000000000000..f4c2a4f38c0ce --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.expected @@ -0,0 +1,216 @@ +==================================== INPUT ==================================== +query resolverWithRootFragmentOnModelType_Query { + my_type { + my_field + } +} + +fragment resolverWithRootFragmentOnModelTypeRootFragment on MyType { + id +} + +# %extensions% + +type MyType @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: Int + @relay_resolver( + import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js" + fragment_name: "MyType__id" + inject_fragment_data: "id" + import_name: "MyType" + ) + @unselectable( + reason: "This field is intended only for Relay's internal use" + ) +} + +extend type MyType { + my_field: String + @relay_resolver( + import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js" + fragment_name: "resolverWithRootFragmentOnModelTypeRootFragment" + has_output_type: true + import_name: "my_field" + ) +} + +extend type Query { + my_type: MyType +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "resolverWithRootFragmentOnModelType_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "MyType", + "kind": "LinkedField", + "name": "my_type", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "resolverWithRootFragmentOnModelTypeRootFragment" + }, + "kind": "RelayResolver", + "name": "my_field", + "resolverModule": require('terse-relay-resolver-with-root-fragment-on-model').my_field, + "path": "my_type.my_field" + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "resolverWithRootFragmentOnModelType_Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "MyType", + "kind": "LinkedField", + "name": "my_type", + "plural": false, + "selections": [ + { + "name": "my_field", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": 
null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "MyType", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + } + ] + }, + "params": { + "cacheID": "6a469c8af9ccbc6879fcb39a2bff5b2e", + "id": null, + "metadata": {}, + "name": "resolverWithRootFragmentOnModelType_Query", + "operationKind": "query", + "text": null + } +} + +QUERY: + +Query Text is Empty. + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyType____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "MyType__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('MyType__id.graphql'), require('terse-relay-resolver-with-root-fragment-on-model').MyType, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "MyType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "MyType__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "MyType", + "abstractKey": null +} + +{ + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "resolverWithRootFragmentOnModelTypeRootFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "MyType", + "abstractKey": null +} diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.graphql new file mode 100644 index 0000000000000..cc8b26e22c84e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.graphql @@ -0,0 +1,39 @@ +query resolverWithRootFragmentOnModelType_Query { + my_type { + my_field + } +} + +fragment resolverWithRootFragmentOnModelTypeRootFragment on MyType { + id +} + +# %extensions% + +type MyType @__RelayResolverModel { + id: ID! + __relay_model_instance: Int + @relay_resolver( + import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js" + fragment_name: "MyType__id" + inject_fragment_data: "id" + import_name: "MyType" + ) + @unselectable( + reason: "This field is intended only for Relay's internal use" + ) +} + +extend type MyType { + my_field: String + @relay_resolver( + import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js" + fragment_name: "resolverWithRootFragmentOnModelTypeRootFragment" + has_output_type: true + import_name: "my_field" + ) +} + +extend type Query { + my_type: MyType +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected index 784d1c48d1698..04e8466277317 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected @@ -21,16 +21,16 @@ query 
selectionSetConflictConflictingListAndNonListTypesQuery { ==================================== ERROR ==================================== ✖︎ Field 'same_alias' is ambiguous because it references fields with different types: 'actors' with type '[Actor]' and 'actorCount' with type 'Int' - selection_set_conflict_conflicting_list_and_non_list_types.graphql:8:25 + selection_set_conflict_conflicting_list_and_non_list_types.graphql:8:13 7 │ ... on Story { 8 │ same_alias: actors { - │ ^^^^^^ + │ ^^^^^^^^^^ 9 │ id ℹ︎ the other field - selection_set_conflict_conflicting_list_and_non_list_types.graphql:13:25 + selection_set_conflict_conflicting_list_and_non_list_types.graphql:13:13 12 │ ... on PhotoStory { 13 │ same_alias: actorCount - │ ^^^^^^^^^^ + │ ^^^^^^^^^^ 14 │ } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected index ba20505c02487..c8fa80aac6054 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected @@ -21,16 +21,16 @@ query selectionSetConflictConflictingListAndNonListTypesOppositeOrderQuery { ==================================== ERROR ==================================== ✖︎ Field 'same_alias' is ambiguous because it references fields with different types: 'actorCount' with type 'Int' and 'actors' with type '[Actor]' - selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql:8:25 + selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql:8:13 7 │ ... 
on PhotoStory { 8 │ same_alias: actorCount - │ ^^^^^^^^^^ + │ ^^^^^^^^^^ 9 │ } ℹ︎ the other field - selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql:11:25 + selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql:11:13 10 │ ... on Story { 11 │ same_alias: actors { - │ ^^^^^^ + │ ^^^^^^^^^^ 12 │ id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected index be19d3923866b..9c25de537f05c 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected @@ -5,7 +5,7 @@ query selectionSetConflictInconsistentStreamUsage1Query { ... on User { friends { ... on FriendsConnection { - edges @stream(label: "hdijf", initial_count: 1) { + edges @stream(label: "hdijf", initialCount: 1) { node { name } @@ -21,11 +21,11 @@ query selectionSetConflictInconsistentStreamUsage1Query { } } ==================================== ERROR ==================================== -✖︎ Field 'edges' is marked with @stream in one place, and not marked in another place. Please use alias to distinguish the 2 fields.' +✖︎ Field 'edges' is marked with @stream in one place, and not marked in another place. Please use an alias to distinguish the two fields. selection_set_conflict_inconsistent_stream_usage_1.graphql:7:11 6 │ ... 
on FriendsConnection { - 7 │ edges @stream(label: "hdijf", initial_count: 1) { + 7 │ edges @stream(label: "hdijf", initialCount: 1) { │ ^^^^^ 8 │ node { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.graphql index 4e97124b440fd..ae506dfaf7429 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.graphql @@ -4,7 +4,7 @@ query selectionSetConflictInconsistentStreamUsage1Query { ... on User { friends { ... on FriendsConnection { - edges @stream(label: "hdijf", initial_count: 1) { + edges @stream(label: "hdijf", initialCount: 1) { node { name } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected index 29d689b89243b..5881787317707 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected @@ -5,13 +5,13 @@ query selectionSetConflictInconsistentStreamUsage2Query { ... on User { friends { ... 
on FriendsConnection { - edges @stream(label: "hdijf", initial_count: 1) { + edges @stream(label: "hdijf", initialCount: 1) { node { name } } } - edges @stream(label: "hkjdf", initial_count: 2) { + edges @stream(label: "hkjdf", initialCount: 2) { node { id } @@ -21,11 +21,11 @@ query selectionSetConflictInconsistentStreamUsage2Query { } } ==================================== ERROR ==================================== -✖︎ Field 'edges' is marked with @stream in multiple places. Please use an alias to distinguish them' +✖︎ Field 'edges' is marked with @stream in multiple places. Please use an alias to distinguish them. selection_set_conflict_inconsistent_stream_usage_2.graphql:7:11 6 │ ... on FriendsConnection { - 7 │ edges @stream(label: "hdijf", initial_count: 1) { + 7 │ edges @stream(label: "hdijf", initialCount: 1) { │ ^^^^^ 8 │ node { @@ -33,6 +33,6 @@ query selectionSetConflictInconsistentStreamUsage2Query { selection_set_conflict_inconsistent_stream_usage_2.graphql:13:9 12 │ } - 13 │ edges @stream(label: "hkjdf", initial_count: 2) { + 13 │ edges @stream(label: "hkjdf", initialCount: 2) { │ ^^^^^ 14 │ node { diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.graphql index e69ddcbf87db8..1042d77fa9bcf 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.graphql @@ -4,13 +4,13 @@ query selectionSetConflictInconsistentStreamUsage2Query { ... on User { friends { ... 
on FriendsConnection { - edges @stream(label: "hdijf", initial_count: 1) { + edges @stream(label: "hdijf", initialCount: 1) { node { name } } } - edges @stream(label: "hkjdf", initial_count: 2) { + edges @stream(label: "hkjdf", initialCount: 2) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected index 7c38324a675dd..2317212c8e47c 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected @@ -15,35 +15,35 @@ query selectionSetConflictMultipleConflictsQuery { ==================================== ERROR ==================================== ✖︎ Field 'same_alias' is ambiguous because it references fields with different types: 'id' with type 'ID!' and 'profile_picture' with type 'Image' - selection_set_conflict_multiple_conflicts.graphql:4:17 + selection_set_conflict_multiple_conflicts.graphql:4:5 3 │ me { 4 │ same_alias: id - │ ^^ + │ ^^^^^^^^^^ 5 │ same_alias: name ℹ︎ the other field - selection_set_conflict_multiple_conflicts.graphql:6:17 + selection_set_conflict_multiple_conflicts.graphql:6:5 5 │ same_alias: name 6 │ same_alias: profile_picture { - │ ^^^^^^^^^^^^^^^ + │ ^^^^^^^^^^ 7 │ size: width ✖︎ Field 'same_alias' is ambiguous because it references fields with different types: 'id' with type 'ID!' 
and 'profile_picture' with type 'Image' - selection_set_conflict_multiple_conflicts.graphql:4:17 + selection_set_conflict_multiple_conflicts.graphql:4:5 3 │ me { 4 │ same_alias: id - │ ^^ + │ ^^^^^^^^^^ 5 │ same_alias: name ℹ︎ the other field - selection_set_conflict_multiple_conflicts.graphql:9:17 + selection_set_conflict_multiple_conflicts.graphql:9:5 8 │ } 9 │ same_alias: profile_picture { - │ ^^^^^^^^^^^^^^^ + │ ^^^^^^^^^^ 10 │ size: height diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected index caf8639343dce..ceab8647d5a28 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected @@ -3,7 +3,7 @@ query selectionSetConflictStreamOnNodesOrEdgesQuery { me { ... 
on User { friends { - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } @@ -168,7 +168,7 @@ QUERY: query selectionSetConflictStreamOnNodesOrEdgesQuery { me { friends { - edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesQuery$stream$b", initial_count: 1) { + edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesQuery$stream$b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.graphql index db0148987fcb8..e8902568b4a2d 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.graphql @@ -2,7 +2,7 @@ query selectionSetConflictStreamOnNodesOrEdgesQuery { me { ... 
on User { friends { - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected index da48ec2941e11..0a0c7a4be544c 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected @@ -9,7 +9,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoQuery { hasNextPage } } - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } @@ -213,7 +213,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoQuery { pageInfo { hasNextPage } - edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoQuery$stream$b", initial_count: 1) { + edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoQuery$stream$b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql index 5622f4c32e72a..2b932153c1396 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql +++ 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql @@ -8,7 +8,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoQuery { hasNextPage } } - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected index 072e3d086aa5e..c9718e49c0623 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected @@ -9,7 +9,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoAndPageInfoA hasNextPage } } - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } @@ -213,7 +213,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoAndPageInfoA pagination: pageInfo { hasNextPage } - edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoAndPageInfoAliasQuery$stream$b", initial_count: 1) { + edges @stream(label: "selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoAndPageInfoAliasQuery$stream$b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql index c956d3b86fd3b..52fb05a8580c7 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql @@ -8,7 +8,7 @@ query selectionSetConflictStreamOnNodesOrEdgesWithoutDeferOnPageInfoAndPageInfoA hasNextPage } } - edges @stream(label: "b", initial_count: 1) { + edges @stream(label: "b", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected index d2dc62828f103..e312c17dc2a5f 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected @@ -3,7 +3,7 @@ query selectionSetConflictValidStreamQuery { me { ... 
on User { friends { - edges @stream(label: "jkdhg", initial_count: 0) { + edges @stream(label: "jkdhg", initialCount: 0) { node { id } @@ -168,7 +168,7 @@ QUERY: query selectionSetConflictValidStreamQuery { me { friends { - edges @stream(label: "selectionSetConflictValidStreamQuery$stream$jkdhg", initial_count: 0) { + edges @stream(label: "selectionSetConflictValidStreamQuery$stream$jkdhg", initialCount: 0) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.graphql index 414122e99e447..b1f17e144839e 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.graphql @@ -2,7 +2,7 @@ query selectionSetConflictValidStreamQuery { me { ... 
on User { friends { - edges @stream(label: "jkdhg", initial_count: 0) { + edges @stream(label: "jkdhg", initialCount: 0) { node { id } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.expected index af5c86cbde306..39bf05c6bfaa7 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.expected @@ -7,7 +7,7 @@ query streamAndHandleQuery { fragment streamAndHandleFragment on Feedback { actors - @stream(label: "actors", if: true, initial_count: 0) + @stream(label: "actors", if: true, initialCount: 0) @__clientField(handle: "actors_handler") { name @__clientField(handle: "name_handler") } @@ -170,7 +170,7 @@ query streamAndHandleQuery { } fragment streamAndHandleFragment on Feedback { - actors @stream(label: "streamAndHandleFragment$stream$actors", if: true, initial_count: 0) { + actors @stream(label: "streamAndHandleFragment$stream$actors", if: true, initialCount: 0) { __typename name id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.graphql index 0be41830dbe54..010ca4457f0cf 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-and-handle.graphql @@ -6,7 +6,7 @@ query streamAndHandleQuery { fragment streamAndHandleFragment on Feedback { actors - @stream(label: "actors", if: true, initial_count: 0) + @stream(label: "actors", if: true, initialCount: 0) @__clientField(handle: "actors_handler") { name @__clientField(handle: "name_handler") } diff --git 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection-conditional.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection-conditional.expected index 36f31d1241775..45f53b7d6a94e 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection-conditional.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection-conditional.expected @@ -360,7 +360,7 @@ query streamConnectionConditionalQuery( id ... on Story { comments(first: 10) { - edges @stream(label: "streamConnectionConditionalQuery$stream$NodeQuery_comments", if: $cond, initial_count: 0, use_customized_batch: $cond) { + edges @stream(label: "streamConnectionConditionalQuery$stream$NodeQuery_comments", if: $cond, initialCount: 0, useCustomizedBatch: $cond) { node { __typename id diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection.expected index fccdf311a5906..1126866ef06df 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream-connection.expected @@ -400,7 +400,7 @@ query streamConnection_NodeQuery( id ... 
on Story { comments(first: 10) { - edges @stream(label: "streamConnection_NodeQuery$stream$NodeQuery_comments", initial_count: 0) { + edges @stream(label: "streamConnection_NodeQuery$stream$NodeQuery_comments", initialCount: 0) { node { actor { __typename diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.expected index 2669b06cf9944..ec83f7cc0765b 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.expected @@ -19,27 +19,27 @@ fragment streamIfArgumentsFragment on User setToFalse: { type: "Boolean", defaultValue: false } ) { withDefaultTrue: tasks - @stream(initial_count: 1, if: $defaultsToTrue, label: "defaultTrue") { + @stream(initialCount: 1, if: $defaultsToTrue, label: "defaultTrue") { __typename } withDefaultFalse: tasks - @stream(initial_count: 1, if: $defaultsToFalse, label: "defaultFalse") { + @stream(initialCount: 1, if: $defaultsToFalse, label: "defaultFalse") { __typename } setToTrue: tasks - @stream(initial_count: 1, if: $setToTrue, label: "setToTrue") { + @stream(initialCount: 1, if: $setToTrue, label: "setToTrue") { __typename } setToFalse: tasks - @stream(initial_count: 1, if: $setToFalse, label: "setToFalse") { + @stream(initialCount: 1, if: $setToFalse, label: "setToFalse") { __typename } withValueFromQueryDirectly: tasks - @stream(initial_count: 1, if: $valueFromQuery, label: "fromQueryDirectly") { + @stream(initialCount: 1, if: $valueFromQuery, label: "fromQueryDirectly") { __typename } withValueFromQueryViaArgDef: tasks - @stream(initial_count: 1, if: $setToValue, label: "fromQueryViaArg") { + @stream(initialCount: 1, if: $setToValue, label: "fromQueryViaArg") { __typename } } @@ -283,22 +283,22 @@ query streamIfArgumentsQuery( } fragment 
streamIfArgumentsFragment_39RTKZ on User { - withDefaultTrue: tasks @stream(label: "streamIfArgumentsFragment$stream$defaultTrue", if: true, initial_count: 1) { + withDefaultTrue: tasks @stream(label: "streamIfArgumentsFragment$stream$defaultTrue", if: true, initialCount: 1) { __typename } withDefaultFalse: tasks { __typename } - setToTrue: tasks @stream(label: "streamIfArgumentsFragment$stream$setToTrue", if: true, initial_count: 1) { + setToTrue: tasks @stream(label: "streamIfArgumentsFragment$stream$setToTrue", if: true, initialCount: 1) { __typename } setToFalse: tasks { __typename } - withValueFromQueryDirectly: tasks @stream(label: "streamIfArgumentsFragment$stream$fromQueryDirectly", if: $valueFromQuery, initial_count: 1) { + withValueFromQueryDirectly: tasks @stream(label: "streamIfArgumentsFragment$stream$fromQueryDirectly", if: $valueFromQuery, initialCount: 1) { __typename } - withValueFromQueryViaArgDef: tasks @stream(label: "streamIfArgumentsFragment$stream$fromQueryViaArg", if: $valueFromQuery, initial_count: 1) { + withValueFromQueryViaArgDef: tasks @stream(label: "streamIfArgumentsFragment$stream$fromQueryViaArg", if: $valueFromQuery, initialCount: 1) { __typename } } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.graphql index 0ab8458f480f9..ffc1e2ae9280a 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.graphql +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/stream_if_arguments.graphql @@ -18,27 +18,27 @@ fragment streamIfArgumentsFragment on User setToFalse: { type: "Boolean", defaultValue: false } ) { withDefaultTrue: tasks - @stream(initial_count: 1, if: $defaultsToTrue, label: "defaultTrue") { + @stream(initialCount: 1, if: $defaultsToTrue, label: "defaultTrue") { __typename } withDefaultFalse: tasks - 
@stream(initial_count: 1, if: $defaultsToFalse, label: "defaultFalse") { + @stream(initialCount: 1, if: $defaultsToFalse, label: "defaultFalse") { __typename } setToTrue: tasks - @stream(initial_count: 1, if: $setToTrue, label: "setToTrue") { + @stream(initialCount: 1, if: $setToTrue, label: "setToTrue") { __typename } setToFalse: tasks - @stream(initial_count: 1, if: $setToFalse, label: "setToFalse") { + @stream(initialCount: 1, if: $setToFalse, label: "setToFalse") { __typename } withValueFromQueryDirectly: tasks - @stream(initial_count: 1, if: $valueFromQuery, label: "fromQueryDirectly") { + @stream(initialCount: 1, if: $valueFromQuery, label: "fromQueryDirectly") { __typename } withValueFromQueryViaArgDef: tasks - @stream(initial_count: 1, if: $setToValue, label: "fromQueryViaArg") { + @stream(initialCount: 1, if: $setToValue, label: "fromQueryViaArg") { __typename } } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.expected new file mode 100644 index 0000000000000..1b211ccf6f12f --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +# expected-to-throw +query validateGlobalVariablesUndefinedQuery { + me { + ...validateGlobalVariablesUndefined_user + } +} + +fragment validateGlobalVariablesUndefined_user on User { + id + ... @include(if: $condition) { + lastName + } +} +==================================== ERROR ==================================== +✖︎ Operation 'validateGlobalVariablesUndefinedQuery' references undefined variable: '$condition'. + + validate-global-variables-undefined.invalid.graphql:10:20 + 9 │ id + 10 │ ... 
@include(if: $condition) { + │ ^^^^^^^^^^ + 11 │ lastName diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.graphql new file mode 100644 index 0000000000000..e3dcc8efd83b3 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.graphql @@ -0,0 +1,13 @@ +# expected-to-throw +query validateGlobalVariablesUndefinedQuery { + me { + ...validateGlobalVariablesUndefined_user + } +} + +fragment validateGlobalVariablesUndefined_user on User { + id + ... @include(if: $condition) { + lastName + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/mod.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts/mod.rs deleted file mode 100644 index ddd7afaa88176..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts/mod.rs +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::collections::HashMap; -use std::sync::Arc; - -use common::ConsoleLogger; -use common::FeatureFlag; -use common::FeatureFlags; -use common::NamedItem; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build_ir_with_extra_features; -use graphql_ir::BuilderOptions; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentVariablesSemantic; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::RelayMode; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_full_operation; -use intern::string_key::Intern; -use relay_codegen::build_request_params; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::print_request; -use relay_codegen::JsModuleFormat; -use relay_compiler::find_duplicates; -use relay_compiler::validate; -use relay_compiler::ConfigFileProject; -use relay_compiler::ProjectConfig; -use relay_config::NonNodeIdFieldsConfig; -use relay_config::SchemaConfig; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::apply_transforms; -use relay_transforms::DIRECTIVE_SPLIT_OPERATION; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - if fixture.content.contains("%TODO%") { - if fixture.content.contains("expected-to-throw") { - return Err("TODO".to_string()); - } - return Ok("TODO".to_string()); - } - let hash_supported_argument_allowlist = vec!["UserNameRenderer".intern()]; - let no_inline_allowlist = vec![ - "autoFilledArgumentOnMatchPlainUserNameRenderer_name".intern(), - "autoFilledArgumentOnMatchMarkdownUserNameRenderer_name".intern(), - "fragmentWithMatchDirective_PlainUserNameRenderer_name".intern(), - 
"fragmentWithMatchDirective_MarkdownUserNameRenderer_name".intern(), - "matchFieldOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), - "matchOnChildOfPlural_PlainUserNameRenderer_name".intern(), - "matchOnChildOfPlural_MarkdownUserNameRenderer_name".intern(), - "moduleDeduping_frag".intern(), - "moduleInInlineFragment_MarkdownUserNameRenderer_name".intern(), - "moduleOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), - "moduleOverlapAcrossDocuments_PlainUserNameRenderer_name".intern(), - "moduleOverlapAcrossDocuments_MarkdownUserNameRenderer_name".intern(), - "moduleOverlapWithinDocument_MarkdownUserNameRenderer_name".intern(), - "moduleOverlapWithinDocument_PlainUserNameRenderer_name".intern(), - "moduleOverlapWithinDocument_MarkdownUserNameRenderer_name".intern(), - "moduleWithDefer_MarkdownUserNameRenderer_name".intern(), - "multipleModulesDifferentComponent_MarkdownUserNameRenderer_name".intern(), - "multipleModulesDifferentFragment_MarkdownUserNameRenderer_name".intern(), - "multipleModulesDifferentFragment_OtherMarkdownUserNameRenderer_name".intern(), - "multipleModulesSameSelections_MarkdownUserNameRenderer_name".intern(), - "multipleModulesWithKey_PlainUserNameRenderer_name".intern(), - "multipleModulesWithKey_MarkdownUserNameRenderer_name".intern(), - "multipleModulesWithoutKey_PlainUserNameRenderer_name".intern(), - "multipleModulesWithoutKey_MarkdownUserNameRenderer_name".intern(), - "noInlineFragmentAndModule_parent".intern(), - "queryWithAndWithoutModuleDirective_MarkdownUserNameRenderer_name".intern(), - "queryWithConditionalModule_MarkdownUserNameRenderer_name".intern(), - "queryWithMatchDirective_PlainUserNameRenderer_name".intern(), - "queryWithMatchDirective_MarkdownUserNameRenderer_name".intern(), - "queryWithMatchDirectiveNoInlineExperimental_PlainUserNameRenderer_name".intern(), - "queryWithMatchDirectiveNoInlineExperimental_MarkdownUserNameRenderer_name".intern(), - 
"queryWithMatchDirectiveWithExtraArgument_PlainUserNameRenderer_name".intern(), - "queryWithMatchDirectiveWithExtraArgument_MarkdownUserNameRenderer_name".intern(), - "queryWithMatchDirectiveWithTypename_PlainUserNameRenderer_name".intern(), - "queryWithMatchDirectiveWithTypename_MarkdownUserNameRenderer_name".intern(), - "queryWithModuleDirective_MarkdownUserNameRenderer_name".intern(), - "queryWithModuleDirectiveAndArguments_MarkdownUserNameRenderer_name".intern(), - "queryWithModuleDirectiveAndArguments_PlainUserNameRenderer_name".intern(), - "conflictingSelectionsWithNoInline_fragment".intern(), - "providedVariableNoInlineFragment".intern(), - "noInlineFragment_parent".intern(), - "noInlineAbstractFragment_parent".intern(), - "queryWithRelayClientComponentWithArgumentDefinitions_ClientComponentFragment".intern(), - "queryWithRelayClientComponent_ClientComponentFragment".intern(), - ]; - - let feature_flags = FeatureFlags { - enable_flight_transform: true, - hash_supported_argument: FeatureFlag::Limited { - allowlist: hash_supported_argument_allowlist.into_iter().collect(), - }, - // test SplitOperations that do not use @no-inline D28460294 - no_inline: FeatureFlag::Limited { - allowlist: no_inline_allowlist.into_iter().collect(), - }, - enable_relay_resolver_transform: true, - enable_3d_branch_arg_generation: true, - actor_change_support: FeatureFlag::Enabled, - text_artifacts: FeatureFlag::Disabled, - enable_client_edges: FeatureFlag::Enabled, - skip_printing_nulls: FeatureFlag::Disabled, - enable_fragment_aliases: FeatureFlag::Enabled, - compact_query_text: FeatureFlag::Disabled, - emit_normalization_nodes_for_client_edges: true, - relay_resolver_enable_output_type: FeatureFlag::Disabled, - }; - - let default_project_config = ProjectConfig { - name: "test".intern(), - feature_flags: Arc::new(feature_flags), - js_module_format: JsModuleFormat::Haste, - schema_config: SchemaConfig { - non_node_id_fields: Some(NonNodeIdFieldsConfig { - allowed_id_types: { - let 
mut mappings = HashMap::new(); - - mappings.insert("NonNode".intern(), "String".intern()); - - mappings - }, - }), - ..Default::default() - }, - ..Default::default() - }; - - // Adding %project_config section on top of the fixture will allow - // us to validate output changes with different configurations - let parts: Vec<_> = fixture.content.split("%project_config%").collect(); - let (project_config, other_parts) = match parts.as_slice() { - [fixture_content, project_config_str] => ( - { - let config_file_project: ConfigFileProject = - serde_json::from_str(project_config_str).unwrap(); - ProjectConfig { - schema_config: config_file_project.schema_config, - typegen_config: config_file_project.typegen_config, - module_import_config: config_file_project.module_import_config, - feature_flags: config_file_project - .feature_flags - .map_or(default_project_config.feature_flags, |flags| { - Arc::new(flags) - }), - js_module_format: config_file_project.js_module_format, - ..default_project_config - } - }, - fixture_content.split("%extensions%").collect::>(), - ), - [fixture_content] => ( - default_project_config, - fixture_content.split("%extensions%").collect::>(), - ), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let (base, schema) = match other_parts.as_slice() { - [base, extensions] => (base, get_test_schema_with_extensions(extensions)), - [base] => (base, get_test_schema()), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let ast = parse_executable(base, source_location) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - find_duplicates(&ast.definitions, &[]) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let ir_result = build_ir_with_extra_features( - &schema, - &ast.definitions, - &BuilderOptions { - allow_undefined_fragment_spreads: false, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: 
Some(RelayMode), - default_anonymous_operation_name: None, - }, - ); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - validate(&program, &project_config, &None) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - // TODO pass base fragment names - let programs = apply_transforms( - &project_config, - Arc::new(program), - Default::default(), - Arc::new(ConsoleLogger), - None, - None, - ) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let mut operations: Vec<&std::sync::Arc> = - programs.normalization.operations().collect(); - operations.sort_by_key(|operation| operation.name.item.0); - let result = operations - .into_iter() - .map(|operation| { - if operation - .directives - .named(*DIRECTIVE_SPLIT_OPERATION) - .is_some() - { - let mut import_statements = Default::default(); - let operation = - print_operation(&schema, operation, &project_config, &mut import_statements); - format!("{}{}", import_statements, operation) - } else { - let name = operation.name.item.0; - let print_operation_node = programs - .operation_text - .operation(OperationDefinitionName(name)); - let text = print_operation_node.map_or_else( - || "Query Text is Empty.".to_string(), - |print_operation_node| { - print_full_operation( - &programs.operation_text, - print_operation_node, - Default::default(), - ) - }, - ); - - let reader_operation = programs - .reader - .operation(OperationDefinitionName(name)) - .expect("a reader fragment should be generated for this operation"); - let operation_fragment = FragmentDefinition { - name: reader_operation.name.map(|x| FragmentDefinitionName(x.0)), - variable_definitions: reader_operation.variable_definitions.clone(), - selections: reader_operation.selections.clone(), - used_global_variables: Default::default(), - directives: 
reader_operation.directives.clone(), - type_condition: reader_operation.type_, - }; - let mut import_statements = Default::default(); - let request_parameters = build_request_params(operation); - let request = print_request( - &schema, - operation, - &operation_fragment, - request_parameters, - &project_config, - &mut import_statements, - ); - format!("{}{}\n\nQUERY:\n\n{}", import_statements, request, text) - } - }) - .chain({ - let mut fragments: Vec<&std::sync::Arc> = - programs.reader.fragments().collect(); - fragments.sort_by_key(|fragment| fragment.name.item); - fragments.into_iter().map(|fragment| { - let mut import_statements = Default::default(); - let fragment = - print_fragment(&schema, fragment, &project_config, &mut import_statements); - format!("{}{}", import_statements, fragment) - }) - }) - .collect::>(); - Ok(result.join("\n\n")) -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_test.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_test.rs index 3ca16d1dfe73b..2bafb2bfb6fb7 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_test.rs +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod compile_relay_artifacts; @@ -12,1738 +12,2004 @@ mod compile_relay_artifacts; use compile_relay_artifacts::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn abstract_type_refinement() { +#[tokio::test] +async fn abstract_type_refinement() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement.expected"); - test_fixture(transform_fixture, "abstract-type-refinement.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_fragment_spread() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_fragment_spread() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_fragment_spread_conditional() { +#[tokio::test] +async fn 
abstract_type_refinement_dont_flatten_type_discriminator_fragment_spread_conditional() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-fragment-spread-conditional.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_inline_fragment() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment.expected", input, expected).await; } -#[test] -fn 
abstract_type_refinement_dont_flatten_type_discriminator_inline_fragment_conditional() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_inline_fragment_conditional() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-inline-fragment-conditional.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread_within_inline_fragment() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread_within_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread_within_inline_fragment_different_fields() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_nested_fragment_spread_within_inline_fragment_different_fields() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-fragment-spread-within-inline-fragment-different-fields.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_flatten_type_discriminator_nested_inline_fragment() { +#[tokio::test] +async fn abstract_type_refinement_dont_flatten_type_discriminator_nested_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-flatten-type-discriminator-nested-inline-fragment.expected", input, expected).await; } -#[test] -fn 
abstract_type_refinement_dont_skip_type_discriminator_fragment_spread() { +#[tokio::test] +async fn abstract_type_refinement_dont_skip_type_discriminator_fragment_spread() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-fragment-spread.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_dont_skip_type_discriminator_inline_fragment() { +#[tokio::test] +async fn abstract_type_refinement_dont_skip_type_discriminator_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-inline-fragment.expected", input, expected).await; } -#[test] -fn 
abstract_type_refinement_dont_skip_type_discriminator_when_identical_selections() { +#[tokio::test] +async fn abstract_type_refinement_dont_skip_type_discriminator_when_identical_selections() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-dont-skip-type-discriminator-when-identical-selections.expected", input, expected).await; } -#[test] -fn abstract_type_refinement_no_unnecessary_type_discriminator_under_condition_incorrect() { +#[tokio::test] +async fn abstract_type_refinement_no_unnecessary_type_discriminator_under_condition_incorrect() { let input = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.expected"); - test_fixture(transform_fixture, "abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.graphql", "compile_relay_artifacts/fixtures/abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.graphql", 
"compile_relay_artifacts/fixtures/abstract-type-refinement-no-unnecessary-type-discriminator-under-condition_incorrect.expected", input, expected).await; } -#[test] -fn actor_change_simple_query() { +#[tokio::test] +async fn actor_change_simple_query() { let input = include_str!("compile_relay_artifacts/fixtures/actor-change-simple-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/actor-change-simple-query.expected"); - test_fixture(transform_fixture, "actor-change-simple-query.graphql", "compile_relay_artifacts/fixtures/actor-change-simple-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change-simple-query.graphql", "compile_relay_artifacts/fixtures/actor-change-simple-query.expected", input, expected).await; } -#[test] -fn alias_same_as_name() { +#[tokio::test] +async fn alias_same_as_name() { let input = include_str!("compile_relay_artifacts/fixtures/alias-same-as-name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/alias-same-as-name.expected"); - test_fixture(transform_fixture, "alias-same-as-name.graphql", "compile_relay_artifacts/fixtures/alias-same-as-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "alias-same-as-name.graphql", "compile_relay_artifacts/fixtures/alias-same-as-name.expected", input, expected).await; } -#[test] -fn append_edge() { +#[tokio::test] +async fn append_edge() { let input = include_str!("compile_relay_artifacts/fixtures/append-edge.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/append-edge.expected"); - test_fixture(transform_fixture, "append-edge.graphql", "compile_relay_artifacts/fixtures/append-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-edge.graphql", "compile_relay_artifacts/fixtures/append-edge.expected", input, expected).await; } -#[test] -fn append_edge_on_interface() { +#[tokio::test] +async fn append_edge_on_interface() { let input = 
include_str!("compile_relay_artifacts/fixtures/append-edge-on-interface.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/append-edge-on-interface.expected"); - test_fixture(transform_fixture, "append-edge-on-interface.graphql", "compile_relay_artifacts/fixtures/append-edge-on-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-edge-on-interface.graphql", "compile_relay_artifacts/fixtures/append-edge-on-interface.expected", input, expected).await; } -#[test] -fn append_node() { +#[tokio::test] +async fn append_node() { let input = include_str!("compile_relay_artifacts/fixtures/append-node.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/append-node.expected"); - test_fixture(transform_fixture, "append-node.graphql", "compile_relay_artifacts/fixtures/append-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-node.graphql", "compile_relay_artifacts/fixtures/append-node.expected", input, expected).await; } -#[test] -fn append_node_literal_edge_type_name() { +#[tokio::test] +async fn append_node_literal_edge_type_name() { let input = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected"); - test_fixture(transform_fixture, "append-node-literal-edge-type-name.graphql", "compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-node-literal-edge-type-name.graphql", "compile_relay_artifacts/fixtures/append-node-literal-edge-type-name.expected", input, expected).await; } -#[test] -fn auto_filled_argument_on_defer() { +#[tokio::test] +async fn append_node_literal_edge_type_name_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.graphql"); + let expected = 
include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.expected"); + test_fixture(transform_fixture, file!(), "append-node-literal-edge-type-name-invalid.graphql", "compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn append_node_literal_edge_type_name_not_object_type() { + let input = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.expected"); + test_fixture(transform_fixture, file!(), "append-node-literal-edge-type-name-not-object-type.graphql", "compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-not-object-type.expected", input, expected).await; +} + +#[tokio::test] +async fn append_node_literal_edge_type_name_variable() { + let input = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.expected"); + test_fixture(transform_fixture, file!(), "append-node-literal-edge-type-name-variable.graphql", "compile_relay_artifacts/fixtures/append-node-literal-edge-type-name-variable.expected", input, expected).await; +} + +#[tokio::test] +async fn auto_filled_argument_on_defer() { let input = include_str!("compile_relay_artifacts/fixtures/auto-filled-argument-on-defer.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/auto-filled-argument-on-defer.expected"); - test_fixture(transform_fixture, "auto-filled-argument-on-defer.graphql", "compile_relay_artifacts/fixtures/auto-filled-argument-on-defer.expected", input, expected); + test_fixture(transform_fixture, file!(), "auto-filled-argument-on-defer.graphql", "compile_relay_artifacts/fixtures/auto-filled-argument-on-defer.expected", 
input, expected).await; } -#[test] -fn auto_filled_argument_on_match() { +#[tokio::test] +async fn auto_filled_argument_on_match() { let input = include_str!("compile_relay_artifacts/fixtures/auto-filled-argument-on-match.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/auto-filled-argument-on-match.expected"); - test_fixture(transform_fixture, "auto-filled-argument-on-match.graphql", "compile_relay_artifacts/fixtures/auto-filled-argument-on-match.expected", input, expected); + test_fixture(transform_fixture, file!(), "auto-filled-argument-on-match.graphql", "compile_relay_artifacts/fixtures/auto-filled-argument-on-match.expected", input, expected).await; } -#[test] -fn circular_inline_fragment_invalid() { +#[tokio::test] +async fn circular_fragment() { + let input = include_str!("compile_relay_artifacts/fixtures/circular-fragment.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/circular-fragment.expected"); + test_fixture(transform_fixture, file!(), "circular-fragment.graphql", "compile_relay_artifacts/fixtures/circular-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn circular_inline_fragment_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/circular-inline-fragment.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/circular-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "circular-inline-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/circular-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "circular-inline-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/circular-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn circular_no_inline_fragment() { +#[tokio::test] +async fn circular_no_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/circular-no-inline-fragment.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/circular-no-inline-fragment.expected"); - test_fixture(transform_fixture, "circular-no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/circular-no-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "circular-no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/circular-no-inline-fragment.expected", input, expected).await; } -#[test] -fn client_conditions() { +#[tokio::test] +async fn client_conditions() { let input = include_str!("compile_relay_artifacts/fixtures/client-conditions.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-conditions.expected"); - test_fixture(transform_fixture, "client-conditions.graphql", "compile_relay_artifacts/fixtures/client-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-conditions.graphql", "compile_relay_artifacts/fixtures/client-conditions.expected", input, expected).await; } -#[test] -fn client_edge_from_client_type_to_client_type() { +#[tokio::test] +async fn client_edge_from_client_type_to_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected"); - test_fixture(transform_fixture, "client_edge_from_client_type_to_client_type.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "client_edge_from_client_type_to_client_type.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type.expected", input, expected).await; +} + +#[tokio::test] +async fn client_edge_from_client_type_to_client_type_terse() { + let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.graphql"); + let expected = 
include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.expected"); + test_fixture(transform_fixture, file!(), "client_edge_from_client_type_to_client_type_terse.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse.expected", input, expected).await; +} + +#[tokio::test] +async fn client_edge_from_client_type_to_client_type_terse_live() { + let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.expected"); + test_fixture(transform_fixture, file!(), "client_edge_from_client_type_to_client_type_terse_live.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_live.expected", input, expected).await; +} + +#[tokio::test] +async fn client_edge_from_client_type_to_client_type_terse_plural() { + let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.expected"); + test_fixture(transform_fixture, file!(), "client_edge_from_client_type_to_client_type_terse_plural.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_plural.expected", input, expected).await; } -#[test] -fn client_edge_from_server_type_to_client_type() { +#[tokio::test] +async fn client_edge_from_client_type_to_client_type_terse_scalar() { + let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.expected"); + test_fixture(transform_fixture, file!(), 
"client_edge_from_client_type_to_client_type_terse_scalar.graphql", "compile_relay_artifacts/fixtures/client_edge_from_client_type_to_client_type_terse_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn client_edge_from_server_type_to_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected"); - test_fixture(transform_fixture, "client_edge_from_server_type_to_client_type.graphql", "compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "client_edge_from_server_type_to_client_type.graphql", "compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type.expected", input, expected).await; } -#[test] -fn client_edge_from_server_type_to_client_type_fragment_reads_client_field() { +#[tokio::test] +async fn client_edge_from_server_type_to_client_type_fragment_reads_client_field() { let input = include_str!("compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected"); - test_fixture(transform_fixture, "client_edge_from_server_type_to_client_type_fragment_reads_client_field.graphql", "compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "client_edge_from_server_type_to_client_type_fragment_reads_client_field.graphql", "compile_relay_artifacts/fixtures/client_edge_from_server_type_to_client_type_fragment_reads_client_field.expected", input, expected).await; } -#[test] -fn client_fields_in_inline_fragments() { +#[tokio::test] +async fn 
client_fields_in_inline_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/client-fields-in-inline-fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-in-inline-fragments.expected"); - test_fixture(transform_fixture, "client-fields-in-inline-fragments.graphql", "compile_relay_artifacts/fixtures/client-fields-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-in-inline-fragments.graphql", "compile_relay_artifacts/fixtures/client-fields-in-inline-fragments.expected", input, expected).await; } -#[test] -fn client_fields_of_client_type() { +#[tokio::test] +async fn client_fields_of_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/client-fields-of-client-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-of-client-type.expected"); - test_fixture(transform_fixture, "client-fields-of-client-type.graphql", "compile_relay_artifacts/fixtures/client-fields-of-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-of-client-type.graphql", "compile_relay_artifacts/fixtures/client-fields-of-client-type.expected", input, expected).await; } -#[test] -fn client_fields_on_roots() { +#[tokio::test] +async fn client_fields_on_roots() { let input = include_str!("compile_relay_artifacts/fixtures/client-fields-on-roots.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-on-roots.expected"); - test_fixture(transform_fixture, "client-fields-on-roots.graphql", "compile_relay_artifacts/fixtures/client-fields-on-roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-on-roots.graphql", "compile_relay_artifacts/fixtures/client-fields-on-roots.expected", input, expected).await; } -#[test] -fn client_fields_only_invalid() { +#[tokio::test] +async fn client_fields_only_invalid() { let input = 
include_str!("compile_relay_artifacts/fixtures/client_fields_only_invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client_fields_only_invalid.expected"); - test_fixture(transform_fixture, "client_fields_only_invalid.graphql", "compile_relay_artifacts/fixtures/client_fields_only_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client_fields_only_invalid.graphql", "compile_relay_artifacts/fixtures/client_fields_only_invalid.expected", input, expected).await; } -#[test] -fn client_fields_only_no_fragment_invalid() { +#[tokio::test] +async fn client_fields_only_no_fragment_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/client_fields_only_no_fragment_invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client_fields_only_no_fragment_invalid.expected"); - test_fixture(transform_fixture, "client_fields_only_no_fragment_invalid.graphql", "compile_relay_artifacts/fixtures/client_fields_only_no_fragment_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client_fields_only_no_fragment_invalid.graphql", "compile_relay_artifacts/fixtures/client_fields_only_no_fragment_invalid.expected", input, expected).await; } -#[test] -fn client_fields_overlapping_error_invalid() { +#[tokio::test] +async fn client_fields_overlapping_error_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/client-fields-overlapping-error.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-overlapping-error.invalid.expected"); - test_fixture(transform_fixture, "client-fields-overlapping-error.invalid.graphql", "compile_relay_artifacts/fixtures/client-fields-overlapping-error.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-overlapping-error.invalid.graphql", "compile_relay_artifacts/fixtures/client-fields-overlapping-error.invalid.expected", input, 
expected).await; } -#[test] -fn client_fields_overlapping_with_nulls() { +#[tokio::test] +async fn client_fields_overlapping_with_nulls() { let input = include_str!("compile_relay_artifacts/fixtures/client-fields-overlapping-with-nulls.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-overlapping-with-nulls.expected"); - test_fixture(transform_fixture, "client-fields-overlapping-with-nulls.graphql", "compile_relay_artifacts/fixtures/client-fields-overlapping-with-nulls.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-overlapping-with-nulls.graphql", "compile_relay_artifacts/fixtures/client-fields-overlapping-with-nulls.expected", input, expected).await; } -#[test] -fn client_fragment_spreads() { +#[tokio::test] +async fn client_fields_with_undefined_global_variables_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.expected"); + test_fixture(transform_fixture, file!(), "client-fields-with-undefined-global-variables.invalid.graphql", "compile_relay_artifacts/fixtures/client-fields-with-undefined-global-variables.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn client_fragment_spreads() { let input = include_str!("compile_relay_artifacts/fixtures/client-fragment-spreads.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fragment-spreads.expected"); - test_fixture(transform_fixture, "client-fragment-spreads.graphql", "compile_relay_artifacts/fixtures/client-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fragment-spreads.graphql", "compile_relay_artifacts/fixtures/client-fragment-spreads.expected", input, expected).await; } -#[test] -fn client_fragment_spreads_in_query() { +#[tokio::test] 
+async fn client_fragment_spreads_in_query() { let input = include_str!("compile_relay_artifacts/fixtures/client-fragment-spreads-in-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-fragment-spreads-in-query.expected"); - test_fixture(transform_fixture, "client-fragment-spreads-in-query.graphql", "compile_relay_artifacts/fixtures/client-fragment-spreads-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fragment-spreads-in-query.graphql", "compile_relay_artifacts/fixtures/client-fragment-spreads-in-query.expected", input, expected).await; } -#[test] -fn client_inline_fragments() { +#[tokio::test] +async fn client_inline_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/client-inline-fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-inline-fragments.expected"); - test_fixture(transform_fixture, "client-inline-fragments.graphql", "compile_relay_artifacts/fixtures/client-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-inline-fragments.graphql", "compile_relay_artifacts/fixtures/client-inline-fragments.expected", input, expected).await; +} + +#[tokio::test] +async fn client_inline_fragments_duplicate() { + let input = include_str!("compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.expected"); + test_fixture(transform_fixture, file!(), "client-inline-fragments-duplicate.graphql", "compile_relay_artifacts/fixtures/client-inline-fragments-duplicate.expected", input, expected).await; } -#[test] -fn client_inline_fragments_in_query() { +#[tokio::test] +async fn client_inline_fragments_in_query() { let input = include_str!("compile_relay_artifacts/fixtures/client-inline-fragments-in-query.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/client-inline-fragments-in-query.expected"); - test_fixture(transform_fixture, "client-inline-fragments-in-query.graphql", "compile_relay_artifacts/fixtures/client-inline-fragments-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-inline-fragments-in-query.graphql", "compile_relay_artifacts/fixtures/client-inline-fragments-in-query.expected", input, expected).await; +} + +#[tokio::test] +async fn client_interfaces() { + let input = include_str!("compile_relay_artifacts/fixtures/client-interfaces.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-interfaces.expected"); + test_fixture(transform_fixture, file!(), "client-interfaces.graphql", "compile_relay_artifacts/fixtures/client-interfaces.expected", input, expected).await; } -#[test] -fn client_linked_fields() { +#[tokio::test] +async fn client_interfaces_implemented_wrong_type_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.expected"); + test_fixture(transform_fixture, file!(), "client-interfaces-implemented-wrong-type.invalid.graphql", "compile_relay_artifacts/fixtures/client-interfaces-implemented-wrong-type.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn client_interfaces_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/client-interfaces.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-interfaces.invalid.expected"); + test_fixture(transform_fixture, file!(), "client-interfaces.invalid.graphql", "compile_relay_artifacts/fixtures/client-interfaces.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn client_interfaces_no_inline() { + let input = 
include_str!("compile_relay_artifacts/fixtures/client-interfaces-no-inline.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-interfaces-no-inline.expected"); + test_fixture(transform_fixture, file!(), "client-interfaces-no-inline.graphql", "compile_relay_artifacts/fixtures/client-interfaces-no-inline.expected", input, expected).await; +} + +#[tokio::test] +async fn client_interfaces_no_inline_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.expected"); + test_fixture(transform_fixture, file!(), "client-interfaces-no-inline.invalid.graphql", "compile_relay_artifacts/fixtures/client-interfaces-no-inline.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn client_linked_fields() { let input = include_str!("compile_relay_artifacts/fixtures/client-linked-fields.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-linked-fields.expected"); - test_fixture(transform_fixture, "client-linked-fields.graphql", "compile_relay_artifacts/fixtures/client-linked-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-linked-fields.graphql", "compile_relay_artifacts/fixtures/client-linked-fields.expected", input, expected).await; } -#[test] -fn client_only_query() { +#[tokio::test] +async fn client_only_query() { let input = include_str!("compile_relay_artifacts/fixtures/client-only-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-only-query.expected"); - test_fixture(transform_fixture, "client-only-query.graphql", "compile_relay_artifacts/fixtures/client-only-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-only-query.graphql", "compile_relay_artifacts/fixtures/client-only-query.expected", input, expected).await; } -#[test] -fn 
client_only_query_with_scalar_extension() { +#[tokio::test] +async fn client_only_query_with_scalar_extension() { let input = include_str!("compile_relay_artifacts/fixtures/client-only-query-with-scalar-extension.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-only-query-with-scalar-extension.expected"); - test_fixture(transform_fixture, "client-only-query-with-scalar-extension.graphql", "compile_relay_artifacts/fixtures/client-only-query-with-scalar-extension.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-only-query-with-scalar-extension.graphql", "compile_relay_artifacts/fixtures/client-only-query-with-scalar-extension.expected", input, expected).await; } -#[test] -fn client_scalar_fields() { +#[tokio::test] +async fn client_scalar_fields() { let input = include_str!("compile_relay_artifacts/fixtures/client-scalar-fields.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/client-scalar-fields.expected"); - test_fixture(transform_fixture, "client-scalar-fields.graphql", "compile_relay_artifacts/fixtures/client-scalar-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-scalar-fields.graphql", "compile_relay_artifacts/fixtures/client-scalar-fields.expected", input, expected).await; } -#[test] -fn complex_arguments() { +#[tokio::test] +async fn complex_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/complex-arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/complex-arguments.expected"); - test_fixture(transform_fixture, "complex-arguments.graphql", "compile_relay_artifacts/fixtures/complex-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-arguments.graphql", "compile_relay_artifacts/fixtures/complex-arguments.expected", input, expected).await; } -#[test] -fn complex_arguments_in_list() { +#[tokio::test] +async fn complex_arguments_in_list() { let input 
= include_str!("compile_relay_artifacts/fixtures/complex-arguments-in-list.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/complex-arguments-in-list.expected"); - test_fixture(transform_fixture, "complex-arguments-in-list.graphql", "compile_relay_artifacts/fixtures/complex-arguments-in-list.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-arguments-in-list.graphql", "compile_relay_artifacts/fixtures/complex-arguments-in-list.expected", input, expected).await; } -#[test] -fn complex_arguments_with_mutliple_variables() { +#[tokio::test] +async fn complex_arguments_with_mutliple_variables() { let input = include_str!("compile_relay_artifacts/fixtures/complex-arguments-with-mutliple-variables.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/complex-arguments-with-mutliple-variables.expected"); - test_fixture(transform_fixture, "complex-arguments-with-mutliple-variables.graphql", "compile_relay_artifacts/fixtures/complex-arguments-with-mutliple-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "complex-arguments-with-mutliple-variables.graphql", "compile_relay_artifacts/fixtures/complex-arguments-with-mutliple-variables.expected", input, expected).await; } -#[test] -fn conflicting_selections_with_actor_change_invalid() { +#[tokio::test] +async fn conflicting_selections_with_actor_change_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-actor-change.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-actor-change.invalid.expected"); - test_fixture(transform_fixture, "conflicting-selections-with-actor-change.invalid.graphql", "compile_relay_artifacts/fixtures/conflicting-selections-with-actor-change.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting-selections-with-actor-change.invalid.graphql", 
"compile_relay_artifacts/fixtures/conflicting-selections-with-actor-change.invalid.expected", input, expected).await; } -#[test] -fn conflicting_selections_with_defer_invalid() { +#[tokio::test] +async fn conflicting_selections_with_defer_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-defer.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-defer.invalid.expected"); - test_fixture(transform_fixture, "conflicting-selections-with-defer.invalid.graphql", "compile_relay_artifacts/fixtures/conflicting-selections-with-defer.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting-selections-with-defer.invalid.graphql", "compile_relay_artifacts/fixtures/conflicting-selections-with-defer.invalid.expected", input, expected).await; } -#[test] -fn conflicting_selections_with_no_inline_invalid() { +#[tokio::test] +async fn conflicting_selections_with_no_inline_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-no-inline.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/conflicting-selections-with-no-inline.invalid.expected"); - test_fixture(transform_fixture, "conflicting-selections-with-no-inline.invalid.graphql", "compile_relay_artifacts/fixtures/conflicting-selections-with-no-inline.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting-selections-with-no-inline.invalid.graphql", "compile_relay_artifacts/fixtures/conflicting-selections-with-no-inline.invalid.expected", input, expected).await; } -#[test] -fn connection() { +#[tokio::test] +async fn connection() { let input = include_str!("compile_relay_artifacts/fixtures/connection.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/connection.expected"); - test_fixture(transform_fixture, "connection.graphql", 
"compile_relay_artifacts/fixtures/connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection.graphql", "compile_relay_artifacts/fixtures/connection.expected", input, expected).await; } -#[test] -fn connection_with_aliased_edges_page_info() { +#[tokio::test] +async fn connection_name_matches_fragment_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.expected"); + test_fixture(transform_fixture, file!(), "connection-name-matches-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/connection-name-matches-fragment.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn connection_with_aliased_edges_page_info() { let input = include_str!("compile_relay_artifacts/fixtures/connection-with-aliased-edges-page_info.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/connection-with-aliased-edges-page_info.expected"); - test_fixture(transform_fixture, "connection-with-aliased-edges-page_info.graphql", "compile_relay_artifacts/fixtures/connection-with-aliased-edges-page_info.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-aliased-edges-page_info.graphql", "compile_relay_artifacts/fixtures/connection-with-aliased-edges-page_info.expected", input, expected).await; } -#[test] -fn connection_with_dynamic_key() { +#[tokio::test] +async fn connection_with_dynamic_key() { let input = include_str!("compile_relay_artifacts/fixtures/connection-with-dynamic-key.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/connection-with-dynamic-key.expected"); - test_fixture(transform_fixture, "connection-with-dynamic-key.graphql", "compile_relay_artifacts/fixtures/connection-with-dynamic-key.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"connection-with-dynamic-key.graphql", "compile_relay_artifacts/fixtures/connection-with-dynamic-key.expected", input, expected).await; } -#[test] -fn connection_with_dynamic_key_missing_variable_definition_invalid() { +#[tokio::test] +async fn connection_with_dynamic_key_missing_variable_definition_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/connection-with-dynamic-key-missing-variable-definition.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/connection-with-dynamic-key-missing-variable-definition.invalid.expected"); - test_fixture(transform_fixture, "connection-with-dynamic-key-missing-variable-definition.invalid.graphql", "compile_relay_artifacts/fixtures/connection-with-dynamic-key-missing-variable-definition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-dynamic-key-missing-variable-definition.invalid.graphql", "compile_relay_artifacts/fixtures/connection-with-dynamic-key-missing-variable-definition.invalid.expected", input, expected).await; } -#[test] -fn constant_variable_matches_constant_value() { +#[tokio::test] +async fn constant_variable_matches_constant_value() { let input = include_str!("compile_relay_artifacts/fixtures/constant_variable_matches_constant_value.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/constant_variable_matches_constant_value.expected"); - test_fixture(transform_fixture, "constant_variable_matches_constant_value.graphql", "compile_relay_artifacts/fixtures/constant_variable_matches_constant_value.expected", input, expected); + test_fixture(transform_fixture, file!(), "constant_variable_matches_constant_value.graphql", "compile_relay_artifacts/fixtures/constant_variable_matches_constant_value.expected", input, expected).await; } -#[test] -fn defer_if_arguments() { +#[tokio::test] +async fn defer_if_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/defer_if_arguments.graphql"); 
let expected = include_str!("compile_relay_artifacts/fixtures/defer_if_arguments.expected"); - test_fixture(transform_fixture, "defer_if_arguments.graphql", "compile_relay_artifacts/fixtures/defer_if_arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "defer_if_arguments.graphql", "compile_relay_artifacts/fixtures/defer_if_arguments.expected", input, expected).await; } -#[test] -fn defer_multiple_fragments_same_parent() { +#[tokio::test] +async fn defer_multiple_fragments_same_parent() { let input = include_str!("compile_relay_artifacts/fixtures/defer-multiple-fragments-same-parent.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/defer-multiple-fragments-same-parent.expected"); - test_fixture(transform_fixture, "defer-multiple-fragments-same-parent.graphql", "compile_relay_artifacts/fixtures/defer-multiple-fragments-same-parent.expected", input, expected); + test_fixture(transform_fixture, file!(), "defer-multiple-fragments-same-parent.graphql", "compile_relay_artifacts/fixtures/defer-multiple-fragments-same-parent.expected", input, expected).await; } -#[test] -fn delete_edge() { +#[tokio::test] +async fn delete_edge() { let input = include_str!("compile_relay_artifacts/fixtures/delete-edge.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/delete-edge.expected"); - test_fixture(transform_fixture, "delete-edge.graphql", "compile_relay_artifacts/fixtures/delete-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-edge.graphql", "compile_relay_artifacts/fixtures/delete-edge.expected", input, expected).await; } -#[test] -fn delete_edge_plural() { +#[tokio::test] +async fn delete_edge_plural() { let input = include_str!("compile_relay_artifacts/fixtures/delete-edge-plural.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/delete-edge-plural.expected"); - test_fixture(transform_fixture, "delete-edge-plural.graphql", 
"compile_relay_artifacts/fixtures/delete-edge-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-edge-plural.graphql", "compile_relay_artifacts/fixtures/delete-edge-plural.expected", input, expected).await; } -#[test] -fn directive_with_conditions() { +#[tokio::test] +async fn directive_with_conditions() { let input = include_str!("compile_relay_artifacts/fixtures/directive_with_conditions.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/directive_with_conditions.expected"); - test_fixture(transform_fixture, "directive_with_conditions.graphql", "compile_relay_artifacts/fixtures/directive_with_conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive_with_conditions.graphql", "compile_relay_artifacts/fixtures/directive_with_conditions.expected", input, expected).await; } -#[test] -fn duplicate_directive_invalid() { +#[tokio::test] +async fn duplicate_directive_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/duplicate-directive.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/duplicate-directive.invalid.expected"); - test_fixture(transform_fixture, "duplicate-directive.invalid.graphql", "compile_relay_artifacts/fixtures/duplicate-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-directive.invalid.graphql", "compile_relay_artifacts/fixtures/duplicate-directive.invalid.expected", input, expected).await; } -#[test] -fn duplicate_fragment_name() { +#[tokio::test] +async fn duplicate_fragment_name() { let input = include_str!("compile_relay_artifacts/fixtures/duplicate_fragment_name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/duplicate_fragment_name.expected"); - test_fixture(transform_fixture, "duplicate_fragment_name.graphql", "compile_relay_artifacts/fixtures/duplicate_fragment_name.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "duplicate_fragment_name.graphql", "compile_relay_artifacts/fixtures/duplicate_fragment_name.expected", input, expected).await; } -#[test] -fn duplicate_query_name() { +#[tokio::test] +async fn duplicate_query_name() { let input = include_str!("compile_relay_artifacts/fixtures/duplicate_query_name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/duplicate_query_name.expected"); - test_fixture(transform_fixture, "duplicate_query_name.graphql", "compile_relay_artifacts/fixtures/duplicate_query_name.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate_query_name.graphql", "compile_relay_artifacts/fixtures/duplicate_query_name.expected", input, expected).await; } -#[test] -fn explicit_null_argument() { +#[tokio::test] +async fn explicit_null_argument() { let input = include_str!("compile_relay_artifacts/fixtures/explicit-null-argument.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/explicit-null-argument.expected"); - test_fixture(transform_fixture, "explicit-null-argument.graphql", "compile_relay_artifacts/fixtures/explicit-null-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "explicit-null-argument.graphql", "compile_relay_artifacts/fixtures/explicit-null-argument.expected", input, expected).await; } -#[test] -fn explicit_null_default_value() { +#[tokio::test] +async fn explicit_null_default_value() { let input = include_str!("compile_relay_artifacts/fixtures/explicit-null-default-value.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/explicit-null-default-value.expected"); - test_fixture(transform_fixture, "explicit-null-default-value.graphql", "compile_relay_artifacts/fixtures/explicit-null-default-value.expected", input, expected); + test_fixture(transform_fixture, file!(), "explicit-null-default-value.graphql", "compile_relay_artifacts/fixtures/explicit-null-default-value.expected", input, 
expected).await; } -#[test] -fn false_positive_circular_fragment_reference_regression() { +#[tokio::test] +async fn false_positive_circular_fragment_reference_regression() { let input = include_str!("compile_relay_artifacts/fixtures/false-positive-circular-fragment-reference-regression.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/false-positive-circular-fragment-reference-regression.expected"); - test_fixture(transform_fixture, "false-positive-circular-fragment-reference-regression.graphql", "compile_relay_artifacts/fixtures/false-positive-circular-fragment-reference-regression.expected", input, expected); + test_fixture(transform_fixture, file!(), "false-positive-circular-fragment-reference-regression.graphql", "compile_relay_artifacts/fixtures/false-positive-circular-fragment-reference-regression.expected", input, expected).await; } -#[test] -fn fields_with_null_argument_values() { +#[tokio::test] +async fn fields_with_null_argument_values() { let input = include_str!("compile_relay_artifacts/fixtures/fields-with-null-argument-values.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fields-with-null-argument-values.expected"); - test_fixture(transform_fixture, "fields-with-null-argument-values.graphql", "compile_relay_artifacts/fixtures/fields-with-null-argument-values.expected", input, expected); -} - -#[test] -fn flight_props_transform() { - let input = include_str!("compile_relay_artifacts/fixtures/flight-props-transform.graphql"); - let expected = include_str!("compile_relay_artifacts/fixtures/flight-props-transform.expected"); - test_fixture(transform_fixture, "flight-props-transform.graphql", "compile_relay_artifacts/fixtures/flight-props-transform.expected", input, expected); + test_fixture(transform_fixture, file!(), "fields-with-null-argument-values.graphql", "compile_relay_artifacts/fixtures/fields-with-null-argument-values.expected", input, expected).await; } -#[test] -fn fragment_alias() { 
+#[tokio::test] +async fn fragment_alias() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-alias.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-alias.expected"); - test_fixture(transform_fixture, "fragment-alias.graphql", "compile_relay_artifacts/fixtures/fragment-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-alias.graphql", "compile_relay_artifacts/fixtures/fragment-alias.expected", input, expected).await; } -#[test] -fn fragment_alias_on_inline_fragment_does_not_get_flattened() { +#[tokio::test] +async fn fragment_alias_on_inline_fragment_does_not_get_flattened() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-alias-on-inline-fragment-does-not-get-flattened.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-alias-on-inline-fragment-does-not-get-flattened.expected"); - test_fixture(transform_fixture, "fragment-alias-on-inline-fragment-does-not-get-flattened.graphql", "compile_relay_artifacts/fixtures/fragment-alias-on-inline-fragment-does-not-get-flattened.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-alias-on-inline-fragment-does-not-get-flattened.graphql", "compile_relay_artifacts/fixtures/fragment-alias-on-inline-fragment-does-not-get-flattened.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_arg_passed_to_resolver_rutime_arg() { + let input = include_str!("compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.expected"); + test_fixture(transform_fixture, file!(), "fragment-arg-passed-to-resolver-rutime-arg.graphql", "compile_relay_artifacts/fixtures/fragment-arg-passed-to-resolver-rutime-arg.expected", input, expected).await; } -#[test] -fn fragment_on_node_interface() { +#[tokio::test] +async fn 
fragment_on_node_interface() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-node-interface.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-node-interface.graphql", "compile_relay_artifacts/fixtures/fragment-on-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-node-interface.graphql", "compile_relay_artifacts/fixtures/fragment-on-node-interface.expected", input, expected).await; } -#[test] -fn fragment_on_non_node_fetchable_type() { +#[tokio::test] +async fn fragment_on_non_node_fetchable_type() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected"); - test_fixture(transform_fixture, "fragment-on-non-node-fetchable-type.graphql", "compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-non-node-fetchable-type.graphql", "compile_relay_artifacts/fixtures/fragment-on-non-node-fetchable-type.expected", input, expected).await; } -#[test] -fn fragment_on_object_implementing_node_interface() { +#[tokio::test] +async fn fragment_on_object_implementing_node_interface() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface.graphql", "compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-object-implementing-node-interface.graphql", 
"compile_relay_artifacts/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected).await; } -#[test] -fn fragment_on_query() { +#[tokio::test] +async fn fragment_on_query() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-query.expected"); - test_fixture(transform_fixture, "fragment-on-query.graphql", "compile_relay_artifacts/fixtures/fragment-on-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query.graphql", "compile_relay_artifacts/fixtures/fragment-on-query.expected", input, expected).await; } -#[test] -fn fragment_on_query_with_cycle_invalid() { +#[tokio::test] +async fn fragment_on_query_with_cycle_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-query-with-cycle.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-query-with-cycle.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-query-with-cycle.invalid.graphql", "compile_relay_artifacts/fixtures/fragment-on-query-with-cycle.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query-with-cycle.invalid.graphql", "compile_relay_artifacts/fixtures/fragment-on-query-with-cycle.invalid.expected", input, expected).await; } -#[test] -fn fragment_on_viewer() { +#[tokio::test] +async fn fragment_on_viewer() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-on-viewer.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-on-viewer.expected"); - test_fixture(transform_fixture, "fragment-on-viewer.graphql", "compile_relay_artifacts/fixtures/fragment-on-viewer.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-viewer.graphql", "compile_relay_artifacts/fixtures/fragment-on-viewer.expected", input, expected).await; } -#[test] -fn 
fragment_with_defer_arguments() { +#[tokio::test] +async fn fragment_with_defer_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-defer-arguments.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-arguments.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_defer_arguments_without_label() { +#[tokio::test] +async fn fragment_with_defer_arguments_without_label() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-arguments-without-label.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-arguments-without-label.expected"); - test_fixture(transform_fixture, "fragment-with-defer-arguments-without-label.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-arguments-without-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-arguments-without-label.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-arguments-without-label.expected", input, expected).await; } -#[test] -fn fragment_with_defer_in_stream() { +#[tokio::test] +async fn fragment_with_defer_in_stream() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected"); - test_fixture(transform_fixture, "fragment-with-defer-in-stream.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-in-stream.graphql", 
"compile_relay_artifacts/fixtures/fragment-with-defer-in-stream.expected", input, expected).await; } -#[test] -fn fragment_with_defer_on_abstract_type() { +#[tokio::test] +async fn fragment_with_defer_on_abstract_type() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-on-abstract-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-defer-on-abstract-type.expected"); - test_fixture(transform_fixture, "fragment-with-defer-on-abstract-type.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-on-abstract-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-on-abstract-type.graphql", "compile_relay_artifacts/fixtures/fragment-with-defer-on-abstract-type.expected", input, expected).await; } -#[test] -fn fragment_with_match_directive() { +#[tokio::test] +async fn fragment_with_match_directive() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-match-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-match-directive.expected"); - test_fixture(transform_fixture, "fragment-with-match-directive.graphql", "compile_relay_artifacts/fixtures/fragment-with-match-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-match-directive.graphql", "compile_relay_artifacts/fixtures/fragment-with-match-directive.expected", input, expected).await; } -#[test] -fn fragment_with_stream() { +#[tokio::test] +async fn fragment_with_stream() { let input = include_str!("compile_relay_artifacts/fixtures/fragment-with-stream.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/fragment-with-stream.expected"); - test_fixture(transform_fixture, "fragment-with-stream.graphql", "compile_relay_artifacts/fixtures/fragment-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream.graphql", 
"compile_relay_artifacts/fixtures/fragment-with-stream.expected", input, expected).await; } -#[test] -fn id_as_alias_invalid() { +#[tokio::test] +async fn id_as_alias_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/id-as-alias.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/id-as-alias.invalid.expected"); - test_fixture(transform_fixture, "id-as-alias.invalid.graphql", "compile_relay_artifacts/fixtures/id-as-alias.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "id-as-alias.invalid.graphql", "compile_relay_artifacts/fixtures/id-as-alias.invalid.expected", input, expected).await; } -#[test] -fn incompatible_variable_usage_across_documents() { +#[tokio::test] +async fn incompatible_variable_usage_across_documents() { let input = include_str!("compile_relay_artifacts/fixtures/incompatible-variable-usage-across-documents.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/incompatible-variable-usage-across-documents.expected"); - test_fixture(transform_fixture, "incompatible-variable-usage-across-documents.graphql", "compile_relay_artifacts/fixtures/incompatible-variable-usage-across-documents.expected", input, expected); + test_fixture(transform_fixture, file!(), "incompatible-variable-usage-across-documents.graphql", "compile_relay_artifacts/fixtures/incompatible-variable-usage-across-documents.expected", input, expected).await; } -#[test] -fn inline_and_mask_are_incompatible_invalid() { +#[tokio::test] +async fn inline_and_mask_are_incompatible_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/inline-and-mask-are-incompatible.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/inline-and-mask-are-incompatible.invalid.expected"); - test_fixture(transform_fixture, "inline-and-mask-are-incompatible.invalid.graphql", "compile_relay_artifacts/fixtures/inline-and-mask-are-incompatible.invalid.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "inline-and-mask-are-incompatible.invalid.graphql", "compile_relay_artifacts/fixtures/inline-and-mask-are-incompatible.invalid.expected", input, expected).await; } -#[test] -fn inline_data_fragment() { +#[tokio::test] +async fn inline_data_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment.expected"); - test_fixture(transform_fixture, "inline-data-fragment.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-data-fragment.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment.expected", input, expected).await; } -#[test] -fn inline_data_fragment_global_vars() { +#[tokio::test] +async fn inline_data_fragment_global_vars() { let input = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment-global-vars.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment-global-vars.expected"); - test_fixture(transform_fixture, "inline-data-fragment-global-vars.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment-global-vars.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-data-fragment-global-vars.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment-global-vars.expected", input, expected).await; } -#[test] -fn inline_data_fragment_local_args() { +#[tokio::test] +async fn inline_data_fragment_local_args() { let input = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment-local-args.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/inline-data-fragment-local-args.expected"); - test_fixture(transform_fixture, "inline-data-fragment-local-args.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment-local-args.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "inline-data-fragment-local-args.graphql", "compile_relay_artifacts/fixtures/inline-data-fragment-local-args.expected", input, expected).await; } -#[test] -fn inline_fragment_on_abstract_client_type_nested_in_resolver_client_edge() { +#[tokio::test] +async fn inline_fragment_on_abstract_client_type_nested_in_resolver_client_edge() { let input = include_str!("compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected"); - test_fixture(transform_fixture, "inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.graphql", "compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.graphql", "compile_relay_artifacts/fixtures/inline-fragment-on-abstract-client-type-nested-in-resolver-client-edge.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("compile_relay_artifacts/fixtures/kitchen-sink.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "compile_relay_artifacts/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "compile_relay_artifacts/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn linked_handle_field() { +#[tokio::test] +async fn linked_handle_field() { let input = include_str!("compile_relay_artifacts/fixtures/linked-handle-field.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/linked-handle-field.expected"); - 
test_fixture(transform_fixture, "linked-handle-field.graphql", "compile_relay_artifacts/fixtures/linked-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-field.graphql", "compile_relay_artifacts/fixtures/linked-handle-field.expected", input, expected).await; } -#[test] -fn match_field_overlap_across_documents() { +#[tokio::test] +async fn match_field_overlap_across_documents() { let input = include_str!("compile_relay_artifacts/fixtures/match-field-overlap-across-documents.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/match-field-overlap-across-documents.expected"); - test_fixture(transform_fixture, "match-field-overlap-across-documents.graphql", "compile_relay_artifacts/fixtures/match-field-overlap-across-documents.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field-overlap-across-documents.graphql", "compile_relay_artifacts/fixtures/match-field-overlap-across-documents.expected", input, expected).await; } -#[test] -fn match_on_child_of_plural() { +#[tokio::test] +async fn match_on_child_of_plural() { let input = include_str!("compile_relay_artifacts/fixtures/match-on-child-of-plural.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/match-on-child-of-plural.expected"); - test_fixture(transform_fixture, "match-on-child-of-plural.graphql", "compile_relay_artifacts/fixtures/match-on-child-of-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-on-child-of-plural.graphql", "compile_relay_artifacts/fixtures/match-on-child-of-plural.expected", input, expected).await; } -#[test] -fn match_with_invalid_key_invalid() { +#[tokio::test] +async fn match_with_invalid_key_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/match-with-invalid-key.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/match-with-invalid-key.invalid.expected"); - 
test_fixture(transform_fixture, "match-with-invalid-key.invalid.graphql", "compile_relay_artifacts/fixtures/match-with-invalid-key.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-with-invalid-key.invalid.graphql", "compile_relay_artifacts/fixtures/match-with-invalid-key.invalid.expected", input, expected).await; } -#[test] -fn missing_argument_on_field_invalid() { +#[tokio::test] +async fn missing_argument_on_field_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/missing-argument-on-field.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/missing-argument-on-field.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-field.invalid.graphql", "compile_relay_artifacts/fixtures/missing-argument-on-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-field.invalid.graphql", "compile_relay_artifacts/fixtures/missing-argument-on-field.invalid.expected", input, expected).await; } -#[test] -fn module_deduping() { +#[tokio::test] +async fn module_deduping() { let input = include_str!("compile_relay_artifacts/fixtures/module-deduping.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/module-deduping.expected"); - test_fixture(transform_fixture, "module-deduping.graphql", "compile_relay_artifacts/fixtures/module-deduping.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-deduping.graphql", "compile_relay_artifacts/fixtures/module-deduping.expected", input, expected).await; } -#[test] -fn module_in_inline_fragment() { +#[tokio::test] +async fn module_in_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/module-in-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/module-in-inline-fragment.expected"); - test_fixture(transform_fixture, "module-in-inline-fragment.graphql", 
"compile_relay_artifacts/fixtures/module-in-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-in-inline-fragment.graphql", "compile_relay_artifacts/fixtures/module-in-inline-fragment.expected", input, expected).await; } -#[test] -fn module_overlap_across_documents() { +#[tokio::test] +async fn module_overlap_across_documents() { let input = include_str!("compile_relay_artifacts/fixtures/module-overlap-across-documents.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/module-overlap-across-documents.expected"); - test_fixture(transform_fixture, "module-overlap-across-documents.graphql", "compile_relay_artifacts/fixtures/module-overlap-across-documents.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-overlap-across-documents.graphql", "compile_relay_artifacts/fixtures/module-overlap-across-documents.expected", input, expected).await; } -#[test] -fn module_overlap_within_document_invalid() { +#[tokio::test] +async fn module_overlap_within_document_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/module-overlap-within-document.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/module-overlap-within-document.invalid.expected"); - test_fixture(transform_fixture, "module-overlap-within-document.invalid.graphql", "compile_relay_artifacts/fixtures/module-overlap-within-document.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-overlap-within-document.invalid.graphql", "compile_relay_artifacts/fixtures/module-overlap-within-document.invalid.expected", input, expected).await; } -#[test] -fn module_with_defer() { +#[tokio::test] +async fn module_with_defer() { let input = include_str!("compile_relay_artifacts/fixtures/module-with-defer.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/module-with-defer.expected"); - test_fixture(transform_fixture, 
"module-with-defer.graphql", "compile_relay_artifacts/fixtures/module-with-defer.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-with-defer.graphql", "compile_relay_artifacts/fixtures/module-with-defer.expected", input, expected).await; } -#[test] -fn multiple_client_edges() { +#[tokio::test] +async fn multiple_client_edges() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-client-edges.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple-client-edges.expected"); - test_fixture(transform_fixture, "multiple-client-edges.graphql", "compile_relay_artifacts/fixtures/multiple-client-edges.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-client-edges.graphql", "compile_relay_artifacts/fixtures/multiple-client-edges.expected", input, expected).await; } -#[test] -fn multiple_conditions() { +#[tokio::test] +async fn multiple_conditions() { let input = include_str!("compile_relay_artifacts/fixtures/multiple_conditions.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple_conditions.expected"); - test_fixture(transform_fixture, "multiple_conditions.graphql", "compile_relay_artifacts/fixtures/multiple_conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple_conditions.graphql", "compile_relay_artifacts/fixtures/multiple_conditions.expected", input, expected).await; } -#[test] -fn multiple_modules_different_component_invalid() { +#[tokio::test] +async fn multiple_modules_different_component_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-modules-different-component.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple-modules-different-component.invalid.expected"); - test_fixture(transform_fixture, "multiple-modules-different-component.invalid.graphql", 
"compile_relay_artifacts/fixtures/multiple-modules-different-component.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-modules-different-component.invalid.graphql", "compile_relay_artifacts/fixtures/multiple-modules-different-component.invalid.expected", input, expected).await; } -#[test] -fn multiple_modules_different_fragment_invalid() { +#[tokio::test] +async fn multiple_modules_different_fragment_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-modules-different-fragment.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple-modules-different-fragment.invalid.expected"); - test_fixture(transform_fixture, "multiple-modules-different-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/multiple-modules-different-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-modules-different-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/multiple-modules-different-fragment.invalid.expected", input, expected).await; } -#[test] -fn multiple_modules_same_selections() { +#[tokio::test] +async fn multiple_modules_same_selections() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-modules-same-selections.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple-modules-same-selections.expected"); - test_fixture(transform_fixture, "multiple-modules-same-selections.graphql", "compile_relay_artifacts/fixtures/multiple-modules-same-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-modules-same-selections.graphql", "compile_relay_artifacts/fixtures/multiple-modules-same-selections.expected", input, expected).await; } -#[test] -fn multiple_modules_with_key() { +#[tokio::test] +async fn multiple_modules_with_key() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-modules-with-key.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/multiple-modules-with-key.expected"); - test_fixture(transform_fixture, "multiple-modules-with-key.graphql", "compile_relay_artifacts/fixtures/multiple-modules-with-key.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-modules-with-key.graphql", "compile_relay_artifacts/fixtures/multiple-modules-with-key.expected", input, expected).await; } -#[test] -fn multiple_modules_without_key_invalid() { +#[tokio::test] +async fn multiple_modules_without_key_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/multiple-modules-without-key.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/multiple-modules-without-key.invalid.expected"); - test_fixture(transform_fixture, "multiple-modules-without-key.invalid.graphql", "compile_relay_artifacts/fixtures/multiple-modules-without-key.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-modules-without-key.invalid.graphql", "compile_relay_artifacts/fixtures/multiple-modules-without-key.invalid.expected", input, expected).await; } -#[test] -fn nested_conditions() { +#[tokio::test] +async fn nested_conditions() { let input = include_str!("compile_relay_artifacts/fixtures/nested_conditions.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/nested_conditions.expected"); - test_fixture(transform_fixture, "nested_conditions.graphql", "compile_relay_artifacts/fixtures/nested_conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested_conditions.graphql", "compile_relay_artifacts/fixtures/nested_conditions.expected", input, expected).await; } -#[test] -fn nested_conditions_2() { +#[tokio::test] +async fn nested_conditions_2() { let input = include_str!("compile_relay_artifacts/fixtures/nested-conditions-2.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/nested-conditions-2.expected"); - 
test_fixture(transform_fixture, "nested-conditions-2.graphql", "compile_relay_artifacts/fixtures/nested-conditions-2.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested-conditions-2.graphql", "compile_relay_artifacts/fixtures/nested-conditions-2.expected", input, expected).await; } -#[test] -fn no_inline_abstract_fragment() { +#[tokio::test] +async fn no_inline_abstract_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/no-inline-abstract-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/no-inline-abstract-fragment.expected"); - test_fixture(transform_fixture, "no-inline-abstract-fragment.graphql", "compile_relay_artifacts/fixtures/no-inline-abstract-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-inline-abstract-fragment.graphql", "compile_relay_artifacts/fixtures/no-inline-abstract-fragment.expected", input, expected).await; } -#[test] -fn no_inline_fragment() { +#[tokio::test] +async fn no_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment.expected"); - test_fixture(transform_fixture, "no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment.expected", input, expected).await; } -#[test] -fn no_inline_fragment_and_module() { +#[tokio::test] +async fn no_inline_fragment_and_module() { let input = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment-and-module.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment-and-module.expected"); - test_fixture(transform_fixture, "no-inline-fragment-and-module.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment-and-module.expected", 
input, expected); + test_fixture(transform_fixture, file!(), "no-inline-fragment-and-module.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment-and-module.expected", input, expected).await; } -#[test] -fn no_inline_fragment_in_raw_response_query() { +#[tokio::test] +async fn no_inline_fragment_in_raw_response_query() { let input = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment-in-raw-response-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/no-inline-fragment-in-raw-response-query.expected"); - test_fixture(transform_fixture, "no-inline-fragment-in-raw-response-query.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment-in-raw-response-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-inline-fragment-in-raw-response-query.graphql", "compile_relay_artifacts/fixtures/no-inline-fragment-in-raw-response-query.expected", input, expected).await; } -#[test] -fn original_client_fields_test() { +#[tokio::test] +async fn original_client_fields_test() { let input = include_str!("compile_relay_artifacts/fixtures/original-client-fields-test.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/original-client-fields-test.expected"); - test_fixture(transform_fixture, "original-client-fields-test.graphql", "compile_relay_artifacts/fixtures/original-client-fields-test.expected", input, expected); + test_fixture(transform_fixture, file!(), "original-client-fields-test.graphql", "compile_relay_artifacts/fixtures/original-client-fields-test.expected", input, expected).await; } -#[test] -fn plural_fragment() { +#[tokio::test] +async fn plural_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/plural-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/plural-fragment.expected"); - test_fixture(transform_fixture, "plural-fragment.graphql", "compile_relay_artifacts/fixtures/plural-fragment.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "plural-fragment.graphql", "compile_relay_artifacts/fixtures/plural-fragment.expected", input, expected).await; } -#[test] -fn prepend_node() { +#[tokio::test] +async fn prepend_node() { let input = include_str!("compile_relay_artifacts/fixtures/prepend-node.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/prepend-node.expected"); - test_fixture(transform_fixture, "prepend-node.graphql", "compile_relay_artifacts/fixtures/prepend-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "prepend-node.graphql", "compile_relay_artifacts/fixtures/prepend-node.expected", input, expected).await; } -#[test] -fn provided_variable_directive() { +#[tokio::test] +async fn provided_variable_directive() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-directive.expected"); - test_fixture(transform_fixture, "provided-variable-directive.graphql", "compile_relay_artifacts/fixtures/provided-variable-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-directive.graphql", "compile_relay_artifacts/fixtures/provided-variable-directive.expected", input, expected).await; } -#[test] -fn provided_variable_in_fragment() { +#[tokio::test] +async fn provided_variable_in_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-in-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-in-fragment.expected"); - test_fixture(transform_fixture, "provided-variable-in-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-in-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-in-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-in-fragment.expected", input, expected).await; } 
-#[test] -fn provided_variable_multiple_queries() { +#[tokio::test] +async fn provided_variable_multiple_queries() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-multiple-queries.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-multiple-queries.expected"); - test_fixture(transform_fixture, "provided-variable-multiple-queries.graphql", "compile_relay_artifacts/fixtures/provided-variable-multiple-queries.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-multiple-queries.graphql", "compile_relay_artifacts/fixtures/provided-variable-multiple-queries.expected", input, expected).await; } -#[test] -fn provided_variable_nested_split_operation() { +#[tokio::test] +async fn provided_variable_nested_split_operation() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-nested-split-operation.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-nested-split-operation.expected"); - test_fixture(transform_fixture, "provided-variable-nested-split-operation.graphql", "compile_relay_artifacts/fixtures/provided-variable-nested-split-operation.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-nested-split-operation.graphql", "compile_relay_artifacts/fixtures/provided-variable-nested-split-operation.expected", input, expected).await; } -#[test] -fn provided_variable_no_inline_fragment() { +#[tokio::test] +async fn provided_variable_no_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-no-inline-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-no-inline-fragment.expected"); - test_fixture(transform_fixture, "provided-variable-no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-no-inline-fragment.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "provided-variable-no-inline-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-no-inline-fragment.expected", input, expected).await; } -#[test] -fn provided_variable_passed_in_argument_invalid() { +#[tokio::test] +async fn provided_variable_passed_in_argument_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-invalid.expected"); - test_fixture(transform_fixture, "provided-variable-passed-in-argument-invalid.graphql", "compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-passed-in-argument-invalid.graphql", "compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-invalid.expected", input, expected).await; } -#[test] -fn provided_variable_refetchable_fragment() { +#[tokio::test] +async fn provided_variable_passed_in_argument_refetchable_fragment_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.expected"); + test_fixture(transform_fixture, file!(), "provided-variable-passed-in-argument-refetchable-fragment-invalid.graphql", "compile_relay_artifacts/fixtures/provided-variable-passed-in-argument-refetchable-fragment-invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn provided_variable_refetchable_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected"); - test_fixture(transform_fixture, 
"provided-variable-refetchable-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-refetchable-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment.expected", input, expected).await; } -#[test] -fn provided_variable_reused_nested_fragment() { +#[tokio::test] +async fn provided_variable_refetchable_fragment_combination() { + let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.expected"); + test_fixture(transform_fixture, file!(), "provided-variable-refetchable-fragment-combination.graphql", "compile_relay_artifacts/fixtures/provided-variable-refetchable-fragment-combination.expected", input, expected).await; +} + +#[tokio::test] +async fn provided_variable_reused_nested_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-reused-nested-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-reused-nested-fragment.expected"); - test_fixture(transform_fixture, "provided-variable-reused-nested-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-reused-nested-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-reused-nested-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-reused-nested-fragment.expected", input, expected).await; } -#[test] -fn provided_variable_reused_nested_linked_fragment() { +#[tokio::test] +async fn provided_variable_reused_nested_linked_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-reused-nested-linked-fragment.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/provided-variable-reused-nested-linked-fragment.expected"); - test_fixture(transform_fixture, "provided-variable-reused-nested-linked-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-reused-nested-linked-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-reused-nested-linked-fragment.graphql", "compile_relay_artifacts/fixtures/provided-variable-reused-nested-linked-fragment.expected", input, expected).await; } -#[test] -fn provided_variable_split_operation() { +#[tokio::test] +async fn provided_variable_split_operation() { let input = include_str!("compile_relay_artifacts/fixtures/provided-variable-split-operation.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/provided-variable-split-operation.expected"); - test_fixture(transform_fixture, "provided-variable-split-operation.graphql", "compile_relay_artifacts/fixtures/provided-variable-split-operation.expected", input, expected); + test_fixture(transform_fixture, file!(), "provided-variable-split-operation.graphql", "compile_relay_artifacts/fixtures/provided-variable-split-operation.expected", input, expected).await; } -#[test] -fn query_with_and_without_module_directive() { +#[tokio::test] +async fn query_with_and_without_module_directive() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-and-without-module-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected"); - test_fixture(transform_fixture, "query-with-and-without-module-directive.graphql", "compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-and-without-module-directive.graphql", "compile_relay_artifacts/fixtures/query-with-and-without-module-directive.expected", input, expected).await; } -#[test] -fn 
query_with_conditional_module() { +#[tokio::test] +async fn query_with_conditional_module() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-conditional-module.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-conditional-module.expected"); - test_fixture(transform_fixture, "query-with-conditional-module.graphql", "compile_relay_artifacts/fixtures/query-with-conditional-module.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-conditional-module.graphql", "compile_relay_artifacts/fixtures/query-with-conditional-module.expected", input, expected).await; } -#[test] -fn query_with_fragment_variables() { +#[tokio::test] +async fn query_with_fragment_variables() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-fragment-variables.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-fragment-variables.expected"); - test_fixture(transform_fixture, "query-with-fragment-variables.graphql", "compile_relay_artifacts/fixtures/query-with-fragment-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-fragment-variables.graphql", "compile_relay_artifacts/fixtures/query-with-fragment-variables.expected", input, expected).await; } -#[test] -fn query_with_match_directive() { +#[tokio::test] +async fn query_with_match_directive() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive.expected"); - test_fixture(transform_fixture, "query-with-match-directive.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-directive.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive.expected", input, expected).await; } -#[test] -fn 
query_with_match_directive_no_inline_experimental() { +#[tokio::test] +async fn query_with_match_directive_no_inline_experimental() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-no-inline-experimental.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-no-inline-experimental.expected"); - test_fixture(transform_fixture, "query-with-match-directive-no-inline-experimental.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-no-inline-experimental.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-directive-no-inline-experimental.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-no-inline-experimental.expected", input, expected).await; } -#[test] -fn query_with_match_directive_no_modules_invalid() { +#[tokio::test] +async fn query_with_match_directive_no_modules_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-no-modules.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-no-modules.invalid.expected"); - test_fixture(transform_fixture, "query-with-match-directive-no-modules.invalid.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-no-modules.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-directive-no-modules.invalid.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-no-modules.invalid.expected", input, expected).await; } -#[test] -fn query_with_match_directive_with_extra_argument() { +#[tokio::test] +async fn query_with_match_directive_with_extra_argument() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-with-extra-argument.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-with-extra-argument.expected"); - 
test_fixture(transform_fixture, "query-with-match-directive-with-extra-argument.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-with-extra-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-directive-with-extra-argument.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-with-extra-argument.expected", input, expected).await; } -#[test] -fn query_with_match_directive_with_typename() { +#[tokio::test] +async fn query_with_match_directive_with_typename() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-with-typename.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-match-directive-with-typename.expected"); - test_fixture(transform_fixture, "query-with-match-directive-with-typename.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-with-typename.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-directive-with-typename.graphql", "compile_relay_artifacts/fixtures/query-with-match-directive-with-typename.expected", input, expected).await; } -#[test] -fn query_with_module_directive() { +#[tokio::test] +async fn query_with_module_directive() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive.expected"); - test_fixture(transform_fixture, "query-with-module-directive.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-module-directive.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive.expected", input, expected).await; } -#[test] -fn query_with_module_directive_and_arguments() { +#[tokio::test] +async fn query_with_module_directive_and_arguments() { let input = 
include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-and-arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-and-arguments.expected"); - test_fixture(transform_fixture, "query-with-module-directive-and-arguments.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-and-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-module-directive-and-arguments.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-and-arguments.expected", input, expected).await; } -#[test] -fn query_with_module_directive_custom_import() { +#[tokio::test] +async fn query_with_module_directive_custom_import() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-custom-import.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-custom-import.expected"); - test_fixture(transform_fixture, "query-with-module-directive-custom-import.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-custom-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-module-directive-custom-import.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-custom-import.expected", input, expected).await; } -#[test] -fn query_with_module_directive_jsresource_import() { +#[tokio::test] +async fn query_with_module_directive_jsresource_import() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-jsresource-import.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-module-directive-jsresource-import.expected"); - test_fixture(transform_fixture, "query-with-module-directive-jsresource-import.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-jsresource-import.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"query-with-module-directive-jsresource-import.graphql", "compile_relay_artifacts/fixtures/query-with-module-directive-jsresource-import.expected", input, expected).await; } -#[test] -fn query_with_raw_response_type_directive() { +#[tokio::test] +async fn query_with_raw_response_type_directive() { let input = include_str!("compile_relay_artifacts/fixtures/query-with-raw-response-type-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/query-with-raw-response-type-directive.expected"); - test_fixture(transform_fixture, "query-with-raw-response-type-directive.graphql", "compile_relay_artifacts/fixtures/query-with-raw-response-type-directive.expected", input, expected); -} - -#[test] -fn query_with_relay_client_component() { - let input = include_str!("compile_relay_artifacts/fixtures/query-with-relay-client-component.graphql"); - let expected = include_str!("compile_relay_artifacts/fixtures/query-with-relay-client-component.expected"); - test_fixture(transform_fixture, "query-with-relay-client-component.graphql", "compile_relay_artifacts/fixtures/query-with-relay-client-component.expected", input, expected); -} - -#[test] -fn query_with_relay_client_component_with_argument_definitions() { - let input = include_str!("compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.graphql"); - let expected = include_str!("compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.expected"); - test_fixture(transform_fixture, "query-with-relay-client-component-with-argument-definitions.graphql", "compile_relay_artifacts/fixtures/query-with-relay-client-component-with-argument-definitions.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-raw-response-type-directive.graphql", "compile_relay_artifacts/fixtures/query-with-raw-response-type-directive.expected", input, expected).await; } -#[test] -fn redundant_selection_in_inline_fragments() { 
+#[tokio::test] +async fn redundant_selection_in_inline_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/redundant-selection-in-inline-fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/redundant-selection-in-inline-fragments.expected"); - test_fixture(transform_fixture, "redundant-selection-in-inline-fragments.graphql", "compile_relay_artifacts/fixtures/redundant-selection-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "redundant-selection-in-inline-fragments.graphql", "compile_relay_artifacts/fixtures/redundant-selection-in-inline-fragments.expected", input, expected).await; } -#[test] -fn refetchable_conflict_with_operation_invalid() { +#[tokio::test] +async fn refetchable_conflict_with_operation_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected"); - test_fixture(transform_fixture, "refetchable_conflict_with_operation.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable_conflict_with_operation.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable_conflict_with_operation.invalid.expected", input, expected).await; } -#[test] -fn refetchable_conflict_with_refetchable_invalid() { +#[tokio::test] +async fn refetchable_conflict_with_refetchable_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable_conflict_with_refetchable.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable_conflict_with_refetchable.invalid.expected"); - test_fixture(transform_fixture, "refetchable_conflict_with_refetchable.invalid.graphql", 
"compile_relay_artifacts/fixtures/refetchable_conflict_with_refetchable.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable_conflict_with_refetchable.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable_conflict_with_refetchable.invalid.expected", input, expected).await; } -#[test] -fn refetchable_connection() { +#[tokio::test] +async fn refetchable_connection() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-connection.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-connection.expected"); - test_fixture(transform_fixture, "refetchable-connection.graphql", "compile_relay_artifacts/fixtures/refetchable-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-connection.graphql", "compile_relay_artifacts/fixtures/refetchable-connection.expected", input, expected).await; } -#[test] -fn refetchable_connection_custom_handler() { +#[tokio::test] +async fn refetchable_connection_custom_handler() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected"); - test_fixture(transform_fixture, "refetchable-connection-custom-handler.graphql", "compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-connection-custom-handler.graphql", "compile_relay_artifacts/fixtures/refetchable-connection-custom-handler.expected", input, expected).await; } -#[test] -fn refetchable_fragment_directives_invalid() { +#[tokio::test] +async fn refetchable_fragment_directives_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable_fragment_directives.invalid.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/refetchable_fragment_directives.invalid.expected"); - test_fixture(transform_fixture, "refetchable_fragment_directives.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable_fragment_directives.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable_fragment_directives.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable_fragment_directives.invalid.expected", input, expected).await; } -#[test] -fn refetchable_fragment_on_node_with_missing_id() { +#[tokio::test] +async fn refetchable_fragment_on_node_and_fetchable() { + let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.expected"); + test_fixture(transform_fixture, file!(), "refetchable-fragment-on-node-and-fetchable.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable.expected", input, expected).await; +} + +#[tokio::test] +async fn refetchable_fragment_on_node_and_fetchable_arg() { + let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.expected"); + test_fixture(transform_fixture, file!(), "refetchable-fragment-on-node-and-fetchable-arg.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-arg.expected", input, expected).await; +} + +#[tokio::test] +async fn refetchable_fragment_on_node_and_fetchable_no_flag() { + let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.expected"); + test_fixture(transform_fixture, file!(), 
"refetchable-fragment-on-node-and-fetchable-no-flag.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-on-node-and-fetchable-no-flag.expected", input, expected).await; +} + +#[tokio::test] +async fn refetchable_fragment_on_node_with_missing_id() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected"); - test_fixture(transform_fixture, "refetchable-fragment-on-node-with-missing-id.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-on-node-with-missing-id.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-on-node-with-missing-id.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection() { +#[tokio::test] +async fn refetchable_fragment_with_connection() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_bidirectional() { +#[tokio::test] +async fn refetchable_fragment_with_connection_bidirectional() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-bidirectional.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-bidirectional.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-bidirectional.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_es_modules() { +#[tokio::test] +async fn refetchable_fragment_with_connection_es_modules() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-es-modules.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-es-modules.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-es-modules.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_with_stream() { +#[tokio::test] +async fn refetchable_fragment_with_connection_with_stream() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-with-stream.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"refetchable-fragment-with-connection-with-stream.graphql", "compile_relay_artifacts/fixtures/refetchable-fragment-with-connection-with-stream.expected", input, expected).await; } -#[test] -fn refetchable_with_arguments_conflicting_invalid() { +#[tokio::test] +async fn refetchable_with_arguments_conflicting_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-with-arguments-conflicting.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-with-arguments-conflicting.invalid.expected"); - test_fixture(transform_fixture, "refetchable-with-arguments-conflicting.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable-with-arguments-conflicting.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-with-arguments-conflicting.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable-with-arguments-conflicting.invalid.expected", input, expected).await; } -#[test] -fn refetchable_with_arguments_invalid() { +#[tokio::test] +async fn refetchable_with_arguments_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/refetchable-with-arguments.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/refetchable-with-arguments.invalid.expected"); - test_fixture(transform_fixture, "refetchable-with-arguments.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable-with-arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-with-arguments.invalid.graphql", "compile_relay_artifacts/fixtures/refetchable-with-arguments.invalid.expected", input, expected).await; } -#[test] -fn relay_client_id_field() { +#[tokio::test] +async fn relay_client_id_field() { let input = include_str!("compile_relay_artifacts/fixtures/relay-client-id-field.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-client-id-field.expected"); - test_fixture(transform_fixture, 
"relay-client-id-field.graphql", "compile_relay_artifacts/fixtures/relay-client-id-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-client-id-field.graphql", "compile_relay_artifacts/fixtures/relay-client-id-field.expected", input, expected).await; } -#[test] -fn relay_live_resolver() { +#[tokio::test] +async fn relay_live_resolver() { let input = include_str!("compile_relay_artifacts/fixtures/relay-live-resolver.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-live-resolver.expected"); - test_fixture(transform_fixture, "relay-live-resolver.graphql", "compile_relay_artifacts/fixtures/relay-live-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver.graphql", "compile_relay_artifacts/fixtures/relay-live-resolver.expected", input, expected).await; } -#[test] -fn relay_live_resolver_without_fragment() { +#[tokio::test] +async fn relay_live_resolver_without_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/relay-live-resolver-without-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-live-resolver-without-fragment.expected"); - test_fixture(transform_fixture, "relay-live-resolver-without-fragment.graphql", "compile_relay_artifacts/fixtures/relay-live-resolver-without-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver-without-fragment.graphql", "compile_relay_artifacts/fixtures/relay-live-resolver-without-fragment.expected", input, expected).await; } -#[test] -fn relay_model_resolver() { +#[tokio::test] +async fn relay_model_resolver() { let input = include_str!("compile_relay_artifacts/fixtures/relay-model-resolver.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-model-resolver.expected"); - test_fixture(transform_fixture, "relay-model-resolver.graphql", "compile_relay_artifacts/fixtures/relay-model-resolver.expected", 
input, expected); + test_fixture(transform_fixture, file!(), "relay-model-resolver.graphql", "compile_relay_artifacts/fixtures/relay-model-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn relay_resolver() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver.expected"); - test_fixture(transform_fixture, "relay-resolver.graphql", "compile_relay_artifacts/fixtures/relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver.graphql", "compile_relay_artifacts/fixtures/relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_alias() { +#[tokio::test] +async fn relay_resolver_alias() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-alias.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-alias.expected"); - test_fixture(transform_fixture, "relay-resolver-alias.graphql", "compile_relay_artifacts/fixtures/relay-resolver-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-alias.graphql", "compile_relay_artifacts/fixtures/relay-resolver-alias.expected", input, expected).await; } -#[test] -fn relay_resolver_backing_client_edge() { +#[tokio::test] +async fn relay_resolver_backing_client_edge() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-backing-client-edge.graphql", "compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-backing-client-edge.graphql", "compile_relay_artifacts/fixtures/relay-resolver-backing-client-edge.expected", 
input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_edge_to_interface() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-edge-to-interface.graphql", "compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_edge_to_interface_with_child_interface_and_no_implementors() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.graphql", "compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-child-interface-and-no-implementors.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_edge_to_interface_with_no_implementors() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-edge-to-interface-with-no-implementors.graphql", "compile_relay_artifacts/fixtures/relay-resolver-edge-to-interface-with-no-implementors.expected", input, expected).await; } -#[test] -fn relay_resolver_es_modules() { +#[tokio::test] +async fn relay_resolver_es_modules() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-es-modules.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/relay-resolver-es-modules.expected"); - test_fixture(transform_fixture, "relay-resolver-es-modules.graphql", "compile_relay_artifacts/fixtures/relay-resolver-es-modules.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-es-modules.graphql", "compile_relay_artifacts/fixtures/relay-resolver-es-modules.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_fragment_on_interface() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-fragment-on-interface.graphql", "compile_relay_artifacts/fixtures/relay-resolver-fragment-on-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_live_weak_object() { +#[tokio::test] +async fn relay_resolver_live_weak_object() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected"); - test_fixture(transform_fixture, "relay-resolver-live-weak-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-live-weak-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-live-weak-object.expected", input, expected).await; } -#[test] -fn relay_resolver_named_import() { +#[tokio::test] +async fn relay_resolver_named_import() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-named-import.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-named-import.expected"); - test_fixture(transform_fixture, "relay-resolver-named-import.graphql", 
"compile_relay_artifacts/fixtures/relay-resolver-named-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-named-import.graphql", "compile_relay_artifacts/fixtures/relay-resolver-named-import.expected", input, expected).await; } -#[test] -fn relay_resolver_on_abstract_client_type() { +#[tokio::test] +async fn relay_resolver_on_abstract_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-on-abstract-client-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-on-abstract-client-type.expected"); - test_fixture(transform_fixture, "relay-resolver-on-abstract-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-on-abstract-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-abstract-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-on-abstract-client-type.expected", input, expected).await; } -#[test] -fn relay_resolver_required() { +#[tokio::test] +async fn relay_resolver_plural_fragment_on_interface() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-plural-fragment-on-interface.graphql", "compile_relay_artifacts/fixtures/relay-resolver-plural-fragment-on-interface.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_required() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-required.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-required.expected"); - test_fixture(transform_fixture, "relay-resolver-required.graphql", "compile_relay_artifacts/fixtures/relay-resolver-required.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "relay-resolver-required.graphql", "compile_relay_artifacts/fixtures/relay-resolver-required.expected", input, expected).await; } -#[test] -fn relay_resolver_weak_object() { +#[tokio::test] +async fn relay_resolver_weak_object() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected"); - test_fixture(transform_fixture, "relay-resolver-weak-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-weak-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-weak-object.expected", input, expected).await; } -#[test] -fn relay_resolver_weak_object_plural() { +#[tokio::test] +async fn relay_resolver_weak_object_normalization_ast() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-weak-object-normalization-ast.graphql", "compile_relay_artifacts/fixtures/relay-resolver-weak-object-normalization-ast.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_weak_object_plural() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected"); - test_fixture(transform_fixture, "relay-resolver-weak-object-plural.graphql", "compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-weak-object-plural.graphql", 
"compile_relay_artifacts/fixtures/relay-resolver-weak-object-plural.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args() { +#[tokio::test] +async fn relay_resolver_with_args() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args_and_alias() { +#[tokio::test] +async fn relay_resolver_with_args_and_alias() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args-and-alias.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args-and-alias.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args_and_alias_no_fragment() { +#[tokio::test] +async fn relay_resolver_with_args_and_alias_no_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias-no-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias-no-fragment.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args-and-alias-no-fragment.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias-no-fragment.expected", 
input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args-and-alias-no-fragment.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args-and-alias-no-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_args_fragment_spread_using_undefined_global_variable_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-args-fragment-spread-using-undefined-global-variable.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_fragment_on_client_type() { +#[tokio::test] +async fn relay_resolver_with_fragment_on_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-fragment-on-client-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-fragment-on-client-type.expected"); - test_fixture(transform_fixture, "relay-resolver-with-fragment-on-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-fragment-on-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-fragment-on-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-fragment-on-client-type.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_client_object() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_object() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-client-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-client-object.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_scalar() { +#[tokio::test] +async fn relay_resolver_with_output_type_scalar() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-output-type-scalar.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-output-type-scalar.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-scalar.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-output-type-scalar.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-scalar.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-output-type-scalar.expected", input, expected).await; } -#[test] -fn relay_resolver_with_required_client_edge() { +#[tokio::test] +async fn relay_resolver_with_required_client_edge() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-with-required-client-edge.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-required-client-edge.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-required-client-edge.expected", input, 
expected).await; } -#[test] -fn relay_resolver_with_spread_invalid() { +#[tokio::test] +async fn relay_resolver_with_spread_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-spread.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-spread.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-spread.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-spread.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-spread.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-spread.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_undefined_field_and_fragment_args_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-field-and-fragment-args.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-and-fragment-args.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_without_fragment_on_client_type() { +#[tokio::test] +async fn relay_resolver_with_undefined_field_args_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-field-args.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn 
relay_resolver_with_undefined_field_args_linked_field_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-field-args-linked-field.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-linked-field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_undefined_field_args_scalar_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-field-args-scalar.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-field-args-scalar.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_undefined_fragment_args_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-fragment-args.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_undefined_fragment_args_linked_field_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql"); + let expected = 
include_str!("compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-undefined-fragment-args-linked-field.invalid.graphql", "compile_relay_artifacts/fixtures/relay-resolver-with-undefined-fragment-args-linked-field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_without_fragment_on_client_type() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolver-without-fragment-on-client-type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolver-without-fragment-on-client-type.expected"); - test_fixture(transform_fixture, "relay-resolver-without-fragment-on-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-without-fragment-on-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-without-fragment-on-client-type.graphql", "compile_relay_artifacts/fixtures/relay-resolver-without-fragment-on-client-type.expected", input, expected).await; } -#[test] -fn relay_resolvers_with_different_field_args_are_not_merged() { +#[tokio::test] +async fn relay_resolvers_with_different_field_args_are_not_merged() { let input = include_str!("compile_relay_artifacts/fixtures/relay-resolvers-with-different-field-args-are-not-merged.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/relay-resolvers-with-different-field-args-are-not-merged.expected"); - test_fixture(transform_fixture, "relay-resolvers-with-different-field-args-are-not-merged.graphql", "compile_relay_artifacts/fixtures/relay-resolvers-with-different-field-args-are-not-merged.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolvers-with-different-field-args-are-not-merged.graphql", "compile_relay_artifacts/fixtures/relay-resolvers-with-different-field-args-are-not-merged.expected", input, expected).await; } 
-#[test] -fn required_argument_not_passed_default_value() { +#[tokio::test] +async fn required_argument_not_passed_default_value() { let input = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_default_value.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_default_value.expected"); - test_fixture(transform_fixture, "required_argument_not_passed_default_value.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_default_value.expected", input, expected); + test_fixture(transform_fixture, file!(), "required_argument_not_passed_default_value.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_default_value.expected", input, expected).await; } -#[test] -fn required_argument_not_passed_no_args_invalid() { +#[tokio::test] +async fn required_argument_not_passed_no_args_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_no_args.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_no_args.invalid.expected"); - test_fixture(transform_fixture, "required_argument_not_passed_no_args.invalid.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_no_args.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "required_argument_not_passed_no_args.invalid.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_no_args.invalid.expected", input, expected).await; } -#[test] -fn required_argument_not_passed_other_args_invalid() { +#[tokio::test] +async fn required_argument_not_passed_other_args_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_other_args.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/required_argument_not_passed_other_args.invalid.expected"); - test_fixture(transform_fixture, 
"required_argument_not_passed_other_args.invalid.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_other_args.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "required_argument_not_passed_other_args.invalid.graphql", "compile_relay_artifacts/fixtures/required_argument_not_passed_other_args.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn required_bubbles_to_client_edge() { + let input = include_str!("compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.expected"); + test_fixture(transform_fixture, file!(), "required-bubbles-to-client-edge.graphql", "compile_relay_artifacts/fixtures/required-bubbles-to-client-edge.expected", input, expected).await; +} + +#[tokio::test] +async fn required_bubbles_to_inline_aliased_fragment() { + let input = include_str!("compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.expected"); + test_fixture(transform_fixture, file!(), "required-bubbles-to-inline-aliased-fragment.graphql", "compile_relay_artifacts/fixtures/required-bubbles-to-inline-aliased-fragment.expected", input, expected).await; } -#[test] -fn required_directive() { +#[tokio::test] +async fn required_directive() { let input = include_str!("compile_relay_artifacts/fixtures/required-directive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/required-directive.expected"); - test_fixture(transform_fixture, "required-directive.graphql", "compile_relay_artifacts/fixtures/required-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-directive.graphql", "compile_relay_artifacts/fixtures/required-directive.expected", input, expected).await; } -#[test] -fn 
same_fields_with_different_args_invalid() { +#[tokio::test] +async fn resolver_field_with_all_fragment_args_omitted() { + let input = include_str!("compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.expected"); + test_fixture(transform_fixture, file!(), "resolver-field-with-all-fragment-args-omitted.graphql", "compile_relay_artifacts/fixtures/resolver-field-with-all-fragment-args-omitted.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_field_with_all_runtime_args_omitted() { + let input = include_str!("compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.expected"); + test_fixture(transform_fixture, file!(), "resolver-field-with-all-runtime-args-omitted.graphql", "compile_relay_artifacts/fixtures/resolver-field-with-all-runtime-args-omitted.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_with_root_fragment_on_model_type() { + let input = include_str!("compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.expected"); + test_fixture(transform_fixture, file!(), "resolver-with-root-fragment-on-model-type.graphql", "compile_relay_artifacts/fixtures/resolver-with-root-fragment-on-model-type.expected", input, expected).await; +} + +#[tokio::test] +async fn same_fields_with_different_args_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/same-fields-with-different-args.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/same-fields-with-different-args.invalid.expected"); - test_fixture(transform_fixture, 
"same-fields-with-different-args.invalid.graphql", "compile_relay_artifacts/fixtures/same-fields-with-different-args.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "same-fields-with-different-args.invalid.graphql", "compile_relay_artifacts/fixtures/same-fields-with-different-args.invalid.expected", input, expected).await; } -#[test] -fn same_fields_with_different_args_variables_invalid() { +#[tokio::test] +async fn same_fields_with_different_args_variables_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/same_fields_with_different_args_variables.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/same_fields_with_different_args_variables.invalid.expected"); - test_fixture(transform_fixture, "same_fields_with_different_args_variables.invalid.graphql", "compile_relay_artifacts/fixtures/same_fields_with_different_args_variables.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "same_fields_with_different_args_variables.invalid.graphql", "compile_relay_artifacts/fixtures/same_fields_with_different_args_variables.invalid.expected", input, expected).await; } -#[test] -fn scalar_handle_field() { +#[tokio::test] +async fn scalar_handle_field() { let input = include_str!("compile_relay_artifacts/fixtures/scalar-handle-field.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/scalar-handle-field.expected"); - test_fixture(transform_fixture, "scalar-handle-field.graphql", "compile_relay_artifacts/fixtures/scalar-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-handle-field.graphql", "compile_relay_artifacts/fixtures/scalar-handle-field.expected", input, expected).await; } -#[test] -fn selection_set_conflict_added_argument() { +#[tokio::test] +async fn selection_set_conflict_added_argument() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_added_argument.graphql"); 
let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_added_argument.expected"); - test_fixture(transform_fixture, "selection_set_conflict_added_argument.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_added_argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_added_argument.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_added_argument.expected", input, expected).await; } -#[test] -fn selection_set_conflict_alias_covering_name() { +#[tokio::test] +async fn selection_set_conflict_alias_covering_name() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_alias_covering_name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_alias_covering_name.expected"); - test_fixture(transform_fixture, "selection_set_conflict_alias_covering_name.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_alias_covering_name.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_alias_covering_name.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_alias_covering_name.expected", input, expected).await; } -#[test] -fn selection_set_conflict_composite_vs_noncomposite() { +#[tokio::test] +async fn selection_set_conflict_composite_vs_noncomposite() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_composite_vs_noncomposite.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_composite_vs_noncomposite.expected"); - test_fixture(transform_fixture, "selection_set_conflict_composite_vs_noncomposite.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_composite_vs_noncomposite.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_composite_vs_noncomposite.graphql", 
"compile_relay_artifacts/fixtures/selection_set_conflict_composite_vs_noncomposite.expected", input, expected).await; } -#[test] -fn selection_set_conflict_conflicting_list_and_non_list_types() { +#[tokio::test] +async fn selection_set_conflict_conflicting_list_and_non_list_types() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected"); - test_fixture(transform_fixture, "selection_set_conflict_conflicting_list_and_non_list_types.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_conflicting_list_and_non_list_types.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types.expected", input, expected).await; } -#[test] -fn selection_set_conflict_conflicting_list_and_non_list_types_opposite_order() { +#[tokio::test] +async fn selection_set_conflict_conflicting_list_and_non_list_types_opposite_order() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected"); - test_fixture(transform_fixture, "selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.graphql", 
"compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_list_and_non_list_types_opposite_order.expected", input, expected).await; } -#[test] -fn selection_set_conflict_conflicting_nullable_and_non_nullable_types() { +#[tokio::test] +async fn selection_set_conflict_conflicting_nullable_and_non_nullable_types() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_nullable_and_non_nullable_types.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_nullable_and_non_nullable_types.expected"); - test_fixture(transform_fixture, "selection_set_conflict_conflicting_nullable_and_non_nullable_types.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_nullable_and_non_nullable_types.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_conflicting_nullable_and_non_nullable_types.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_nullable_and_non_nullable_types.expected", input, expected).await; } -#[test] -fn selection_set_conflict_conflicting_selection_sets_inside_list_type() { +#[tokio::test] +async fn selection_set_conflict_conflicting_selection_sets_inside_list_type() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type.expected"); - test_fixture(transform_fixture, "selection_set_conflict_conflicting_selection_sets_inside_list_type.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_conflicting_selection_sets_inside_list_type.graphql", 
"compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type.expected", input, expected).await; } -#[test] -fn selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts() { +#[tokio::test] +async fn selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.expected"); - test_fixture(transform_fixture, "selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_conflicting_selection_sets_inside_list_type_multiple_conflicts.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_aliases() { +#[tokio::test] +async fn selection_set_conflict_different_aliases() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_aliases.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_aliases.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_aliases.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_aliases.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_aliases.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_aliases.expected", input, expected).await; 
} -#[test] -fn selection_set_conflict_different_arguments() { +#[tokio::test] +async fn selection_set_conflict_different_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_arguments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_arguments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_arguments_with_list() { +#[tokio::test] +async fn selection_set_conflict_different_arguments_with_list() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments_with_list.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments_with_list.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_arguments_with_list.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments_with_list.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_arguments_with_list.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_arguments_with_list.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_name() { +#[tokio::test] +async fn selection_set_conflict_different_name() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_name.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_name.expected"); - test_fixture(transform_fixture, 
"selection_set_conflict_different_name.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_name.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_name.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_name.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_return_types_for_field_but_same_shape() { +#[tokio::test] +async fn selection_set_conflict_different_return_types_for_field_but_same_shape() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_return_types_for_field_but_same_shape.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_return_types_for_field_but_same_shape.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_return_types_for_field_but_same_shape.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_return_types_for_field_but_same_shape.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_return_types_for_field_but_same_shape.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_return_types_for_field_but_same_shape.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_types_with_conflict() { +#[tokio::test] +async fn selection_set_conflict_different_types_with_conflict() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_with_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "selection_set_conflict_different_types_with_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_types_with_conflict_different_shape() { +#[tokio::test] +async fn selection_set_conflict_different_types_with_conflict_different_shape() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_different_shape.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_different_shape.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_with_conflict_different_shape.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_different_shape.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_types_with_conflict_different_shape.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_different_shape.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments() { +#[tokio::test] +async fn selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_with_conflict_in_typeless_inline_fragments.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_types_without_conflict() { +#[tokio::test] +async fn selection_set_conflict_different_types_without_conflict() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_without_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_types_without_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict.expected", input, expected).await; } -#[test] -fn selection_set_conflict_different_types_without_conflict_1() { +#[tokio::test] +async fn selection_set_conflict_different_types_without_conflict_1() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_1.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_1.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_without_conflict_1.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_1.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_types_without_conflict_1.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_1.expected", input, 
expected).await; } -#[test] -fn selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments() { +#[tokio::test] +async fn selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.expected"); - test_fixture(transform_fixture, "selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_different_types_without_conflict_in_typeless_inline_fragments.expected", input, expected).await; } -#[test] -fn selection_set_conflict_inconsistent_stream_usage_1() { +#[tokio::test] +async fn selection_set_conflict_inconsistent_stream_usage_1() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected"); - test_fixture(transform_fixture, "selection_set_conflict_inconsistent_stream_usage_1.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_inconsistent_stream_usage_1.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_1.expected", input, expected).await; } -#[test] -fn 
selection_set_conflict_inconsistent_stream_usage_2() { +#[tokio::test] +async fn selection_set_conflict_inconsistent_stream_usage_2() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected"); - test_fixture(transform_fixture, "selection_set_conflict_inconsistent_stream_usage_2.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_inconsistent_stream_usage_2.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_inconsistent_stream_usage_2.expected", input, expected).await; } -#[test] -fn selection_set_conflict_invalid_same_fragments_in_different_contexts() { +#[tokio::test] +async fn selection_set_conflict_invalid_same_fragments_in_different_contexts() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_invalid_same_fragments_in_different_contexts.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_invalid_same_fragments_in_different_contexts.expected"); - test_fixture(transform_fixture, "selection_set_conflict_invalid_same_fragments_in_different_contexts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_invalid_same_fragments_in_different_contexts.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_invalid_same_fragments_in_different_contexts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_invalid_same_fragments_in_different_contexts.expected", input, expected).await; } -#[test] -fn selection_set_conflict_missing_argument() { +#[tokio::test] +async fn selection_set_conflict_missing_argument() { let input = 
include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_missing_argument.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_missing_argument.expected"); - test_fixture(transform_fixture, "selection_set_conflict_missing_argument.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_missing_argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_missing_argument.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_missing_argument.expected", input, expected).await; } -#[test] -fn selection_set_conflict_multiple_conflicts() { +#[tokio::test] +async fn selection_set_conflict_multiple_conflicts() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected"); - test_fixture(transform_fixture, "selection_set_conflict_multiple_conflicts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_multiple_conflicts.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts.expected", input, expected).await; } -#[test] -fn selection_set_conflict_multiple_conflicts_with_different_args() { +#[tokio::test] +async fn selection_set_conflict_multiple_conflicts_with_different_args() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts_with_different_args.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts_with_different_args.expected"); - test_fixture(transform_fixture, "selection_set_conflict_multiple_conflicts_with_different_args.graphql", 
"compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts_with_different_args.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_multiple_conflicts_with_different_args.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_multiple_conflicts_with_different_args.expected", input, expected).await; } -#[test] -fn selection_set_conflict_nested_conflict() { +#[tokio::test] +async fn selection_set_conflict_nested_conflict() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_nested_conflict.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_nested_conflict.expected"); - test_fixture(transform_fixture, "selection_set_conflict_nested_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_nested_conflict.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_nested_conflict.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_nested_conflict.expected", input, expected).await; } -#[test] -fn selection_set_conflict_stream_on_nodes_or_edges() { +#[tokio::test] +async fn selection_set_conflict_stream_on_nodes_or_edges() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected"); - test_fixture(transform_fixture, "selection_set_conflict_stream_on_nodes_or_edges.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_stream_on_nodes_or_edges.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges.expected", input, expected).await; } -#[test] -fn 
selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info() { +#[tokio::test] +async fn selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected"); - test_fixture(transform_fixture, "selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info.expected", input, expected).await; } -#[test] -fn selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias() { +#[tokio::test] +async fn selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected"); - test_fixture(transform_fixture, "selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_stream_on_nodes_or_edges_without_defer_on_page_info_and_page_info_alias.expected", input, expected).await; } -#[test] -fn selection_set_conflict_valid() { +#[tokio::test] +async fn selection_set_conflict_valid() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_valid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_valid.expected"); - test_fixture(transform_fixture, "selection_set_conflict_valid.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_valid.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_valid.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_valid.expected", input, expected).await; } -#[test] -fn selection_set_conflict_valid_stream() { +#[tokio::test] +async fn selection_set_conflict_valid_stream() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected"); - test_fixture(transform_fixture, "selection_set_conflict_valid_stream.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_valid_stream.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_valid_stream.expected", input, expected).await; } -#[test] -fn selection_set_conflict_with_fragment() { +#[tokio::test] +async fn selection_set_conflict_with_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_fragment.expected"); - 
test_fixture(transform_fixture, "selection_set_conflict_with_fragment.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_with_fragment.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_fragment.expected", input, expected).await; } -#[test] -fn selection_set_conflict_with_inline_fragment() { +#[tokio::test] +async fn selection_set_conflict_with_inline_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_inline_fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_inline_fragment.expected"); - test_fixture(transform_fixture, "selection_set_conflict_with_inline_fragment.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_inline_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_with_inline_fragment.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_inline_fragment.expected", input, expected).await; } -#[test] -fn selection_set_conflict_with_nested_fragments() { +#[tokio::test] +async fn selection_set_conflict_with_nested_fragments() { let input = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_nested_fragments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selection_set_conflict_with_nested_fragments.expected"); - test_fixture(transform_fixture, "selection_set_conflict_with_nested_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_nested_fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "selection_set_conflict_with_nested_fragments.graphql", "compile_relay_artifacts/fixtures/selection_set_conflict_with_nested_fragments.expected", input, expected).await; } -#[test] -fn selections_on_interface() { +#[tokio::test] 
+async fn selections_on_interface() { let input = include_str!("compile_relay_artifacts/fixtures/selections-on-interface.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/selections-on-interface.expected"); - test_fixture(transform_fixture, "selections-on-interface.graphql", "compile_relay_artifacts/fixtures/selections-on-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "selections-on-interface.graphql", "compile_relay_artifacts/fixtures/selections-on-interface.expected", input, expected).await; } -#[test] -fn sibling_client_selections() { +#[tokio::test] +async fn sibling_client_selections() { let input = include_str!("compile_relay_artifacts/fixtures/sibling-client-selections.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/sibling-client-selections.expected"); - test_fixture(transform_fixture, "sibling-client-selections.graphql", "compile_relay_artifacts/fixtures/sibling-client-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "sibling-client-selections.graphql", "compile_relay_artifacts/fixtures/sibling-client-selections.expected", input, expected).await; } -#[test] -fn spread_of_assignable_fragment() { +#[tokio::test] +async fn spread_of_assignable_fragment() { let input = include_str!("compile_relay_artifacts/fixtures/spread-of-assignable-fragment.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/spread-of-assignable-fragment.expected"); - test_fixture(transform_fixture, "spread-of-assignable-fragment.graphql", "compile_relay_artifacts/fixtures/spread-of-assignable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "spread-of-assignable-fragment.graphql", "compile_relay_artifacts/fixtures/spread-of-assignable-fragment.expected", input, expected).await; } -#[test] -fn stable_literals() { +#[tokio::test] +async fn stable_literals() { let input = 
include_str!("compile_relay_artifacts/fixtures/stable-literals.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/stable-literals.expected"); - test_fixture(transform_fixture, "stable-literals.graphql", "compile_relay_artifacts/fixtures/stable-literals.expected", input, expected); + test_fixture(transform_fixture, file!(), "stable-literals.graphql", "compile_relay_artifacts/fixtures/stable-literals.expected", input, expected).await; } -#[test] -fn stream_and_handle() { +#[tokio::test] +async fn stream_and_handle() { let input = include_str!("compile_relay_artifacts/fixtures/stream-and-handle.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/stream-and-handle.expected"); - test_fixture(transform_fixture, "stream-and-handle.graphql", "compile_relay_artifacts/fixtures/stream-and-handle.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-and-handle.graphql", "compile_relay_artifacts/fixtures/stream-and-handle.expected", input, expected).await; } -#[test] -fn stream_connection() { +#[tokio::test] +async fn stream_connection() { let input = include_str!("compile_relay_artifacts/fixtures/stream-connection.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/stream-connection.expected"); - test_fixture(transform_fixture, "stream-connection.graphql", "compile_relay_artifacts/fixtures/stream-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection.graphql", "compile_relay_artifacts/fixtures/stream-connection.expected", input, expected).await; } -#[test] -fn stream_connection_conditional() { +#[tokio::test] +async fn stream_connection_conditional() { let input = include_str!("compile_relay_artifacts/fixtures/stream-connection-conditional.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/stream-connection-conditional.expected"); - test_fixture(transform_fixture, "stream-connection-conditional.graphql", 
"compile_relay_artifacts/fixtures/stream-connection-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection-conditional.graphql", "compile_relay_artifacts/fixtures/stream-connection-conditional.expected", input, expected).await; } -#[test] -fn stream_if_arguments() { +#[tokio::test] +async fn stream_if_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/stream_if_arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/stream_if_arguments.expected"); - test_fixture(transform_fixture, "stream_if_arguments.graphql", "compile_relay_artifacts/fixtures/stream_if_arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream_if_arguments.graphql", "compile_relay_artifacts/fixtures/stream_if_arguments.expected", input, expected).await; } -#[test] -fn supported_arg() { +#[tokio::test] +async fn supported_arg() { let input = include_str!("compile_relay_artifacts/fixtures/supported_arg.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/supported_arg.expected"); - test_fixture(transform_fixture, "supported_arg.graphql", "compile_relay_artifacts/fixtures/supported_arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "supported_arg.graphql", "compile_relay_artifacts/fixtures/supported_arg.expected", input, expected).await; } -#[test] -fn supported_arg_non_static_invalid() { +#[tokio::test] +async fn supported_arg_non_static_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/supported_arg_non_static.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/supported_arg_non_static.invalid.expected"); - test_fixture(transform_fixture, "supported_arg_non_static.invalid.graphql", "compile_relay_artifacts/fixtures/supported_arg_non_static.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "supported_arg_non_static.invalid.graphql", 
"compile_relay_artifacts/fixtures/supported_arg_non_static.invalid.expected", input, expected).await; } -#[test] -fn unions() { +#[tokio::test] +async fn unions() { let input = include_str!("compile_relay_artifacts/fixtures/unions.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unions.expected"); - test_fixture(transform_fixture, "unions.graphql", "compile_relay_artifacts/fixtures/unions.expected", input, expected); + test_fixture(transform_fixture, file!(), "unions.graphql", "compile_relay_artifacts/fixtures/unions.expected", input, expected).await; } -#[test] -fn unknown_root_variable_in_fragment_invalid() { +#[tokio::test] +async fn unknown_root_variable_in_fragment_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/unknown-root-variable-in-fragment.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unknown-root-variable-in-fragment.invalid.expected"); - test_fixture(transform_fixture, "unknown-root-variable-in-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/unknown-root-variable-in-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unknown-root-variable-in-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/unknown-root-variable-in-fragment.invalid.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads_dup_arguments() { +#[tokio::test] +async fn unmasked_fragment_spreads_dup_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-dup-arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-dup-arguments.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads-dup-arguments.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-dup-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads-dup-arguments.graphql", 
"compile_relay_artifacts/fixtures/unmasked-fragment-spreads-dup-arguments.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads_global_arguments() { +#[tokio::test] +async fn unmasked_fragment_spreads_global_arguments() { let input = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-global-arguments.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-global-arguments.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads-global-arguments.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-global-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads-global-arguments.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-global-arguments.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads_local_arguments_invalid() { +#[tokio::test] +async fn unmasked_fragment_spreads_local_arguments_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-local-arguments.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-local-arguments.invalid.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads-local-arguments.invalid.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-local-arguments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads-local-arguments.invalid.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-local-arguments.invalid.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads_on_query() { +#[tokio::test] +async fn unmasked_fragment_spreads_on_query() { let input = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-on-query.graphql"); let expected = 
include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-on-query.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads-on-query.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-on-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads-on-query.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-on-query.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads_recursive() { +#[tokio::test] +async fn unmasked_fragment_spreads_recursive() { let input = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-recursive.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unmasked-fragment-spreads-recursive.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads-recursive.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-recursive.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads-recursive.graphql", "compile_relay_artifacts/fixtures/unmasked-fragment-spreads-recursive.expected", input, expected).await; } -#[test] -fn unused_fragment_arg_invalid() { +#[tokio::test] +async fn unused_fragment_arg_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/unused_fragment_arg.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unused_fragment_arg.invalid.expected"); - test_fixture(transform_fixture, "unused_fragment_arg.invalid.graphql", "compile_relay_artifacts/fixtures/unused_fragment_arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unused_fragment_arg.invalid.graphql", "compile_relay_artifacts/fixtures/unused_fragment_arg.invalid.expected", input, expected).await; } -#[test] -fn unused_fragment_arg_unchecked() { +#[tokio::test] +async fn unused_fragment_arg_unchecked() { let input = 
include_str!("compile_relay_artifacts/fixtures/unused_fragment_arg_unchecked.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unused_fragment_arg_unchecked.expected"); - test_fixture(transform_fixture, "unused_fragment_arg_unchecked.graphql", "compile_relay_artifacts/fixtures/unused_fragment_arg_unchecked.expected", input, expected); + test_fixture(transform_fixture, file!(), "unused_fragment_arg_unchecked.graphql", "compile_relay_artifacts/fixtures/unused_fragment_arg_unchecked.expected", input, expected).await; } -#[test] -fn unused_fragment_argdef_invalid() { +#[tokio::test] +async fn unused_fragment_argdef_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/unused_fragment_argdef.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unused_fragment_argdef.invalid.expected"); - test_fixture(transform_fixture, "unused_fragment_argdef.invalid.graphql", "compile_relay_artifacts/fixtures/unused_fragment_argdef.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unused_fragment_argdef.invalid.graphql", "compile_relay_artifacts/fixtures/unused_fragment_argdef.invalid.expected", input, expected).await; } -#[test] -fn unused_fragment_argdef_invalid_suppression_arg_invalid() { +#[tokio::test] +async fn unused_fragment_argdef_invalid_suppression_arg_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/unused_fragment_argdef_invalid_suppression_arg.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unused_fragment_argdef_invalid_suppression_arg.invalid.expected"); - test_fixture(transform_fixture, "unused_fragment_argdef_invalid_suppression_arg.invalid.graphql", "compile_relay_artifacts/fixtures/unused_fragment_argdef_invalid_suppression_arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unused_fragment_argdef_invalid_suppression_arg.invalid.graphql", 
"compile_relay_artifacts/fixtures/unused_fragment_argdef_invalid_suppression_arg.invalid.expected", input, expected).await; } -#[test] -fn unused_variables_removed_from_print_not_codegen() { +#[tokio::test] +async fn unused_variables_removed_from_print_not_codegen() { let input = include_str!("compile_relay_artifacts/fixtures/unused-variables-removed-from-print-not-codegen.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/unused-variables-removed-from-print-not-codegen.expected"); - test_fixture(transform_fixture, "unused-variables-removed-from-print-not-codegen.graphql", "compile_relay_artifacts/fixtures/unused-variables-removed-from-print-not-codegen.expected", input, expected); + test_fixture(transform_fixture, file!(), "unused-variables-removed-from-print-not-codegen.graphql", "compile_relay_artifacts/fixtures/unused-variables-removed-from-print-not-codegen.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread() { +#[tokio::test] +async fn updatable_fragment_spread() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_with_defer_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_with_defer_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-defer.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-defer.invalid.expected"); - test_fixture(transform_fixture, 
"updatable-fragment-spread-with-defer.invalid.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-defer.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-with-defer.invalid.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-defer.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_with_include_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_with_include_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-include.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-include.invalid.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-with-include.invalid.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-include.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-with-include.invalid.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-include.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_with_typename_sibling() { +#[tokio::test] +async fn updatable_fragment_spread_with_typename_sibling() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-typename-sibling.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-typename-sibling.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-with-typename-sibling.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-typename-sibling.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-with-typename-sibling.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-typename-sibling.expected", input, expected).await; } -#[test] -fn 
updatable_fragment_spread_with_unused_variables() { +#[tokio::test] +async fn updatable_fragment_spread_with_unused_variables() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-unused-variables.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-unused-variables.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-with-unused-variables.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-unused-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-with-unused-variables.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-unused-variables.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_with_variables() { +#[tokio::test] +async fn updatable_fragment_spread_with_variables() { let input = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-variables.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/updatable-fragment-spread-with-variables.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-with-variables.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-with-variables.graphql", "compile_relay_artifacts/fixtures/updatable-fragment-spread-with-variables.expected", input, expected).await; } -#[test] -fn validate_global_variables_invalid() { +#[tokio::test] +async fn validate_global_variables_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/validate-global-variables.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/validate-global-variables.invalid.expected"); - test_fixture(transform_fixture, "validate-global-variables.invalid.graphql", 
"compile_relay_artifacts/fixtures/validate-global-variables.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate-global-variables.invalid.graphql", "compile_relay_artifacts/fixtures/validate-global-variables.invalid.expected", input, expected).await; } -#[test] -fn validate_global_variables_shared_fragment_invalid() { +#[tokio::test] +async fn validate_global_variables_shared_fragment_invalid() { let input = include_str!("compile_relay_artifacts/fixtures/validate-global-variables-shared-fragment.invalid.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/validate-global-variables-shared-fragment.invalid.expected"); - test_fixture(transform_fixture, "validate-global-variables-shared-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/validate-global-variables-shared-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate-global-variables-shared-fragment.invalid.graphql", "compile_relay_artifacts/fixtures/validate-global-variables-shared-fragment.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn validate_global_variables_undefined_invalid() { + let input = include_str!("compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.graphql"); + let expected = include_str!("compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.expected"); + test_fixture(transform_fixture, file!(), "validate-global-variables-undefined.invalid.graphql", "compile_relay_artifacts/fixtures/validate-global-variables-undefined.invalid.expected", input, expected).await; } -#[test] -fn viewer_query() { +#[tokio::test] +async fn viewer_query() { let input = include_str!("compile_relay_artifacts/fixtures/viewer-query.graphql"); let expected = include_str!("compile_relay_artifacts/fixtures/viewer-query.expected"); - test_fixture(transform_fixture, "viewer-query.graphql", "compile_relay_artifacts/fixtures/viewer-query.expected", 
input, expected); + test_fixture(transform_fixture, file!(), "viewer-query.graphql", "compile_relay_artifacts/fixtures/viewer-query.expected", input, expected).await; } diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id.rs new file mode 100644 index 0000000000000..f870d300a3075 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id.rs @@ -0,0 +1,199 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ConsoleLogger; +use common::FeatureFlag; +use common::FeatureFlags; +use common::NamedItem; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentVariablesSemantic; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::RelayMode; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_full_operation; +use intern::string_key::Intern; +use relay_codegen::build_request_params; +use relay_codegen::print_fragment; +use relay_codegen::print_operation; +use relay_codegen::print_request; +use relay_codegen::JsModuleFormat; +use relay_compiler::validate; +use relay_compiler::ProjectConfig; +use relay_config::ProjectName; +use relay_config::SchemaConfig; +use relay_test_schema::get_test_schema_with_custom_id; +use relay_test_schema::get_test_schema_with_custom_id_with_extensions; +use relay_transforms::apply_transforms; +use relay_transforms::DIRECTIVE_SPLIT_OPERATION; + +pub async fn transform_fixture(fixture: 
&Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + if fixture.content.contains("%TODO%") { + if fixture.content.contains("expected-to-throw") { + return Err("TODO".to_string()); + } + return Ok("TODO".to_string()); + } + + let node_interface_query_variable_name = + if fixture.content.contains("# use-custom-variable-name") { + Some("variable_name".intern()) + } else { + None + }; + + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + let (base, schema) = match parts.as_slice() { + [base, extensions] => ( + base, + get_test_schema_with_custom_id_with_extensions(extensions), + ), + [base] => (base, get_test_schema_with_custom_id()), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let ast = parse_executable(base, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let ir_result = build_ir_with_extra_features( + &schema, + &ast.definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: false, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(RelayMode), + default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility + }, + ); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let feature_flags = FeatureFlags { + no_inline: FeatureFlag::Enabled, + enable_relay_resolver_transform: true, + enable_catch_directive_transform: FeatureFlag::Disabled, + enable_3d_branch_arg_generation: true, + actor_change_support: FeatureFlag::Enabled, + text_artifacts: FeatureFlag::Disabled, + skip_printing_nulls: FeatureFlag::Disabled, + enable_fragment_aliases: FeatureFlag::Enabled, + compact_query_text: FeatureFlag::Disabled, + emit_normalization_nodes_for_client_edges: true, + ..Default::default() + }; + + let 
default_schema_config = SchemaConfig::default(); + + let project_config = ProjectConfig { + name: ProjectName::default(), + feature_flags: Arc::new(feature_flags), + schema_config: SchemaConfig { + node_interface_id_field: "global_id".intern(), + node_interface_id_variable_name: node_interface_query_variable_name + .unwrap_or(default_schema_config.node_interface_id_variable_name), + ..default_schema_config + }, + js_module_format: JsModuleFormat::Haste, + ..Default::default() + }; + + validate(&program, &project_config, &None) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + // TODO pass base fragment names + let programs = apply_transforms( + &project_config, + Arc::new(program), + Default::default(), + Arc::new(ConsoleLogger), + None, + None, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let mut operations: Vec<&std::sync::Arc> = + programs.normalization.operations().collect(); + operations.sort_by_key(|operation| operation.name.item.0); + let result = operations + .into_iter() + .map(|operation| { + if operation + .directives + .named(*DIRECTIVE_SPLIT_OPERATION) + .is_some() + { + let mut import_statements = Default::default(); + let operation = + print_operation(&schema, operation, &project_config, &mut import_statements); + format!("{}{}", import_statements, operation) + } else { + let name = operation.name.item.0; + let print_operation_node = programs + .operation_text + .operation(OperationDefinitionName(name)); + let text = print_operation_node.map_or_else( + || "Query Text is Empty.".to_string(), + |print_operation_node| { + print_full_operation( + &programs.operation_text, + print_operation_node, + Default::default(), + ) + }, + ); + + let reader_operation = programs + .reader + .operation(OperationDefinitionName(name)) + .expect("a reader fragment should be generated for this operation"); + let operation_fragment = FragmentDefinition { + name: 
reader_operation.name.map(|x| FragmentDefinitionName(x.0)), + variable_definitions: reader_operation.variable_definitions.clone(), + selections: reader_operation.selections.clone(), + used_global_variables: Default::default(), + directives: reader_operation.directives.clone(), + type_condition: reader_operation.type_, + }; + let request_parameters = build_request_params(operation); + let mut import_statements = Default::default(); + let request = print_request( + &schema, + operation, + &operation_fragment, + request_parameters, + &project_config, + &mut import_statements, + ); + format!("{}{}\n\nQUERY:\n\n{}", import_statements, request, text) + } + }) + .chain({ + let mut fragments: Vec<&std::sync::Arc> = + programs.reader.fragments().collect(); + fragments.sort_by_key(|fragment| fragment.name.item); + fragments.into_iter().map(|fragment| { + let mut import_statements = Default::default(); + let fragment = + print_fragment(&schema, fragment, &project_config, &mut import_statements); + format!("{}{}", import_statements, fragment) + }) + }) + .collect::>(); + Ok(result.join("\n\n")) +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.expected new file mode 100644 index 0000000000000..c02cc1d9e20c8 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.expected @@ -0,0 +1,284 @@ +==================================== INPUT ==================================== +fragment fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment on Node + @refetchable(queryName: "RefetchableFragmentQuery") { + global_id + ... 
on User { + name + ...fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture + } +} + +fragment fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "size" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + } + ], + "kind": "Operation", + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "TypeDiscriminator", + "abstractKey": "__isNode" + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + 
"storageKey": null + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "size", + "variableName": "size" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "31dcd57f42128b8d975e6224202a17c9", + "id": null, + "metadata": {}, + "name": "RefetchableFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableFragmentQuery( + $size: [Int] + $global_id: ID! +) { + node(global_id: $global_id) { + __typename + ...fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment + global_id + } +} + +fragment fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} + +fragment fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment on Node { + __isNode: __typename + global_id + ... 
on User { + name + ...fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture + } +} + + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "size", + "variableName": "size" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +} + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('RefetchableFragmentQuery.graphql'), + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "args": null, + "kind": "FragmentSpread", + "name": "fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture" + } + ], + "type": "User", + "abstractKey": null + } + ], + "type": "Node", + "abstractKey": "__isNode" +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.graphql new 
file mode 100644 index 0000000000000..c03762dffdf43 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.graphql @@ -0,0 +1,14 @@ +fragment fragmentOnNodeInterfaceWithCustomVariableName_RefetchableFragment on Node + @refetchable(queryName: "RefetchableFragmentQuery") { + global_id + ... on User { + name + ...fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture + } +} + +fragment fragmentOnNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected index 01249a6d7a551..0bc7f6caab0e9 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected @@ -244,7 +244,10 @@ fragment fragmentOnNodeInterface_RefetchableFragment on Node { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "global_id" + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "id" + } } }, "name": "fragmentOnNodeInterface_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.expected new file mode 100644 index 0000000000000..48b7381920e76 --- /dev/null +++ 
b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.expected @@ -0,0 +1,269 @@ +==================================== INPUT ==================================== +# use-custom-variable-name +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment on User +@refetchable(queryName: "RefetchableFragmentQuery") { + global_id + name + ...fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture +} + +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "size" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + } + ], + "kind": "Operation", + "name": "RefetchableFragmentQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, 
+ "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "size", + "variableName": "size" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "31dcd57f42128b8d975e6224202a17c9", + "id": null, + "metadata": {}, + "name": "RefetchableFragmentQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableFragmentQuery( + $size: [Int] + $global_id: ID! 
+) { + node(global_id: $global_id) { + __typename + ...fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment + global_id + } +} + +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} + +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment on User { + global_id + name + ...fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture +} + + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "size", + "variableName": "size" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +} + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "size" + } + ], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('RefetchableFragmentQuery.graphql'), + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "variable_name" + } + } + }, + "name": "fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "args": null, + "kind": "FragmentSpread", + "name": 
"fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture" + } + ], + "type": "User", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.graphql new file mode 100644 index 0000000000000..bf5b8421d8dc2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.graphql @@ -0,0 +1,13 @@ +# use-custom-variable-name +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_RefetchableFragment on User +@refetchable(queryName: "RefetchableFragmentQuery") { + global_id + name + ...fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture +} + +fragment fragmentOnObjectImplementingNodeInterfaceWithCustomVariableName_ProfilePicture on User { + profilePicture(size: $size) { + uri + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected index 5f0a60287bc01..aaa405702b77c 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected @@ -235,7 +235,10 @@ fragment fragmentOnObjectImplementingNodeInterface_RefetchableFragment on User { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": 
"global_id" + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "id" + } } }, "name": "fragmentOnObjectImplementingNodeInterface_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.expected new file mode 100644 index 0000000000000..1a010ecc7bb53 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.expected @@ -0,0 +1,419 @@ +==================================== INPUT ==================================== +# use-custom-variable-name +fragment refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback on Feedback +@refetchable(queryName: "RefetchableConnectionQuery") { + global_id + comments(first: $count, after: $cursor) + @connection(key: "RefetchableConnection_comments") { + edges { + cursor + node { + global_id + } + } + pageInfo { + endCursor + hasNextPage + } + } +} +==================================== OUTPUT =================================== +{ + "fragment": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "count" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "cursor" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RefetchableConnectionQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": 
"refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "count" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "cursor" + }, + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "global_id" + } + ], + "kind": "Operation", + "name": "RefetchableConnectionQuery", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "global_id", + "variableName": "global_id" + } + ], + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "after", + "variableName": "cursor" + }, + { + "kind": "Variable", + "name": "first", + "variableName": "count" + } + ], + "concreteType": "CommentsConnection", + "kind": "LinkedField", + "name": "comments", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "CommentsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + 
"storageKey": null + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "after", + "variableName": "cursor" + }, + { + "kind": "Variable", + "name": "first", + "variableName": "count" + } + ], + "filters": null, + "handle": "connection", + "key": "RefetchableConnection_comments", + "kind": "LinkedHandle", + "name": "comments" + } + ], + "type": "Feedback", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "5b0bba64f5feedde7f799aa3e6ec8e76", + "id": null, + "metadata": {}, + "name": "RefetchableConnectionQuery", + "operationKind": "query", + "text": null + } +} + +QUERY: + +query RefetchableConnectionQuery( + $count: Int + $cursor: ID + $global_id: ID! 
+) { + node(global_id: $global_id) { + __typename + ...refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback + global_id + } +} + +fragment refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback on Feedback { + global_id + comments(first: $count, after: $cursor) { + edges { + cursor + node { + global_id + __typename + } + } + pageInfo { + endCursor + hasNextPage + } + } +} + + +{ + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "count" + }, + { + "kind": "RootArgument", + "name": "cursor" + } + ], + "kind": "Fragment", + "metadata": { + "connection": [ + { + "count": "count", + "cursor": "cursor", + "direction": "forward", + "path": [ + "comments" + ] + } + ], + "refetch": { + "connection": { + "forward": { + "count": "count", + "cursor": "cursor" + }, + "backward": null, + "path": [ + "comments" + ] + }, + "fragmentPathInResult": [ + "node" + ], + "operation": require('RefetchableConnectionQuery.graphql'), + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "variable_name" + } + } + }, + "name": "refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "global_id", + "storageKey": null + }, + { + "alias": "comments", + "args": null, + "concreteType": "CommentsConnection", + "kind": "LinkedField", + "name": "__RefetchableConnection_comments_connection", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "CommentsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": 
"global_id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Feedback", + "abstractKey": null +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.graphql b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.graphql new file mode 100644 index 0000000000000..3ef023f0dfce9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.graphql @@ -0,0 +1,18 @@ +# use-custom-variable-name +fragment refetchableConnectionWithCustomVariableName_RefetchableConnection_feedback on Feedback +@refetchable(queryName: "RefetchableConnectionQuery") { + global_id + comments(first: $count, after: $cursor) + @connection(key: "RefetchableConnection_comments") { + edges { + cursor + node { + global_id + } + } + pageInfo { + endCursor + hasNextPage + } + } +} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected index 006ab5afb60b9..3263f18d6fc58 100644 --- 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected @@ -318,7 +318,10 @@ fragment refetchableConnection_RefetchableConnection_feedback on Feedback { "node" ], "operation": require('RefetchableConnectionQuery.graphql'), - "identifierField": "global_id" + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableConnection_RefetchableConnection_feedback", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected index 551bbf5f813d8..f2517313ed6b7 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected @@ -243,7 +243,10 @@ fragment refetchableFragmentOnNodeWithMissingId_RefetchableFragment on Node { "node" ], "operation": require('RefetchableFragmentQuery.graphql'), - "identifierField": "global_id" + "identifierInfo": { + "identifierField": "global_id", + "identifierQueryVariableName": "id" + } } }, "name": "refetchableFragmentOnNodeWithMissingId_RefetchableFragment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected index a152799d01b9e..467269a00f77e 100644 --- 
a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected @@ -13,7 +13,7 @@ query refetchableConflictWithOperationQuery { } } ==================================== ERROR ==================================== -✖︎ A unique query name has to be specified in `@refetchable`, an operation `refetchableConflictWithOperationQuery` already exists. +✖︎ The `queryName` specified in `@refetchable` must be unique, a definition with the name `refetchableConflictWithOperationQuery` already exists. refetchable_conflict_with_operation.invalid.graphql:4:27 3 │ fragment refetchableConflictWithOperationF on Node diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected index 3838f9592455b..9e6634aaab7e2 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected @@ -114,9 +114,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { { "kind": "Literal", "name": "supported", - "value": [ - "PlainCommentBody" - ] + "value": "2Rll6p" } ], "concreteType": null, @@ -200,7 +198,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { ] } ], - "storageKey": "commentBody(supported:[\"PlainCommentBody\"])" + "storageKey": "commentBody(supported:\"2Rll6p\")" } ], "type": "Comment", @@ -323,9 +321,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) 
{ { "kind": "Literal", "name": "supported", - "value": [ - "PlainCommentBody" - ] + "value": "2Rll6p" } ], "concreteType": null, @@ -409,7 +405,7 @@ query relayClientIdField_RelayClientIDFieldQuery($id: ID!) { ] } ], - "storageKey": "commentBody(supported:[\"PlainCommentBody\"])" + "storageKey": "commentBody(supported:\"2Rll6p\")" } ], "type": "Comment", diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/mod.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/mod.rs deleted file mode 100644 index 681ecb6a94040..0000000000000 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id/mod.rs +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::ConsoleLogger; -use common::FeatureFlag; -use common::FeatureFlags; -use common::NamedItem; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build_ir_with_extra_features; -use graphql_ir::BuilderOptions; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentVariablesSemantic; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::RelayMode; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_full_operation; -use intern::string_key::Intern; -use relay_codegen::build_request_params; -use relay_codegen::print_fragment; -use relay_codegen::print_operation; -use relay_codegen::print_request; -use relay_codegen::JsModuleFormat; -use relay_compiler::validate; -use relay_compiler::ProjectConfig; -use relay_config::SchemaConfig; -use relay_test_schema::get_test_schema_with_custom_id; -use 
relay_test_schema::get_test_schema_with_custom_id_with_extensions; -use relay_transforms::apply_transforms; -use relay_transforms::DIRECTIVE_SPLIT_OPERATION; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - if fixture.content.contains("%TODO%") { - if fixture.content.contains("expected-to-throw") { - return Err("TODO".to_string()); - } - return Ok("TODO".to_string()); - } - - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - let (base, schema) = match parts.as_slice() { - [base, extensions] => ( - base, - get_test_schema_with_custom_id_with_extensions(extensions), - ), - [base] => (base, get_test_schema_with_custom_id()), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let ast = parse_executable(base, source_location) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let ir_result = build_ir_with_extra_features( - &schema, - &ast.definitions, - &BuilderOptions { - allow_undefined_fragment_spreads: false, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: Some(RelayMode), - default_anonymous_operation_name: None, - }, - ); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let feature_flags = FeatureFlags { - enable_flight_transform: true, - hash_supported_argument: FeatureFlag::Disabled, - no_inline: FeatureFlag::Enabled, - enable_relay_resolver_transform: true, - enable_3d_branch_arg_generation: true, - actor_change_support: FeatureFlag::Enabled, - text_artifacts: FeatureFlag::Disabled, - enable_client_edges: FeatureFlag::Enabled, - skip_printing_nulls: FeatureFlag::Disabled, - enable_fragment_aliases: FeatureFlag::Enabled, - compact_query_text: FeatureFlag::Disabled, - emit_normalization_nodes_for_client_edges: true, - 
relay_resolver_enable_output_type: FeatureFlag::Disabled, - }; - - let project_config = ProjectConfig { - name: "test".intern(), - feature_flags: Arc::new(feature_flags), - schema_config: SchemaConfig { - node_interface_id_field: "global_id".intern(), - ..Default::default() - }, - js_module_format: JsModuleFormat::Haste, - ..Default::default() - }; - - validate(&program, &project_config, &None) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - // TODO pass base fragment names - let programs = apply_transforms( - &project_config, - Arc::new(program), - Default::default(), - Arc::new(ConsoleLogger), - None, - None, - ) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let mut operations: Vec<&std::sync::Arc> = - programs.normalization.operations().collect(); - operations.sort_by_key(|operation| operation.name.item.0); - let result = operations - .into_iter() - .map(|operation| { - if operation - .directives - .named(*DIRECTIVE_SPLIT_OPERATION) - .is_some() - { - let mut import_statements = Default::default(); - let operation = - print_operation(&schema, operation, &project_config, &mut import_statements); - format!("{}{}", import_statements, operation) - } else { - let name = operation.name.item.0; - let print_operation_node = programs - .operation_text - .operation(OperationDefinitionName(name)); - let text = print_operation_node.map_or_else( - || "Query Text is Empty.".to_string(), - |print_operation_node| { - print_full_operation( - &programs.operation_text, - print_operation_node, - Default::default(), - ) - }, - ); - - let reader_operation = programs - .reader - .operation(OperationDefinitionName(name)) - .expect("a reader fragment should be generated for this operation"); - let operation_fragment = FragmentDefinition { - name: reader_operation.name.map(|x| FragmentDefinitionName(x.0)), - variable_definitions: reader_operation.variable_definitions.clone(), - selections: 
reader_operation.selections.clone(), - used_global_variables: Default::default(), - directives: reader_operation.directives.clone(), - type_condition: reader_operation.type_, - }; - let request_parameters = build_request_params(operation); - let mut import_statements = Default::default(); - let request = print_request( - &schema, - operation, - &operation_fragment, - request_parameters, - &project_config, - &mut import_statements, - ); - format!("{}{}\n\nQUERY:\n\n{}", import_statements, request, text) - } - }) - .chain({ - let mut fragments: Vec<&std::sync::Arc> = - programs.reader.fragments().collect(); - fragments.sort_by_key(|fragment| fragment.name.item); - fragments.into_iter().map(|fragment| { - let mut import_statements = Default::default(); - let fragment = - print_fragment(&schema, fragment, &project_config, &mut import_statements); - format!("{}{}", import_statements, fragment) - }) - }) - .collect::>(); - Ok(result.join("\n\n")) -} diff --git a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id_test.rs b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id_test.rs index 2c97bed430c07..b6fd1f7425c94 100644 --- a/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id_test.rs +++ b/compiler/crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<029b71da6b224936e7ca559c10f51c93>> + * @generated SignedSource<<7526923a808e2853f811c33eb0dff9c5>> */ mod compile_relay_artifacts_with_custom_id; @@ -12,114 +12,135 @@ mod compile_relay_artifacts_with_custom_id; use compile_relay_artifacts_with_custom_id::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_on_node_interface() { +#[tokio::test] +async fn fragment_on_node_interface() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-node-interface.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-node-interface.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface.expected", input, expected).await; } -#[test] -fn fragment_on_object_implementing_node_interface() { +#[tokio::test] +async fn fragment_on_node_interface_with_custom_variable_name() { + let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.graphql"); + let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.expected"); + test_fixture(transform_fixture, file!(), "fragment-on-node-interface-with-custom-variable-name.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-node-interface-with-custom-variable-name.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_on_object_implementing_node_interface() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.graphql"); let expected = 
include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-object-implementing-node-interface.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_on_object_implementing_node_interface_with_custom_variable_name() { + let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.graphql"); + let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.expected"); + test_fixture(transform_fixture, file!(), "fragment-on-object-implementing-node-interface-with-custom-variable-name.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-object-implementing-node-interface-with-custom-variable-name.expected", input, expected).await; } -#[test] -fn fragment_on_query() { +#[tokio::test] +async fn fragment_on_query() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query.expected"); - test_fixture(transform_fixture, "fragment-on-query.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query.expected", input, expected).await; } -#[test] -fn fragment_on_query_with_cycle_invalid() { +#[tokio::test] +async fn 
fragment_on_query_with_cycle_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query-with-cycle.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query-with-cycle.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-query-with-cycle.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query-with-cycle.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query-with-cycle.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-query-with-cycle.invalid.expected", input, expected).await; } -#[test] -fn fragment_on_viewer() { +#[tokio::test] +async fn fragment_on_viewer() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-viewer.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/fragment-on-viewer.expected"); - test_fixture(transform_fixture, "fragment-on-viewer.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-viewer.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-viewer.graphql", "compile_relay_artifacts_with_custom_id/fixtures/fragment-on-viewer.expected", input, expected).await; } -#[test] -fn id_as_alias_invalid() { +#[tokio::test] +async fn id_as_alias_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/id-as-alias.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/id-as-alias.invalid.expected"); - test_fixture(transform_fixture, "id-as-alias.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/id-as-alias.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "id-as-alias.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/id-as-alias.invalid.expected", input, expected).await; } -#[test] -fn 
kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/kitchen-sink.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "compile_relay_artifacts_with_custom_id/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "compile_relay_artifacts_with_custom_id/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn refetchable_conflict_with_operation_invalid() { +#[tokio::test] +async fn refetchable_conflict_with_operation_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected"); - test_fixture(transform_fixture, "refetchable_conflict_with_operation.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable_conflict_with_operation.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_operation.invalid.expected", input, expected).await; } -#[test] -fn refetchable_conflict_with_refetchable_invalid() { +#[tokio::test] +async fn refetchable_conflict_with_refetchable_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_refetchable.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_refetchable.invalid.expected"); - test_fixture(transform_fixture, "refetchable_conflict_with_refetchable.invalid.graphql", 
"compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_refetchable.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable_conflict_with_refetchable.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable_conflict_with_refetchable.invalid.expected", input, expected).await; } -#[test] -fn refetchable_connection() { +#[tokio::test] +async fn refetchable_connection() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected"); - test_fixture(transform_fixture, "refetchable-connection.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-connection.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection.expected", input, expected).await; +} + +#[tokio::test] +async fn refetchable_connection_with_custom_variable_name() { + let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.graphql"); + let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.expected"); + test_fixture(transform_fixture, file!(), "refetchable-connection-with-custom-variable-name.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-connection-with-custom-variable-name.expected", input, expected).await; } -#[test] -fn refetchable_fragment_on_node_with_missing_id() { +#[tokio::test] +async fn refetchable_fragment_on_node_with_missing_id() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.graphql"); let expected = 
include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected"); - test_fixture(transform_fixture, "refetchable-fragment-on-node-with-missing-id.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-on-node-with-missing-id.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-fragment-on-node-with-missing-id.expected", input, expected).await; } -#[test] -fn refetchable_with_arguments_conflicting_invalid() { +#[tokio::test] +async fn refetchable_with_arguments_conflicting_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments-conflicting.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments-conflicting.invalid.expected"); - test_fixture(transform_fixture, "refetchable-with-arguments-conflicting.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments-conflicting.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-with-arguments-conflicting.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments-conflicting.invalid.expected", input, expected).await; } -#[test] -fn refetchable_with_arguments_invalid() { +#[tokio::test] +async fn refetchable_with_arguments_invalid() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments.invalid.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments.invalid.expected"); - test_fixture(transform_fixture, "refetchable-with-arguments.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments.invalid.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "refetchable-with-arguments.invalid.graphql", "compile_relay_artifacts_with_custom_id/fixtures/refetchable-with-arguments.invalid.expected", input, expected).await; } -#[test] -fn relay_client_id_field() { +#[tokio::test] +async fn relay_client_id_field() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected"); - test_fixture(transform_fixture, "relay-client-id-field.graphql", "compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-client-id-field.graphql", "compile_relay_artifacts_with_custom_id/fixtures/relay-client-id-field.expected", input, expected).await; } -#[test] -fn unions() { +#[tokio::test] +async fn unions() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/unions.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/unions.expected"); - test_fixture(transform_fixture, "unions.graphql", "compile_relay_artifacts_with_custom_id/fixtures/unions.expected", input, expected); + test_fixture(transform_fixture, file!(), "unions.graphql", "compile_relay_artifacts_with_custom_id/fixtures/unions.expected", input, expected).await; } -#[test] -fn viewer_query() { +#[tokio::test] +async fn viewer_query() { let input = include_str!("compile_relay_artifacts_with_custom_id/fixtures/viewer-query.graphql"); let expected = include_str!("compile_relay_artifacts_with_custom_id/fixtures/viewer-query.expected"); - test_fixture(transform_fixture, "viewer-query.graphql", "compile_relay_artifacts_with_custom_id/fixtures/viewer-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "viewer-query.graphql", "compile_relay_artifacts_with_custom_id/fixtures/viewer-query.expected", input, expected).await; } 
diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration.rs b/compiler/crates/relay-compiler/tests/relay_compiler_integration.rs new file mode 100644 index 0000000000000..ba8b031b6413d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration.rs @@ -0,0 +1,159 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; + +use common::ConsoleLogger; +use common::Diagnostic; +use fixture_tests::Fixture; +use futures_util::FutureExt; +use graphql_cli::DiagnosticPrinter; +use graphql_test_helpers::ProjectFixture; +use graphql_test_helpers::TestDir; +use relay_compiler::build_project::generate_extra_artifacts::default_generate_extra_artifacts_fn; +use relay_compiler::compiler::Compiler; +use relay_compiler::config::Config; +use relay_compiler::errors::BuildProjectError; +use relay_compiler::errors::Error; +use relay_compiler::source_for_location; +use relay_compiler::FileSourceKind; +use relay_compiler::FsSourceReader; +use relay_compiler::LocalPersister; +use relay_compiler::OperationPersister; +use relay_compiler::RemotePersister; +use relay_compiler::SourceReader; +use relay_config::PersistConfig; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let project_fixture = ProjectFixture::deserialize(fixture.content); + + let test_dir = TestDir::new(); + + project_fixture.write_to_dir(test_dir.path()); + + let original_cwd = std::env::current_dir().expect("Could not get cwd"); + + std::env::set_current_dir(test_dir.path()).expect("Could not set cwd"); + + let run_future = async { + let mut config = + Config::search(&PathBuf::from(test_dir.path())).expect("Could not load config"); + + config.file_source_config = FileSourceKind::WalkDir; + config.create_operation_persister = 
Some(Box::new(|project_config| { + project_config.persist.as_ref().map( + |persist_config| -> Box { + match persist_config { + PersistConfig::Remote(remote_config) => { + Box::new(RemotePersister::new(remote_config.clone())) + } + PersistConfig::Local(local_config) => { + Box::new(LocalPersister::new(local_config.clone())) + } + } + }, + ) + })); + config.generate_extra_artifacts = Some(Box::new(default_generate_extra_artifacts_fn)); + + let compiler = Compiler::new(Arc::new(config), Arc::new(ConsoleLogger)); + let compiler_result = compiler.compile().await; + + match compiler_result { + Ok(_) => { + let mut output = ProjectFixture::read_from_dir(test_dir.path()); + // Omit the input files from the output + output.remove_files(project_fixture); + output + .serialize() + // Jump through a few hoops to avoid having at-generated in either this + // file or our generated `.expected` files, since that would confuse other + // tools. + .replace(&format!("{}generated", '@'), "") + } + Err(compiler_error) => print_compiler_error(test_dir.path(), compiler_error), + } + }; + + let result = match std::panic::AssertUnwindSafe(run_future) + .catch_unwind() + .await + { + Err(panic_err) => { + std::env::set_current_dir(original_cwd) + .expect("Could set cwd (while handling panic from test)"); + std::panic::resume_unwind(panic_err) + } + Ok(ok) => Ok(ok), + }; + + std::env::set_current_dir(original_cwd).expect("Could set cwd"); + + result +} + +fn print_compiler_error(root_dir: &Path, error: Error) -> String { + let mut error_printer = CompilerErrorPrinter::for_root_dir(root_dir); + error_printer.print_error(error); + error_printer.chunks.join("\n") +} + +struct CompilerErrorPrinter<'a> { + chunks: Vec, + root_dir: &'a Path, + source_reader: Box, +} + +impl<'a> CompilerErrorPrinter<'a> { + fn for_root_dir(root_dir: &'a Path) -> Self { + Self { + chunks: vec![], + root_dir, + source_reader: Box::new(FsSourceReader {}), + } + } + + fn print_error(&mut self, compiler_error: Error) { 
+ match compiler_error { + Error::DiagnosticsError { errors } => { + for diagnostic in errors { + self.append_diagnostic(diagnostic) + } + } + Error::BuildProjectsErrors { errors } => { + for err in errors { + self.print_build_error(err); + } + } + err => self.chunks.push(format!("{}", err)), + } + } + + fn print_build_error(&mut self, build_error: BuildProjectError) { + match build_error { + BuildProjectError::ValidationErrors { + errors, + project_name: _, + } => { + for diagnostic in errors { + self.append_diagnostic(diagnostic) + } + } + e => self.chunks.push(format!("{}", e)), + } + } + + fn append_diagnostic(&mut self, diagnostic: Diagnostic) { + let printer = DiagnosticPrinter::new(|source_location| { + source_for_location(self.root_dir, source_location, self.source_reader.as_ref()) + .map(|source| source.to_text_source()) + }); + self.chunks.push(printer.diagnostic_to_string(&diagnostic)) + } +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.expected new file mode 100644 index 0000000000000..a8ff79add864b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.expected @@ -0,0 +1,108 @@ +==================================== INPUT ==================================== +//- bar.js +graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation { + some_mutation: Boolean +} +type Query { + greeting: String +} + + +//- extensions.graphql + +extend type Mutation { + foo_mutation: Boolean +} +==================================== OUTPUT =================================== +//- 
__generated__/barMutation.graphql.js +/** + * SignedSource<<8c2ec235b8b068c2c2fffbc3aae787b8>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, Mutation } from 'relay-runtime'; +export type barMutation$variables = {||}; +export type barMutation$data = {| + +foo_mutation: ?boolean, +|}; +export type barMutation = {| + response: barMutation$data, + variables: barMutation$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "foo_mutation", + "storageKey": null + } + ] + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "barMutation", + "selections": (v0/*: any*/), + "type": "Mutation", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "barMutation", + "selections": (v0/*: any*/) + }, + "params": { + "cacheID": "b7409af7b3a13247f776ad3f9192c84b", + "id": null, + "metadata": {}, + "name": "barMutation", + "operationKind": "mutation", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "1f3610a117a92bb877caf826f77fae63"; + +export default ((node/*: any*/)/*: Mutation< + barMutation$variables, + barMutation$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.input new file mode 100644 index 0000000000000..e16df33fb253a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_extension.input @@ -0,0 +1,33 @@ +//- bar.js +graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + 
"featureFlags": { + "enable_relay_resolver_transform": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation { + some_mutation: Boolean +} +type Query { + greeting: String +} + + +//- extensions.graphql + +extend type Mutation { + foo_mutation: Boolean +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.expected new file mode 100644 index 0000000000000..3e9340a126d26 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.expected @@ -0,0 +1,124 @@ +==================================== INPUT ==================================== +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Boolean + */ + +//- bar.js +graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + } +} + +//- schema.graphql + +type Mutation { + some_field: Boolean +} + +type Query { + some_field: Boolean +} +==================================== OUTPUT =================================== +//- __generated__/barMutation.graphql.js +/** + * SignedSource<<7f1de50339842bab9cecc72c6fbc076f>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, Mutation } from 'relay-runtime'; +import {foo_mutation as mutationFooMutationResolverType} from "../foo.js"; +// Type assertion validating that `mutationFooMutationResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(mutationFooMutationResolverType: () => ?boolean); +export type barMutation$variables = {||}; +export type barMutation$data = {| + +foo_mutation: ?boolean, +|}; +export type barMutation = {| + response: barMutation$data, + variables: barMutation$variables, +|}; +*/ + +import {foo_mutation as mutationFooMutationResolver} from './../foo'; + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "barMutation", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "foo_mutation", + "resolverModule": mutationFooMutationResolver, + "path": "foo_mutation" + } + ] + } + ], + "type": "Mutation", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "barMutation", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "foo_mutation", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "b7409af7b3a13247f776ad3f9192c84b", + "id": null, + "metadata": {}, + "name": "barMutation", + "operationKind": "mutation", + "text": null + } +}; + +(node/*: any*/).hash = "1f3610a117a92bb877caf826f77fae63"; + +export default ((node/*: any*/)/*: Mutation< + barMutation$variables, + barMutation$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.input new file mode 100644 index 0000000000000..60c7a8f29fc64 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver.input @@ -0,0 +1,30 @@ +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Boolean + */ + +//- bar.js 
+graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + } +} + +//- schema.graphql + +type Mutation { + some_field: Boolean +} + +type Query { + some_field: Boolean +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.expected new file mode 100644 index 0000000000000..3b3c2e0363ed3 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.expected @@ -0,0 +1,198 @@ +==================================== INPUT ==================================== +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Person + */ + +/** + * @RelayResolver NotCalledMutation.baz_mutation: Boolean + */ + +//- bar.js +graphql`mutation barMutation { + baz_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation { + some_field: Boolean +} + +type NotCalledMutation { + some_field: Boolean +} + +type Query { + some_field: Boolean +} + +schema { + query: Query, + mutation: NotCalledMutation, +} + +//- extensions.graphql + +type Person { + name: String! + age: Int! 
+} +==================================== OUTPUT =================================== +//- __generated__/Mutation__foo_mutation$normalization.graphql.js +/** + * SignedSource<<87fb321a30684890a8b77e1bdb2cf79a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +export type Mutation__foo_mutation$normalization = {| + +age: number, + +name: string, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "Mutation__foo_mutation$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "age", + "storageKey": null + } + ] + } + ] +}; + +export default node; + +//- __generated__/barMutation.graphql.js +/** + * SignedSource<<6f79ca8d7718498c480388c5b22a5130>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, Mutation } from 'relay-runtime'; +import {baz_mutation as notCalledMutationBazMutationResolverType} from "../foo.js"; +// Type assertion validating that `notCalledMutationBazMutationResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(notCalledMutationBazMutationResolverType: () => ?boolean); +export type barMutation$variables = {||}; +export type barMutation$data = {| + +baz_mutation: ?boolean, +|}; +export type barMutation = {| + response: barMutation$data, + variables: barMutation$variables, +|}; +*/ + +import {baz_mutation as notCalledMutationBazMutationResolver} from './../foo'; + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "barMutation", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "baz_mutation", + "resolverModule": notCalledMutationBazMutationResolver, + "path": "baz_mutation" + } + ] + } + ], + "type": "NotCalledMutation", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "barMutation", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "baz_mutation", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "b7409af7b3a13247f776ad3f9192c84b", + "id": null, + "metadata": {}, + "name": "barMutation", + "operationKind": "mutation", + "text": null + } +}; + +(node/*: any*/).hash = "bbe24bc0c4cab0597abfafef11f07b88"; + +export default ((node/*: any*/)/*: Mutation< + barMutation$variables, + barMutation$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.input new file mode 100644 index 0000000000000..4d67f729cd716 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.input @@ -0,0 
+1,53 @@ +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Person + */ + +/** + * @RelayResolver NotCalledMutation.baz_mutation: Boolean + */ + +//- bar.js +graphql`mutation barMutation { + baz_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation { + some_field: Boolean +} + +type NotCalledMutation { + some_field: Boolean +} + +type Query { + some_field: Boolean +} + +schema { + query: Query, + mutation: NotCalledMutation, +} + +//- extensions.graphql + +type Person { + name: String! + age: Int! +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.expected new file mode 100644 index 0000000000000..af79de086d08f --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.expected @@ -0,0 +1,32 @@ +==================================== INPUT ==================================== +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Boolean + */ + +//- bar.js +graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql + +type Mutation +==================================== OUTPUT =================================== +✖︎ Resolvers on the mutation type Mutation are disallowed without the enable_relay_resolver_mutations feature flag + + foo.js:2:28 + 1 │ * + 2 │ * @RelayResolver Mutation.foo_mutation: Boolean + │ ^^^^^^^^^^^^ + 3 │ 
diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.input new file mode 100644 index 0000000000000..92b1e5115c85a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.input @@ -0,0 +1,23 @@ +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Boolean + */ + +//- bar.js +graphql`mutation barMutation { + foo_mutation +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql + +type Mutation diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.expected new file mode 100644 index 0000000000000..8b74788d299e5 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.expected @@ -0,0 +1,75 @@ +==================================== INPUT ==================================== +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Person + */ + +/** + * @RelayResolver Mutation.baz_mutation: [Person] + */ + +/** + * @RelayResolver Mutation.quux_mutation: [Person!] 
+ */ + +//- bar.js +graphql`mutation barMutation { + foo_mutation { + name + } + baz_mutation { + name + } + quux_mutation { + name + } +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation + +//- extensions.graphql + +type Person { + name: String! + age: Int! +} +==================================== OUTPUT =================================== +✖︎ Mutation resolver foo_mutation must return a scalar or enum type, got Person + + foo.js:2:28 + 1 │ * + 2 │ * @RelayResolver Mutation.foo_mutation: Person + │ ^^^^^^^^^^^^ + 3 │ + +✖︎ Mutation resolver baz_mutation must return a scalar or enum type, got Person + + foo.js:6:28 + 5 │ * + 6 │ * @RelayResolver Mutation.baz_mutation: [Person] + │ ^^^^^^^^^^^^ + 7 │ + +✖︎ Mutation resolver quux_mutation must return a scalar or enum type, got Person + + foo.js:10:28 + 9 │ * + 10 │ * @RelayResolver Mutation.quux_mutation: [Person!] + │ ^^^^^^^^^^^^^ + 11 │ diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.input new file mode 100644 index 0000000000000..6ac94666a9531 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.input @@ -0,0 +1,50 @@ +//- foo.js +/** + * @RelayResolver Mutation.foo_mutation: Person + */ + +/** + * @RelayResolver Mutation.baz_mutation: [Person] + */ + +/** + * @RelayResolver Mutation.quux_mutation: [Person!] 
+ */ + +//- bar.js +graphql`mutation barMutation { + foo_mutation { + name + } + baz_mutation { + name + } + quux_mutation { + name + } +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Mutation + +//- extensions.graphql + +type Person { + name: String! + age: Int! +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.expected new file mode 100644 index 0000000000000..f4457c7de0df3 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.expected @@ -0,0 +1,401 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + boss { + admin { + id + } + } +}` + +//- Admin.js +/** + * @RelayResolver Admin implements Worker + */ +export function + +/** + * @RelayResolver Admin.admin: Admin + */ +export function admin() { + return "123"; +} + +/** + * @RelayResolver Query.boss: Admin + */ +export function boss() { + return "123"; +} + + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql + +# A resolver type will implement this +interface Worker { + id: ID! 
# This field will be created by the making a model resolver + admin: Admin # Here we reference a type defined by resolvers +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<2a569a157aaf5519c1f05d904b6d6d26>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "Admin"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('Admin').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentQuery.graphql.js +/** + * SignedSource<<678979a6fbd285c60deb8d498debb4ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import {admin as adminAdminResolverType} from "Admin"; +// Type assertion validating that `adminAdminResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminAdminResolverType: ( + __relay_model_instance: Admin____relay_model_instance$data['__relay_model_instance'], +) => ?{| + +id: DataID, +|}); +import {boss as queryBossResolverType} from "Admin"; +// Type assertion validating that `queryBossResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryBossResolverType: () => ?{| + +id: DataID, +|}); +export type PersonComponentQuery$variables = {||}; +export type PersonComponentQuery$data = {| + +boss: ?{| + +admin: ?{| + +id: string, + |}, + |}, +|}; +export type PersonComponentQuery = {| + response: PersonComponentQuery$data, + variables: PersonComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v2 = [ + (v1/*: any*/) +], +v3 = { + "alias": null, + "args": null, + "concreteType": "Admin", + "kind": "LinkedField", + "name": "admin", + "plural": false, + "selections": (v2/*: any*/), + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "Admin", + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "boss", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('Admin').Admin, 'id', true), + "path": "boss.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "boss", + "resolverModule": require('Admin').boss, + "path": "boss" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Admin", + "kind": "LinkedField", + "name": "boss", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "Admin", + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "admin", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('Admin').Admin, 'id', true), + "path": "boss.admin.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "admin", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('Admin').admin, '__relay_model_instance', true), + "path": "boss.admin" + }, + "linkedField": (v3/*: any*/) + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "boss", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Admin", + "kind": "LinkedField", + "name": "boss", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "admin", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v2/*: any*/), + "type": "Admin", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Admin", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": (v3/*: any*/) + }, + (v1/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0deed3d106ca29ad927a703c08de07e0", + "id": null, + "metadata": {}, + "name": "PersonComponentQuery", + 
"operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "25c3cfb12f96f3cf4479f3effad2e911"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + PersonComponentQuery$variables, + PersonComponentQuery$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.input new file mode 100644 index 0000000000000..066f49f6efa00 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.input @@ -0,0 +1,55 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + boss { + admin { + id + } + } +}` + +//- Admin.js +/** + * @RelayResolver Admin implements Worker + */ +export function + +/** + * @RelayResolver Admin.admin: Admin + */ +export function admin() { + return "123"; +} + +/** + * @RelayResolver Query.boss: Admin + */ +export function boss() { + return "123"; +} + + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql + +# A resolver type will implement this +interface Worker { + id: ID! 
# This field will be created by the making a model resolver + admin: Admin # Here we reference a type defined by resolvers +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.expected new file mode 100644 index 0000000000000..e82604f9c2e83 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.expected @@ -0,0 +1,60 @@ +==================================== INPUT ==================================== +//- foo.js + +graphql`query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! = "bazquux") { + extension_field(custom_scalar_arg: "a_string") { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) + with_var: extension_server_scalar(custom_server_scalar_arg: $arg_server) + with_literal: extension_server_scalar(custom_server_scalar_arg: "another_string") +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true, + "enable_strict_custom_scalars": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Query + +scalar CustomServerScalarType + +//- extensions.graphql + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int + extension_server_scalar(custom_server_scalar_arg: CustomServerScalarType!): Int +} +==================================== OUTPUT =================================== +✖︎ Unexpected scalar literal `"foobar"` provided in a position expecting custom scalar type `CustomScalarType`. This value should come from a variable. + + foo.js:2:72 + 2 │ query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! = "bazquux") { + │ ^^^^^^^^ + 3 │ extension_field(custom_scalar_arg: "a_string") { + +✖︎ Unexpected scalar literal `"bazquux"` provided in a position expecting custom scalar type `CustomServerScalarType`. This value should come from a variable. + + foo.js:2:121 + 2 │ query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! = "bazquux") { + │ ^^^^^^^^^ + 3 │ extension_field(custom_scalar_arg: "a_string") { diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.input new file mode 100644 index 0000000000000..ad9b86d9347bc --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.input @@ -0,0 +1,45 @@ +//- foo.js + +graphql`query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! 
= "bazquux") { + extension_field(custom_scalar_arg: "a_string") { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) + with_var: extension_server_scalar(custom_server_scalar_arg: $arg_server) + with_literal: extension_server_scalar(custom_server_scalar_arg: "another_string") +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true, + "enable_strict_custom_scalars": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Query + +scalar CustomServerScalarType + +//- extensions.graphql + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int + extension_server_scalar(custom_server_scalar_arg: CustomServerScalarType!): Int +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.expected new file mode 100644 index 0000000000000..6c8a54a3a86f1 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.expected @@ -0,0 +1,195 @@ +==================================== INPUT ==================================== +//- foo.js + +graphql`query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! 
= "bazquux") { + extension_field(custom_scalar_arg: "a_string") { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) + with_var: extension_server_scalar(custom_server_scalar_arg: $arg_server) + with_literal: extension_server_scalar(custom_server_scalar_arg: "another_string") +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Query + +scalar CustomServerScalarType + +//- extensions.graphql + +scalar CustomScalarType + +type Obj { + some_key: Int! +} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int + extension_server_scalar(custom_server_scalar_arg: CustomServerScalarType!): Int +} +==================================== OUTPUT =================================== +//- __generated__/fooCustomScalarLiteralArgQuery.graphql.js +/** + * SignedSource<<27b4bcb541cbdf999e094b271c6bf062>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +export type fooCustomScalarLiteralArgQuery$variables = {| + arg: any, + arg_server: any, +|}; +export type fooCustomScalarLiteralArgQuery$data = {| + +extension_field: ?{| + +__typename: "Obj", + |}, + +extension_scalar_field: ?number, + +with_literal: ?number, + +with_var: ?number, +|}; +export type fooCustomScalarLiteralArgQuery = {| + response: fooCustomScalarLiteralArgQuery$data, + variables: fooCustomScalarLiteralArgQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": "foobar", + "kind": "LocalArgument", + "name": "arg" + }, + { + "defaultValue": "bazquux", + "kind": "LocalArgument", + "name": 
"arg_server" + } +], +v1 = [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Literal", + "name": "custom_scalar_arg", + "value": "a_string" + } + ], + "concreteType": "Obj", + "kind": "LinkedField", + "name": "extension_field", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } + ], + "storageKey": "extension_field(custom_scalar_arg:\"a_string\")" + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "custom_scalar_arg", + "variableName": "arg" + } + ], + "kind": "ScalarField", + "name": "extension_scalar_field", + "storageKey": null + }, + { + "alias": "with_var", + "args": [ + { + "kind": "Variable", + "name": "custom_server_scalar_arg", + "variableName": "arg_server" + } + ], + "kind": "ScalarField", + "name": "extension_server_scalar", + "storageKey": null + }, + { + "alias": "with_literal", + "args": [ + { + "kind": "Literal", + "name": "custom_server_scalar_arg", + "value": "another_string" + } + ], + "kind": "ScalarField", + "name": "extension_server_scalar", + "storageKey": "extension_server_scalar(custom_server_scalar_arg:\"another_string\")" + } + ] + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "fooCustomScalarLiteralArgQuery", + "selections": (v1/*: any*/), + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "fooCustomScalarLiteralArgQuery", + "selections": (v1/*: any*/) + }, + "params": { + "cacheID": "cd379a63ef2b50edc27e70bb79830d1e", + "id": null, + "metadata": {}, + "name": "fooCustomScalarLiteralArgQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "6e15b365d98ebe5a10f235ef71786489"; + +export default ((node/*: any*/)/*: ClientQuery< + 
fooCustomScalarLiteralArgQuery$variables, + fooCustomScalarLiteralArgQuery$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.input new file mode 100644 index 0000000000000..16bbbadc0c47e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.input @@ -0,0 +1,44 @@ +//- foo.js + +graphql`query fooCustomScalarLiteralArgQuery($arg: CustomScalarType! = "foobar", $arg_server: CustomServerScalarType! = "bazquux") { + extension_field(custom_scalar_arg: "a_string") { + __typename + } + extension_scalar_field(custom_scalar_arg: $arg) + with_var: extension_server_scalar(custom_server_scalar_arg: $arg_server) + with_literal: extension_server_scalar(custom_server_scalar_arg: "another_string") +}` + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_relay_resolver_mutations": true + }, + "schemaExtensions": [ + "./extensions.graphql" + ] +} + +//- schema.graphql + +type Query + +scalar CustomServerScalarType + +//- extensions.graphql + +scalar CustomScalarType + +type Obj { + some_key: Int! 
+} + +extend type Query { + extension_field(custom_scalar_arg: CustomScalarType!): Obj + extension_scalar_field(custom_scalar_arg: CustomScalarType!): Int + extension_server_scalar(custom_server_scalar_arg: CustomServerScalarType!): Int +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.expected new file mode 100644 index 0000000000000..0e511968c31dc --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.expected @@ -0,0 +1,71 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + fragment foo on User @throwOnFieldError { + name + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { me: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/foo.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type foo$fragmentType: FragmentType; +export type foo$data = {| + +name: ?string, + +$fragmentType: foo$fragmentType, +|}; +export type foo$key = { + +$data?: foo$data, + +$fragmentSpreads: foo$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "foo", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "c846248549175d6d05faa3bd13697146"; + +module.exports = ((node/*: any*/)/*: Fragment< + foo$fragmentType, + foo$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.input new file mode 100644 index 0000000000000..345c73c07f4eb --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_fragment.input @@ -0,0 +1,16 @@ +//- foo.js +graphql` + fragment foo on User @throwOnFieldError { + name + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { me: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.expected new file mode 100644 index 0000000000000..41accfb2e2041 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.expected @@ -0,0 +1,103 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + query fooQuery @throwOnFieldError { + me { + name + } + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { me: User } +type User { name: String } 
+==================================== OUTPUT =================================== +//- __generated__/fooQuery.graphql.js +/** + * SignedSource<<3fedea69ae18a5e38707e31bf973e7af>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type fooQuery$variables = {||}; +export type fooQuery$data = {| + +me: ?{| + +name: ?string, + |}, +|}; +export type fooQuery = {| + response: fooQuery$data, + variables: fooQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "fooQuery", + "selections": (v0/*: any*/), + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "fooQuery", + "selections": (v0/*: any*/) + }, + "params": { + "cacheID": "198e3a8388d05ebc6755d5ddbd3fa905", + "id": null, + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": "query fooQuery {\n me {\n name\n }\n}\n" + } +}; +})(); + +(node/*: any*/).hash = "5837704043bd9bdb31bb77ca3ed3856e"; + +module.exports = ((node/*: any*/)/*: Query< + fooQuery$variables, + fooQuery$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.input new file mode 100644 index 0000000000000..e6624ac08ce13 --- /dev/null +++ 
b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/error_handling_query.input @@ -0,0 +1,18 @@ +//- foo.js +graphql` + query fooQuery @throwOnFieldError { + me { + name + } + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { me: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.expected new file mode 100644 index 0000000000000..f7281490fa873 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.expected @@ -0,0 +1,221 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + fragment foo($postCount: Int) on User { + posts(first: $postCount) { + title + } + }`; + +graphql` + query fooQuery { + me { + ...foo(postCount: 5) + } + } +` + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql", + "featureFlags": { + "enable_fragment_argument_transform": true + } +} + +//- schema.graphql +type Query { + me: User +} + +type User { + posts(first: Int): [Post] +} + +type Post { + title: String +} +==================================== OUTPUT =================================== +//- __generated__/foo.graphql.ts +/** + * SignedSource<<0e503f5c200310a5580a2b4853a499ce>> + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +import { Fragment, ReaderFragment } from 'relay-runtime'; +import { FragmentRefs } from "relay-runtime"; +export type foo$data = { + readonly posts: ReadonlyArray<{ + readonly title: string | null | undefined; + } | null | undefined> | null | undefined; + readonly " $fragmentType": "foo"; +}; +export type foo$key = { + readonly " 
$data"?: foo$data; + readonly " $fragmentSpreads": FragmentRefs<"foo">; +}; + +const node: ReaderFragment = { + "argumentDefinitions": [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "postCount" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "foo", + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "first", + "variableName": "postCount" + } + ], + "concreteType": "Post", + "kind": "LinkedField", + "name": "posts", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "title", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +(node as any).hash = "55999049d2511b8662e2fb877d8bf661"; + +export default node; + +//- __generated__/fooQuery.graphql.ts +/** + * SignedSource<<20043d1d00f946a33727d27907f7f3f2>> + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +import { ConcreteRequest, Query } from 'relay-runtime'; +import { FragmentRefs } from "relay-runtime"; +export type fooQuery$variables = Record; +export type fooQuery$data = { + readonly me: { + readonly " $fragmentSpreads": FragmentRefs<"foo">; + } | null | undefined; +}; +export type fooQuery = { + response: fooQuery$data; + variables: fooQuery$variables; +}; + +const node: ConcreteRequest = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "fooQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "args": [ + { + "kind": "Literal", + "name": "postCount", + "value": 5 + } + ], + "kind": "FragmentSpread", + "name": "foo" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "fooQuery", + 
"selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": [ + { + "kind": "Literal", + "name": "first", + "value": 5 + } + ], + "concreteType": "Post", + "kind": "LinkedField", + "name": "posts", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "title", + "storageKey": null + } + ], + "storageKey": "posts(first:5)" + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "4fbb83aa0c91b56ee5c3eed90060ce7a", + "id": null, + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": "query fooQuery {\n me {\n ...foo_3lq16u\n }\n}\n\nfragment foo_3lq16u on User {\n posts(first: 5) {\n title\n }\n}\n" + } +}; + +(node as any).hash = "3508818a33211bc36644b79de3011e83"; + +export default node; diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.input new file mode 100644 index 0000000000000..a0ae4d6994536 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/fragment_arguments.input @@ -0,0 +1,37 @@ +//- foo.js +graphql` + fragment foo($postCount: Int) on User { + posts(first: $postCount) { + title + } + }`; + +graphql` + query fooQuery { + me { + ...foo(postCount: 5) + } + } +` + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql", + "featureFlags": { + "enable_fragment_argument_transform": true + } +} + +//- schema.graphql +type Query { + me: User +} + +type User { + posts(first: Int): [Post] +} + +type Post { + title: String +} \ No newline at end of file diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.expected 
b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.expected new file mode 100644 index 0000000000000..6394d0b078d32 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.expected @@ -0,0 +1,378 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + +/** + * @RelayResolver User.name: String + * @live + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @live + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! + name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<43fd74ebd2b94a456ec3c8657edf2868>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => LiveState); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType["read"]>>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayLiveResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<5b1064cbbcdf6ad4c972e722067e79b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion 
validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => LiveState); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType["read"]>>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: 
User__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.input new file mode 100644 index 0000000000000..c17055334aeeb --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/live_resolver_implements_interface_field.input @@ -0,0 +1,51 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + +/** + * @RelayResolver User.name: String + * @live + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @live + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.expected new file mode 100644 index 0000000000000..dc843e65f444c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.expected @@ -0,0 +1,606 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } + actor { + description + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- IActorResolvers.js +/** + * @RelayResolver IActor.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +type Query { + person: IPerson + actor: IActor +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! +} + +interface IActor { + id: ID! 
+} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentQuery.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import {description as iActorDescriptionResolverType} from "IActorResolvers"; +// Type assertion validating that `iActorDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iActorDescriptionResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(iPersonNameResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +export type PersonComponentQuery$variables = {||}; +export type PersonComponentQuery$data = {| + +actor: ?{| + +description: ?string, + |}, + +person: ?{| + +name: ?string, + |}, +|}; +export type PersonComponentQuery = {| + response: PersonComponentQuery$data, + variables: PersonComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" +}, +v1 = { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" +}, +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IActorResolvers').description, '__relay_model_instance', true), + "path": "actor.description" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IActorResolvers').description, '__relay_model_instance', true), + "path": "actor.description" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "PersonComponentQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + 
"resolverInfo": { + "resolverFunction": require('IPersonResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('IPersonResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + (v3/*: any*/) + ] + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "description", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('IActorResolvers').description, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "description", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('IActorResolvers').description, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + (v3/*: any*/) + ] + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "0deed3d106ca29ad927a703c08de07e0", + "id": null, + "metadata": {}, + "name": "PersonComponentQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "0ed0cac8f6cfaba728cb6608b7840d57"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + PersonComponentQuery$variables, + PersonComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * 
@nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.input new file mode 100644 index 0000000000000..dc43e54ce44f6 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.input @@ -0,0 +1,60 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } + actor { + description + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- IActorResolvers.js +/** + * @RelayResolver IActor.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +type Query { + person: IPerson + actor: IActor +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! +} + +interface IActor { + id: ID! 
+} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.expected new file mode 100644 index 0000000000000..52096b0c891a6 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.expected @@ -0,0 +1,601 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } + actor { + description + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +/** + * @RelayResolver User.name: String + */ + +/** + * @RelayResolver User.description: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +/** + * @RelayResolver Admin.name: String + */ + +/** + * @RelayResolver Admin.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +type Query { + person: IPerson + actor: IActor +} + +//- schema-extensions/extension.graphql + +interface IPerson { + id: ID! + name: String +} + +interface IActor { + id: ID! 
+ description: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentQuery.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +export type PersonComponentQuery$variables = {||}; +export type PersonComponentQuery$data = {| + +actor: ?{| + +description: ?string, + |}, + +person: ?{| + +name: ?string, + |}, +|}; +export type PersonComponentQuery = {| + response: PersonComponentQuery$data, + variables: PersonComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" +}, +v1 = { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" +}, +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": 
"name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').description, '__relay_model_instance', true), + "path": "actor.description" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').description, '__relay_model_instance', true), + "path": "actor.description" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + 
"argumentDefinitions": [], + "kind": "Operation", + "name": "PersonComponentQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + (v3/*: any*/) + ] + } + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "description", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').description, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "description", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').description, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + (v3/*: any*/) + ] + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": 
"0deed3d106ca29ad927a703c08de07e0", + "id": null, + "metadata": {}, + "name": "PersonComponentQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "0ed0cac8f6cfaba728cb6608b7840d57"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + PersonComponentQuery$variables, + PersonComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.input new file mode 100644 index 0000000000000..0a908a56d3ffc --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.input @@ -0,0 +1,69 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } + actor { + description + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +/** + * @RelayResolver User.name: String + */ + +/** + * @RelayResolver User.description: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +/** + * @RelayResolver Admin.name: String + */ + +/** + * @RelayResolver Admin.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +type Query { + person: IPerson + actor: IActor +} + +//- schema-extensions/extension.graphql + +interface IPerson { + 
id: ID! + name: String +} + +interface IActor { + id: ID! + description: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.expected new file mode 100644 index 0000000000000..2b08a3cf1f33a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.expected @@ -0,0 +1,129 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } +==================================== OUTPUT =================================== +//- __generated__/fooQuery$parameters.js +/** + * SignedSource<<56c59d6aa21f9b768bf09e00494f325e>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @relayRequestID ae6874c86ce5db2df8d6b253a6a0ec13 + +/*:: +import type { PreloadableConcreteRequest } from 'relay-runtime'; +import type { fooQuery } from './fooQuery.graphql'; +*/ + +var node/*: PreloadableConcreteRequest*/ = { + "kind": "PreloadableConcreteRequest", + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; + +export default ((node/*: any*/)/*: PreloadableConcreteRequest*/); + +//- __generated__/fooQuery.graphql.js +/** + * SignedSource<<8cc7b8c559f66dd65618447b4688e2e9>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @relayRequestID 
ae6874c86ce5db2df8d6b253a6a0ec13 + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type fooQuery$variables = {||}; +export type fooQuery$data = {| + +userName: ?string, +|}; +export type fooQuery = {| + response: fooQuery$data, + variables: fooQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "userName", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "fooQuery", + "selections": (v0/*: any*/), + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "fooQuery", + "selections": (v0/*: any*/) + }, + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "21bf4f020aaeb6ce67d04911a13d42a3"; + +import { PreloadableQueryRegistry } from 'relay-runtime'; +PreloadableQueryRegistry.set((node.params/*: any*/).id, node); + +export default ((node/*: any*/)/*: Query< + fooQuery$variables, + fooQuery$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.input new file mode 100644 index 0000000000000..4d16ef06ba05b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_flow.input @@ -0,0 +1,21 @@ +//- foo.js +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "flow", + "schema": "./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } diff --git 
a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.expected new file mode 100644 index 0000000000000..64b42ed3cbcb9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.expected @@ -0,0 +1,107 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "javascript", + "schema": "./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } +==================================== OUTPUT =================================== +//- __generated__/fooQuery$parameters.js +/** + * SignedSource<<41a51ef922042b40623a9a01fbb5f5a3>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @relayRequestID ae6874c86ce5db2df8d6b253a6a0ec13 + +var node = { + "kind": "PreloadableConcreteRequest", + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; + +export default node; + +//- __generated__/fooQuery.graphql.js +/** + * SignedSource<<4027cbd9de156c0ba9c35382960d28f0>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @relayRequestID ae6874c86ce5db2df8d6b253a6a0ec13 + +var node = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "userName", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "fooQuery", + 
"selections": (v0/*: any*/), + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "fooQuery", + "selections": (v0/*: any*/) + }, + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +node.hash = "21bf4f020aaeb6ce67d04911a13d42a3"; + +import { PreloadableQueryRegistry } from 'relay-runtime'; +PreloadableQueryRegistry.set(node.params.id, node); + +export default node; diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.input new file mode 100644 index 0000000000000..3f9b4d9e869d0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_javascript.input @@ -0,0 +1,21 @@ +//- foo.js +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "javascript", + "schema": "./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.expected new file mode 100644 index 0000000000000..e5fe00c999739 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.expected @@ -0,0 +1,120 @@ +==================================== INPUT ==================================== +//- foo.ts +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "typescript", + "schema": 
"./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } +==================================== OUTPUT =================================== +//- __generated__/fooQuery$parameters.ts +/** + * SignedSource<<7de8baf3626a3ece606b72c2ece21848>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +// @relayRequestID ae6874c86ce5db2df8d6b253a6a0ec13 + +import { PreloadableConcreteRequest } from 'relay-runtime'; +import { fooQuery } from './fooQuery.graphql'; + +const node: PreloadableConcreteRequest = { + "kind": "PreloadableConcreteRequest", + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; + +export default node; + +//- __generated__/fooQuery.graphql.ts +/** + * SignedSource<<461989c03655ebfecf253b741f65ef91>> + * @relayHash ae6874c86ce5db2df8d6b253a6a0ec13 + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +// @relayRequestID ae6874c86ce5db2df8d6b253a6a0ec13 + +import { ConcreteRequest, Query } from 'relay-runtime'; +export type fooQuery$variables = Record; +export type fooQuery$data = { + readonly userName: string | null | undefined; +}; +export type fooQuery = { + response: fooQuery$data; + variables: fooQuery$variables; +}; + +const node: ConcreteRequest = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "userName", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "fooQuery", + "selections": (v0/*: any*/), + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": 
"fooQuery", + "selections": (v0/*: any*/) + }, + "params": { + "id": "ae6874c86ce5db2df8d6b253a6a0ec13", + "metadata": {}, + "name": "fooQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node as any).hash = "21bf4f020aaeb6ce67d04911a13d42a3"; + +import { PreloadableQueryRegistry } from 'relay-runtime'; +PreloadableQueryRegistry.set(node.params.id, node); + +export default node; diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.input new file mode 100644 index 0000000000000..676812f950e8c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/preloadable_query_typescript.input @@ -0,0 +1,21 @@ +//- foo.ts +graphql` + query fooQuery @preloadable { + userName + }`; + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql", + "eagerEsModules": true, + "persistConfig": { + "file": "./operations.json" + } +} + +//- operations.json +{} + +//- schema.graphql +type Query { userName: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.expected new file mode 100644 index 0000000000000..d1898d59a8e7b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.expected @@ -0,0 +1,375 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + + /** + * @RelayResolver 
Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! + name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<281289c09e9f11b0f0fdf7dd357dbe36>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating 
that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.input new file mode 100644 index 0000000000000..5b8422ed2cf0a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface.input @@ -0,0 +1,48 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.expected new file mode 100644 index 0000000000000..d6b06a092a12a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.expected @@ -0,0 +1,63 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +# Admin should implement name, but does not! + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! + name: String +} +==================================== OUTPUT =================================== +✖︎ Interface field 'IPerson.name' expected but type 'Admin' does not provide it. + + AdminTypeResolvers.js:2:19 + 1 │ * + 2 │ * @RelayResolver Admin implements IPerson + │ ^^^^^ + 3 │ + + ℹ︎ The interface field is defined here: + + schema-extensions/extension.graphql:3:3 + 2 │ id: ID! 
+ 3 │ name: String + │ ^^^^ + 4 │ } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.input new file mode 100644 index 0000000000000..a44721b4a4d1f --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.input @@ -0,0 +1,46 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +# Admin should implement name, but does not! + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.expected new file mode 100644 index 0000000000000..9327c60ed7dd3 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.expected @@ -0,0 +1,380 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<4ea82f388eeb21ecbc79f404917c194e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iPersonNameResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that 
`userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.input new file mode 100644 index 0000000000000..8edd8f9a09a5d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.input @@ -0,0 +1,45 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + some_field: Boolean +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.expected new file mode 100644 index 0000000000000..f74c4bc14ecf9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.expected @@ -0,0 +1,399 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! +} + +type Visitor implements IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<2707615d1c65ebada666a7a0d138837e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iPersonNameResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "Visitor", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { 
User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export 
type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.input new file mode 100644 index 0000000000000..c333ccf5ce0d1 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.input @@ -0,0 +1,50 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+} + +type Visitor implements IPerson { + id: ID! + name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.expected new file mode 100644 index 0000000000000..d965a92422ef2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.expected @@ -0,0 +1,396 @@ +==================================== INPUT ==================================== +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + * @rootFragment IPersonResolversFragment + */ + +graphql`fragment IPersonResolversFragment on IPerson { + name +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! +} + +type Visitor implements IPerson { + id: ID! 
+} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/IPersonResolversFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iPersonNameResolverType: ( + rootKey: IPersonResolversFragment$key, +) => ?string); +declare export opaque type IPersonResolversFragment$fragmentType: FragmentType; +export type IPersonResolversFragment$data = {| + +name: ?string, + +$fragmentType: IPersonResolversFragment$fragmentType, +|}; +export type IPersonResolversFragment$key = { + +$data?: IPersonResolversFragment$data, + +$fragmentSpreads: IPersonResolversFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = (function(){ +var v0 = { + "args": null, + "kind": "FragmentSpread", + "name": "IPersonResolversFragment" +}; +return { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "IPersonResolversFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('IPersonResolvers').name, + "path": "name" + } + ], + "type": "Visitor", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('IPersonResolvers').name, + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('IPersonResolvers').name, + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; +})(); + +(node/*: any*/).hash = "ba873f284ce4d50f2e9204a78f11952a"; + +module.exports = ((node/*: any*/)/*: Fragment< + IPersonResolversFragment$fragmentType, + IPersonResolversFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that 
`userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.input new file mode 100644 index 0000000000000..5c279a7f64ad2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.input @@ -0,0 +1,48 @@ +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + * @rootFragment IPersonResolversFragment + */ + +graphql`fragment IPersonResolversFragment on IPerson { + name +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! +} + +type Visitor implements IPerson { + id: ID! 
+} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.expected new file mode 100644 index 0000000000000..c1ddf61bd0762 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.expected @@ -0,0 +1,256 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @weak + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +//- schema-extensions/extension.graphql +type Query { me: IPerson } + +# Fields to be added later by resolvers +interface IPerson +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<523d097198aa2ffa2a1209e24ac2a337>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin } from "AdminTypeResolvers"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type 
Admin____relay_model_instance$data = {| + +__relay_model_instance: Admin, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<4ea82f388eeb21ecbc79f404917c194e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(iPersonNameResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + 
PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<9a188c26688bb46f65ed80df4ae938c3>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User } from "UserTypeResolvers"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: User, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.input new file mode 100644 index 0000000000000..f382623eaa71d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.input @@ -0,0 +1,45 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + + +//- UserTypeResolvers.js +/** + * @RelayResolver User 
implements IPerson + * @weak + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +//- schema-extensions/extension.graphql +type Query { me: IPerson } + +# Fields to be added later by resolvers +interface IPerson diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.expected new file mode 100644 index 0000000000000..8e13c56d7cf79 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.expected @@ -0,0 +1,394 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + someComplexObject +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ +// Implementation omitted for test + +/** + * @RelayResolver User.someComplexObject: SomeComplexObject + */ +// Implementation omitted for test + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ +// Implementation omitted for test + +/** + * @RelayResolver Admin.someComplexObject: SomeComplexObject + */ +// Implementation omitted for test + +//- SomeComplexObjectFlowTypeModule.js + +export type SomeComplexObjectFlowType { + // ... 
Many fields here perhaps +} + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "customScalarTypes": { + "SomeComplexObject": { + "name": "SomeComplexObjectFlowType", + "path": "SomeComplexObjectFlowTypeModule" + } + }, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +scalar SomeComplexObject + +interface IPerson { + id: ID! + someComplexObject: SomeComplexObject +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<5a351d5a9beb2093277e17f6efa94c15>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { SomeComplexObjectFlowType } from "SomeComplexObjectFlowTypeModule"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +someComplexObject: ?SomeComplexObjectFlowType, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "someComplexObject", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').someComplexObject, '__relay_model_instance', true), + "path": "someComplexObject" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "someComplexObject", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').someComplexObject, '__relay_model_instance', true), + "path": "someComplexObject" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "d8e63d57ea12bd6248ca0a69e440f37c"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as 
userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key 
= { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.input new file mode 100644 index 0000000000000..3040d918e72e0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.input @@ -0,0 +1,66 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + someComplexObject +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ +// Implementation omitted for test + +/** + * @RelayResolver User.someComplexObject: SomeComplexObject + */ +// Implementation omitted for test + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ +// Implementation omitted for test + +/** + * @RelayResolver Admin.someComplexObject: SomeComplexObject + */ +// Implementation omitted for test + +//- SomeComplexObjectFlowTypeModule.js + +export type SomeComplexObjectFlowType { + // ... 
Many fields here perhaps +} + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "customScalarTypes": { + "SomeComplexObject": { + "name": "SomeComplexObjectFlowType", + "path": "SomeComplexObjectFlowTypeModule" + } + }, + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +scalar SomeComplexObject + +interface IPerson { + id: ID! + someComplexObject: SomeComplexObject +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.expected new file mode 100644 index 0000000000000..1aaca05666fdb --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.expected @@ -0,0 +1,229 @@ +==================================== INPUT ==================================== +//- UserTypeResolvers.js +/** + * @RelayResolver User + */ +// Impl omitted for test + +type FlowVersionOfStatusEnum = "AMDIN" | "USER"; + +/** + * @RelayResolver User.status: Status + */ +export function status(user: User): FlowVersionOfStatusEnum { + return "ADMIN"; +} + +//- SomeComponent.js +graphql`fragment SomeComponentFragment on User { + status +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- 
schema-extensions/extension.graphql +enum Status { + ADMIN + USER +} +==================================== OUTPUT =================================== +//- __generated__/SomeComponentFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +export type Status = "ADMIN" | "USER"; +import type { FragmentType } from "relay-runtime"; +import {status as userStatusResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userStatusResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userStatusResolverType: ( + __relay_model_instance: User____relay_model_instance$data['__relay_model_instance'], +) => ?Status); +declare export opaque type SomeComponentFragment$fragmentType: FragmentType; +export type SomeComponentFragment$data = {| + +status: ?Status, + +$fragmentType: SomeComponentFragment$fragmentType, +|}; +export type SomeComponentFragment$key = { + +$data?: SomeComponentFragment$data, + +$fragmentSpreads: SomeComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "SomeComponentFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "status", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').status, '__relay_model_instance', true), + "path": "status" + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "6bb04088a9e45bd235fc92a69a2f3ef5"; + +module.exports = ((node/*: any*/)/*: Fragment< + SomeComponentFragment$fragmentType, + SomeComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.input new file mode 100644 index 0000000000000..102b955fda290 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.input @@ -0,0 +1,44 @@ +//- UserTypeResolvers.js +/** + * @RelayResolver User + */ +// Impl omitted for test + +type FlowVersionOfStatusEnum = "AMDIN" | "USER"; + +/** + * @RelayResolver User.status: Status + */ +export function status(user: User): FlowVersionOfStatusEnum { + return "ADMIN"; +} + +//- SomeComponent.js +graphql`fragment SomeComponentFragment on User { + status +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +enum Status { + ADMIN + USER +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.expected new file mode 100644 index 
0000000000000..6c8f078e64763 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.expected @@ -0,0 +1,230 @@ +==================================== INPUT ==================================== +//- UserTypeResolvers.js +/** + * @RelayResolver User + */ +// Impl omitted for test + +type FlowVersionOfStatusEnum = "AMDIN" | "USER"; + +/** + * @RelayResolver User.status: Status + */ +export function status(user: User): FlowVersionOfStatusEnum { + return "ADMIN"; +} + +//- SomeComponent.js +graphql`fragment SomeComponentFragment on User { + status +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "enumModuleSuffix": ".LOL_WHAT", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +enum Status { + ADMIN + USER +} +==================================== OUTPUT =================================== +//- __generated__/SomeComponentFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +export type Status = "ADMIN" | "USER"; +import type { FragmentType } from "relay-runtime"; +import {status as userStatusResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userStatusResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userStatusResolverType: ( + __relay_model_instance: User____relay_model_instance$data['__relay_model_instance'], +) => ?Status); +declare export opaque type SomeComponentFragment$fragmentType: FragmentType; +export type SomeComponentFragment$data = {| + +status: ?Status, + +$fragmentType: SomeComponentFragment$fragmentType, +|}; +export type SomeComponentFragment$key = { + +$data?: SomeComponentFragment$data, + +$fragmentSpreads: SomeComponentFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "SomeComponentFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "status", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').status, '__relay_model_instance', true), + "path": "status" + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "6bb04088a9e45bd235fc92a69a2f3ef5"; + +module.exports = ((node/*: any*/)/*: Fragment< + SomeComponentFragment$fragmentType, + SomeComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.input new file mode 100644 index 0000000000000..4678e1dd1b636 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.input @@ -0,0 +1,45 @@ +//- UserTypeResolvers.js +/** + * @RelayResolver User + */ +// Impl omitted for test + +type FlowVersionOfStatusEnum = "AMDIN" | "USER"; + +/** + * @RelayResolver User.status: Status + */ +export function status(user: User): FlowVersionOfStatusEnum { + return "ADMIN"; +} + +//- SomeComponent.js +graphql`fragment SomeComponentFragment on User { + status +}` + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "enumModuleSuffix": ".LOL_WHAT", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +enum Status { + ADMIN + USER +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.expected 
b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.expected new file mode 100644 index 0000000000000..8b835fb6b7816 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.expected @@ -0,0 +1,533 @@ +==================================== INPUT ==================================== +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @live + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<43fd74ebd2b94a456ec3c8657edf2868>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => LiveState); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType["read"]>>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/QueryComponentQuery.graphql.js +/** + * SignedSource<<750533be4ec90cc3f3f7cccb2bf849f4>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {person as queryPersonResolverType} from "QueryResolvers"; +// Type assertion validating that `queryPersonResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryPersonResolverType: () => ?{| + +__typename: "Admin" | "User", + +id: DataID, +|}); +export type QueryComponentQuery$variables = {||}; +export type QueryComponentQuery$data = {| + +person: ?{| + +name: ?string, + |}, +|}; +export type QueryComponentQuery = {| + response: QueryComponentQuery$data, + variables: QueryComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayLiveResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "person.__relay_model_instance" + }, + "User": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayLiveResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "person.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('QueryResolvers').person, + "path": "person" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": 
"RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "person", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false, + "resolverInfo": { + "resolverFunction": require('QueryResolvers').person, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + 
"args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0ce65d8a9b6587e620fd0d3e136997d6", + "id": null, + "metadata": {}, + "name": "QueryComponentQuery", + "operationKind": "query", + "text": null + } +}; + +(node/*: any*/).hash = "cc7b67152b1dce33f04a61bea084084f"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + QueryComponentQuery$variables, + QueryComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<5b1064cbbcdf6ad4c972e722067e79b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => LiveState); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType["read"]>>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.input new file mode 100644 index 0000000000000..c488c7beeab29 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.input @@ -0,0 +1,57 @@ +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @live + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.expected new file mode 100644 index 0000000000000..a6f29d6814161 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.expected @@ -0,0 +1,531 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentQuery.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {person as queryPersonResolverType} from "QueryResolvers"; +// Type assertion validating that `queryPersonResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryPersonResolverType: () => ?{| + +__typename: "Admin" | "User", + +id: DataID, +|}); +export type PersonComponentQuery$variables = {||}; +export type PersonComponentQuery$data = {| + +person: ?{| + +name: ?string, + |}, +|}; +export type PersonComponentQuery = {| + response: PersonComponentQuery$data, + variables: PersonComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "person.__relay_model_instance" + }, + "User": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "person.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('QueryResolvers').person, + "path": "person" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", 
+ "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "person", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false, + "resolverInfo": { + "resolverFunction": require('QueryResolvers').person, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + 
"kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0deed3d106ca29ad927a703c08de07e0", + "id": null, + "metadata": {}, + "name": "PersonComponentQuery", + "operationKind": "query", + "text": null + } +}; + +(node/*: any*/).hash = "6f362dca4e4d03f5759795a4ce89dee2"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + PersonComponentQuery$variables, + PersonComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.input new file mode 100644 index 0000000000000..2875b3bcca43d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.input @@ -0,0 +1,55 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.expected new file mode 100644 index 0000000000000..5f57d66d7fb92 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.expected @@ -0,0 +1,554 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! + name: String +} + +type Visitor implements IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentQuery.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {person as queryPersonResolverType} from "QueryResolvers"; +// Type assertion validating that `queryPersonResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryPersonResolverType: () => ?{| + +__typename: "Admin" | "User" | "Visitor", + +id: DataID, +|}); +export type PersonComponentQuery$variables = {||}; +export type PersonComponentQuery$data = {| + +person: ?{| + +name: ?string, + |}, +|}; +export type PersonComponentQuery = {| + response: PersonComponentQuery$data, + variables: PersonComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "Visitor", + "abstractKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "person.__relay_model_instance" + }, + "User": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "person.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('QueryResolvers').person, + "path": "person" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + 
"plural": false, + "selections": [ + (v0/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "PersonComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "person", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false, + "resolverInfo": { + "resolverFunction": require('QueryResolvers').person, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v0/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + 
"storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0deed3d106ca29ad927a703c08de07e0", + "id": null, + "metadata": {}, + "name": "PersonComponentQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +(node/*: any*/).hash = "6f362dca4e4d03f5759795a4ce89dee2"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + PersonComponentQuery$variables, + PersonComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.input new file mode 100644 index 0000000000000..ce2d41ca7d01e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.input @@ -0,0 +1,60 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! + name: String +} + +type Visitor implements IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.expected new file mode 100644 index 0000000000000..9bd31eda8ea03 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.expected @@ -0,0 +1,449 @@ +==================================== INPUT ==================================== +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @weak + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<523d097198aa2ffa2a1209e24ac2a337>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin } from "AdminTypeResolvers"; +import type { FragmentType } from 
"relay-runtime"; +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: Admin, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/QueryComponentQuery.graphql.js +/** + * SignedSource<<4c12cb9f690c4ba88d309a8e9f747867>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import {person as queryPersonResolverType} from "QueryResolvers"; +// Type assertion validating that `queryPersonResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryPersonResolverType: () => ?Query__person$normalization); +import type { Query__person$normalization } from "Query__person$normalization.graphql"; +export type QueryComponentQuery$variables = {||}; +export type QueryComponentQuery$data = {| + +person: ?{| + +name: ?string, + |}, +|}; +export type QueryComponentQuery = {| + response: QueryComponentQuery$data, + variables: QueryComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('QueryResolvers').person, + "path": "person", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": null, + "plural": false, + "normalizationNode": require('Query__person$normalization.graphql') + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + 
"resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "person", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('QueryResolvers').person, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0ce65d8a9b6587e620fd0d3e136997d6", + "id": null, + "metadata": {}, + "name": "QueryComponentQuery", + "operationKind": "query", + "text": null + } +}; + +(node/*: any*/).hash = "cc7b67152b1dce33f04a61bea084084f"; + +module.exports = ((node/*: 
any*/)/*: ClientQuery< + QueryComponentQuery$variables, + QueryComponentQuery$data, +>*/); + +//- __generated__/Query__person$normalization.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +import type { Admin } from "AdminTypeResolvers"; +import type { User } from "UserTypeResolvers"; +export type Query__person$normalization = {| + +__typename: "Admin", + +__relay_model_instance: Admin, +|} | {| + +__typename: "User", + +__relay_model_instance: User, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } +]; +return { + "kind": "SplitOperation", + "metadata": {}, + "name": "Query__person$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "User", + "abstractKey": null + } + ] + } + ] +}; +})(); + +module.exports = node; + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<9a188c26688bb46f65ed80df4ae938c3>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User } from "UserTypeResolvers"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: User, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type 
User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.input new file mode 100644 index 0000000000000..8752cf375a703 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.input @@ -0,0 +1,56 @@ +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @weak + */ + + /** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + + /** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- 
schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.expected new file mode 100644 index 0000000000000..796a41100c23e --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.expected @@ -0,0 +1,532 @@ +==================================== INPUT ==================================== +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/QueryComponentQuery.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {person as queryPersonResolverType} from "QueryResolvers"; +// Type assertion validating that `queryPersonResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryPersonResolverType: () => ?{| + +__typename: "Admin" | "User", + +id: DataID, +|}); +export type QueryComponentQuery$variables = {||}; +export type QueryComponentQuery$data = {| + +person: ?{| + +name: ?string, + |}, +|}; +export type QueryComponentQuery = {| + response: QueryComponentQuery$data, + variables: QueryComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Admin": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "person.__relay_model_instance" + }, + "User": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayLiveResolver", + "name": "person", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "person.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "person", + "resolverModule": require('QueryResolvers').person, + "path": "person" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + 
"name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "person.name" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "QueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "person", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false, + "resolverInfo": { + "resolverFunction": require('QueryResolvers').person, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "person", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": 
"RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "0ce65d8a9b6587e620fd0d3e136997d6", + "id": null, + "metadata": {}, + "name": "QueryComponentQuery", + "operationKind": "query", + "text": null + } +}; + +(node/*: any*/).hash = "cc7b67152b1dce33f04a61bea084084f"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + QueryComponentQuery$variables, + QueryComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<5b1064cbbcdf6ad4c972e722067e79b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => LiveState); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType["read"]>>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayLiveResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.input new file mode 100644 index 0000000000000..eee5be4c9d50d --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.input @@ -0,0 +1,56 @@ +//- QueryComponent.js +graphql`query QueryComponentQuery { + person { + name + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: IPerson + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @live + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.expected new file mode 100644 index 0000000000000..b5abcc88f1f61 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.expected @@ -0,0 +1,65 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + ... on User { + name + } + ... on Admin { + name + } + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: Person + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +type User { + id: ID! +} +type Admin { + id: ID! +} + +union Person = User | Admin +==================================== OUTPUT =================================== +✖︎ @RelayResolver union type `Person` for field `person` is not supported as @outputType, yet. 
+ + QueryResolvers.js:2:25 + 1 │ * + 2 │ * @RelayResolver Query.person: Person + │ ^^^^^^ + 3 │ diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.input new file mode 100644 index 0000000000000..15f9da0417b10 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_returns_union.input @@ -0,0 +1,56 @@ +//- PersonComponent.js +graphql`query PersonComponentQuery { + person { + ... on User { + name + } + ... on Admin { + name + } + } +}` + +//- QueryResolvers.js +/** + * @RelayResolver Query.person: Person + */ + +//- UserTypeResolvers.js +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql +type User { + id: ID! +} +type Admin { + id: ID! 
+} + +union Person = User | Admin diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.expected new file mode 100644 index 0000000000000..6435a47e99ae9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.expected @@ -0,0 +1,298 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: CustomScalarType @semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: CustomScalarType @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [CustomScalarType] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [CustomScalarType] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true, + "schemaExtensions": [ + "./extensions.graphql" + ], + "customScalarTypes": { + "CustomScalarType": { + "name": "CustomScalarTypeFlowType", + "path": "CustomScalarTypeFlowTypeModule" + } + } +} + +//- extensions.graphql + +scalar CustomScalarType + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } 
+==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<<983b85d6af01334176c1f903eecd63de>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "bar_live": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + }, + "bar_live_plural": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + }, + "bar_plural": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<9720117be72c47f602a12640a544c268>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + }, + { + "name": "bar_live", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + } + 
}, + { + "name": "bar_plural", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + } + }, + { + "name": "bar_live_plural", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "85fc07b6873f5892801afc1cb3e30ea1"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<46fde794f6313b1e7b8d607cfe8a711d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { CustomScalarTypeFlowType } from "CustomScalarTypeFlowTypeModule"; +import type { LiveState, FragmentType } from "relay-runtime"; +import {bar_live_plural as userBarLivePluralResolverType} from "User_bar"; +// Type assertion validating that `userBarLivePluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLivePluralResolverType: () => LiveState<$ReadOnlyArray>); +import {bar_live as userBarLiveResolverType} from "User_bar"; +// Type assertion validating that `userBarLiveResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLiveResolverType: () => LiveState); +import {bar_plural as userBarPluralResolverType} from "User_bar"; +// Type assertion validating that `userBarPluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userBarPluralResolverType: () => $ReadOnlyArray); +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => CustomScalarTypeFlowType); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: CustomScalarTypeFlowType, + +bar_live: CustomScalarTypeFlowType, + +bar_live_plural: $ReadOnlyArray, + +bar_plural: $ReadOnlyArray, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live", + "resolverModule": require('User_bar').bar_live, + "path": "bar_live" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar_plural", + "resolverModule": require('User_bar').bar_plural, + "path": "bar_plural" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live_plural", + "resolverModule": require('User_bar').bar_live_plural, + "path": "bar_live_plural" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "85fc07b6873f5892801afc1cb3e30ea1"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); 
diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.input new file mode 100644 index 0000000000000..8b139204cd7dd --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.input @@ -0,0 +1,66 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: CustomScalarType @semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: CustomScalarType @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [CustomScalarType] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [CustomScalarType] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true, + "schemaExtensions": [ + "./extensions.graphql" + ], + "customScalarTypes": { + "CustomScalarType": { + "name": "CustomScalarTypeFlowType", + "path": "CustomScalarTypeFlowTypeModule" + } + } +} + +//- extensions.graphql + +scalar CustomScalarType + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.expected 
b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.expected new file mode 100644 index 0000000000000..5e2a25378e2a4 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.expected @@ -0,0 +1,180 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * 
SignedSource<<4dcc21fbaeaa2b274cc36b53f0b0f77a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { LiveState, FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => LiveState); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: string, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.input new file mode 100644 index 0000000000000..739a81b2c0517 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_live.input @@ -0,0 +1,36 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } diff --git 
a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.expected new file mode 100644 index 0000000000000..b3f9156fd6fcf --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.expected @@ -0,0 +1,179 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: [String] @semanticNonNull(levels: [0,1]) + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": 
require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => $ReadOnlyArray); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: $ReadOnlyArray, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.input new file mode 100644 index 0000000000000..3efb7726ae6fa --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.input @@ -0,0 +1,35 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: [String] @semanticNonNull(levels: [0,1]) + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } diff --git 
a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.expected new file mode 100644 index 0000000000000..d7447e2c346e2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.expected @@ -0,0 +1,180 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: [String] @semanticNonNull(levels: [0,1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": 
require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<4dcc21fbaeaa2b274cc36b53f0b0f77a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<07549999ce219b691cd421ba43c8497a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { LiveState, FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => LiveState<$ReadOnlyArray>); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: $ReadOnlyArray, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.input new file mode 100644 index 0000000000000..b1ac93150f078 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.input @@ -0,0 +1,36 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: [String] @semanticNonNull(levels: [0,1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } diff --git 
a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.expected new file mode 100644 index 0000000000000..bdb7065c90238 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.expected @@ -0,0 +1,284 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue @semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: RelayResolverValue @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<<983b85d6af01334176c1f903eecd63de>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: 
+import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "bar_live": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + }, + "bar_live_plural": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + }, + "bar_plural": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<9720117be72c47f602a12640a544c268>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + }, + { + "name": "bar_live", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + } + }, + { + "name": "bar_plural", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + } + }, + { + "name": "bar_live_plural", + "args": null, + "kind": 
"RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "85fc07b6873f5892801afc1cb3e30ea1"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { LiveState, FragmentType } from "relay-runtime"; +import {bar_live_plural as userBarLivePluralResolverType} from "User_bar"; +// Type assertion validating that `userBarLivePluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLivePluralResolverType: () => LiveState<$ReadOnlyArray<$NonMaybeType>>); +import {bar_live as userBarLiveResolverType} from "User_bar"; +// Type assertion validating that `userBarLiveResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLiveResolverType: () => LiveState<$NonMaybeType>); +import {bar_plural as userBarPluralResolverType} from "User_bar"; +// Type assertion validating that `userBarPluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarPluralResolverType: () => $ReadOnlyArray<$NonMaybeType>); +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userBarResolverType: () => $NonMaybeType); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: $NonMaybeType>, + +bar_live: $NonMaybeType["read"]>>, + +bar_live_plural: $NonMaybeType["read"]>>, + +bar_plural: $NonMaybeType>, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live", + "resolverModule": require('User_bar').bar_live, + "path": "bar_live" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar_plural", + "resolverModule": require('User_bar').bar_plural, + "path": "bar_plural" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live_plural", + "resolverModule": require('User_bar').bar_live_plural, + "path": "bar_live_plural" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "85fc07b6873f5892801afc1cb3e30ea1"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.input new file mode 100644 index 
0000000000000..429f7fcd511cc --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.input @@ -0,0 +1,53 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue @semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: RelayResolverValue @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.expected new file mode 100644 index 0000000000000..d12f3ba444df0 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.expected @@ -0,0 +1,282 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: 
String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue @semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: RelayResolverValue @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<<983b85d6af01334176c1f903eecd63de>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "bar_live": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + }, + "bar_live_plural": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + }, + "bar_plural": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + }, + "foo": { + 
"resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + }, + { + "name": "bar_live", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live, + "rootFragment": null + } + }, + { + "name": "bar_plural", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_plural, + "rootFragment": null + } + }, + { + "name": "bar_live_plural", + "args": null, + "kind": "RelayLiveResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar_live_plural, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "64a6f93fd7f3b03cced910bd568d74f0"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<4f80ed9d7cde6b607f5e3173957a4d1d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { LiveState, FragmentType } from "relay-runtime"; +import {bar_live_plural as 
userBarLivePluralResolverType} from "User_bar"; +// Type assertion validating that `userBarLivePluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLivePluralResolverType: () => LiveState<$ReadOnlyArray<$NonMaybeType>>); +import {bar_live as userBarLiveResolverType} from "User_bar"; +// Type assertion validating that `userBarLiveResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarLiveResolverType: () => LiveState<$NonMaybeType>); +import {bar_plural as userBarPluralResolverType} from "User_bar"; +// Type assertion validating that `userBarPluralResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarPluralResolverType: () => $ReadOnlyArray<$NonMaybeType>); +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => $NonMaybeType); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: ?ReturnType, + +bar_live: ?ReturnType["read"]>, + +bar_live_plural: ?ReturnType["read"]>, + +bar_plural: ?ReturnType, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live", + "resolverModule": require('User_bar').bar_live, + "path": "bar_live" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar_plural", + "resolverModule": require('User_bar').bar_plural, + "path": "bar_plural" + }, + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "bar_live_plural", + "resolverModule": require('User_bar').bar_live_plural, + "path": "bar_live_plural" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "64a6f93fd7f3b03cced910bd568d74f0"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.input new file mode 100644 index 0000000000000..51ca37c892148 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.input @@ -0,0 +1,53 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar + bar_live + bar_plural + bar_live_plural +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue 
@semanticNonNull + */ + + /** + * @RelayResolver User.bar_live: RelayResolverValue @semanticNonNull + * @live + */ + +/** + * @RelayResolver User.bar_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + */ + + /** + * @RelayResolver User.bar_live_plural: [RelayResolverValue] @semanticNonNull(levels: [0, 1]) + * @live + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} + +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.expected new file mode 100644 index 0000000000000..ed5dea00a166a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.expected @@ -0,0 +1,179 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": 
"__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {bar as 
userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => string); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: string, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "c571977071bef677ed9b7926d2dad022"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.input new file mode 100644 index 0000000000000..a877b1d728c47 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.input @@ -0,0 +1,35 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String @semanticNonNull + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": 
"haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": true +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.expected new file mode 100644 index 0000000000000..904b9424fc8a2 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.expected @@ -0,0 +1,43 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: String + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": false +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } +==================================== OUTPUT =================================== +✖︎ Unexpected `@throwOnFieldError` directive. 
The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled. + + User_foo.js:5:42 + 5 │ fragment UserFooFragment on User @throwOnFieldError { + │ ^^^^^^^^^^^^^^^^^^ + 6 │ bar diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.input new file mode 100644 index 0000000000000..d98682a8aa78a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.input @@ -0,0 +1,35 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: String + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User @throwOnFieldError { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: String @semanticNonNull + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + }, + "experimentalEmitSemanticNullabilityTypes": false +} + +//- schema.graphql +type Query { + greeting: String +} +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.expected new file mode 100644 index 0000000000000..3f56838628d6b --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.expected @@ -0,0 +1,174 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + 
* @RelayResolver User.foo: RelayResolverValue! + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue! + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<79043ca4ab6c3a1b54e21b82211b593c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + 
"isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<2d571ceea775fac44fa1fceb53824891>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => $NonMaybeType); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: $NonMaybeType>, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.input new file mode 100644 index 0000000000000..866f32ba22aed --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_non_nullable.input @@ -0,0 +1,32 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: RelayResolverValue! + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue! 
+ */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "allow_resolver_non_nullable_return_type": { "kind": "enabled" } + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.expected new file mode 100644 index 0000000000000..61b80a31d7d2a --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.expected @@ -0,0 +1,173 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: RelayResolverValue + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var 
schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- __generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<79043ca4ab6c3a1b54e21b82211b593c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + } + } + ] + } + ] +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<8cc1f9903984d3c06d796d4524cf1c23>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userBarResolverType: () => ?mixed); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: ?ReturnType, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.input new file mode 100644 index 0000000000000..dd55aa000e8ef --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module.input @@ -0,0 +1,31 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: RelayResolverValue + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + }, + "resolversSchemaModule": { + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String 
} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.expected new file mode 100644 index 0000000000000..2234443eb0894 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.expected @@ -0,0 +1,171 @@ +==================================== INPUT ==================================== +//- User_foo.js +/** + * @RelayResolver User.foo: RelayResolverValue + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + }, + "resolversSchemaModule": { + "applyToNormalizationAst": true, + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/ResolversSchemaModule.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { SchemaResolvers } from 'ReactiveQueryExecutor'; +import type { ResolverFunction, NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var schema_resolvers/*: SchemaResolvers*/ = { + "User": { + "bar": { + "resolverFunction": require('User_bar').bar, + "rootFragment": null + }, + "foo": { + "resolverFunction": require('User_foo').foo, + "rootFragment": require('UserFooFragment$normalization.graphql') + } + } +}; + +module.exports = schema_resolvers; + +//- 
__generated__/UserFooFragment$normalization.graphql.js +/** + * SignedSource<<0dc5f1959c406808557590206bee9173>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "UserFooFragment$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "bar", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverReference": { resolverFunctionName: "bar", fieldType: "User" } + } + ] + } + ] +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = node; + +//- __generated__/UserFooFragment.graphql.js +/** + * SignedSource<<8cc1f9903984d3c06d796d4524cf1c23>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {bar as userBarResolverType} from "User_bar"; +// Type assertion validating that `userBarResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userBarResolverType: () => ?mixed); +declare export opaque type UserFooFragment$fragmentType: FragmentType; +export type UserFooFragment$data = {| + +bar: ?ReturnType, + +$fragmentType: UserFooFragment$fragmentType, +|}; +export type UserFooFragment$key = { + +$data?: UserFooFragment$data, + +$fragmentSpreads: UserFooFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "UserFooFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "bar", + "resolverModule": require('User_bar').bar, + "path": "bar" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "285ee53d00b8def775c9e1ed756743bf"; + +module.exports = ((node/*: any*/)/*: Fragment< + UserFooFragment$fragmentType, + UserFooFragment$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.input new file mode 100644 index 0000000000000..3d3f42a1f2fa5 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.input @@ -0,0 +1,32 @@ +//- User_foo.js +/** + * @RelayResolver User.foo: RelayResolverValue + * @rootFragment UserFooFragment + */ +graphql`fragment UserFooFragment on User { + bar +}` + +//- User_bar.js +/** + * @RelayResolver User.bar: RelayResolverValue + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true + }, + "resolversSchemaModule": { + "applyToNormalizationAst": true, + "path": "__generated__/ResolversSchemaModule.js" + } +} + +//- schema.graphql +type Query { me: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.expected new file mode 100644 
index 0000000000000..67dd12c461485 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.expected @@ -0,0 +1,60 @@ +==================================== INPUT ==================================== +//- foo.js +graphql` + fragment foo on User { + name + }`; + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql" +} + +//- schema.graphql +type Query { me: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/foo.graphql.ts +/** + * SignedSource<> + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +import { Fragment, ReaderFragment } from 'relay-runtime'; +import { FragmentRefs } from "relay-runtime"; +export type foo$data = { + readonly name: string | null | undefined; + readonly " $fragmentType": "foo"; +}; +export type foo$key = { + readonly " $data"?: foo$data; + readonly " $fragmentSpreads": FragmentRefs<"foo">; +}; + +const node: ReaderFragment = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "foo", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +(node as any).hash = "01d5e51e4b7ff55557834f125c21745d"; + +export default node; diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.input new file mode 100644 index 0000000000000..58786e6edf69c --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/simple_fragment.input @@ -0,0 +1,15 @@ +//- foo.js +graphql` + fragment foo on User { + name + }`; + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql" +} + +//- schema.graphql 
+type Query { me: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.expected new file mode 100644 index 0000000000000..f18c80a5d6fc9 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.expected @@ -0,0 +1,433 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson @relay(plural: true) { + name +}` + +//- SpreadInterfaceFragmentOnConcreteTypeComponent.js +graphql`fragment SpreadInterfaceFragmentOnConcreteTypeComponentFragment on User { + ...PersonComponentFragment +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +//- schema-extensions/extension.graphql +type Query { + me: IPerson +} +interface IPerson { + id: ID! 
+ name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from "AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<08d1886c2f48001c3ee34ede8433c26d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = $ReadOnlyArray<{| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}>; +export type PersonComponentFragment$key = $ReadOnlyArray<{ + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}>; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "plural": true + }, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "8be134328a2e066b0a25f03aa1bd8468"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/SpreadInterfaceFragmentOnConcreteTypeComponentFragment.graphql.js +/** + * SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { PersonComponentFragment$fragmentType } from "PersonComponentFragment.graphql"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type 
SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType: FragmentType; +export type SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data = {| + +$fragmentSpreads: PersonComponentFragment$fragmentType, + +$fragmentType: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, +|}; +export type SpreadInterfaceFragmentOnConcreteTypeComponentFragment$key = { + +$data?: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data, + +$fragmentSpreads: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "SpreadInterfaceFragmentOnConcreteTypeComponentFragment", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "PersonComponentFragment" + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "fdd82cf9f0e06b12fd7652c210d56984"; + +module.exports = ((node/*: any*/)/*: Fragment< + SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, + SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.input new file mode 100644 index 0000000000000..77ba2c74e2ff3 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.input @@ -0,0 +1,53 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson @relay(plural: true) { + name +}` + +//- SpreadInterfaceFragmentOnConcreteTypeComponent.js +graphql`fragment SpreadInterfaceFragmentOnConcreteTypeComponentFragment on User { + ...PersonComponentFragment +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + */ + +/** + * @RelayResolver User.name: String + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + */ + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql + +//- schema-extensions/extension.graphql +type Query { + me: IPerson +} +interface IPerson { + id: ID! 
+ name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.expected new file mode 100644 index 0000000000000..fa1d63a33ce10 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.expected @@ -0,0 +1,445 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserQueryComponent.js + +graphql`query UserQueryComponentQuery { + user { + ...PersonComponentFragment + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @weak + */ + export type User {}; + +/** + * @RelayResolver User.name: String + */ + +/** + * @RelayResolver Query.user: User + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + export type Admin {}; + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + person: IPerson +} + +//- schema-extensions/extension.graphql +interface IPerson { + name: String +} +==================================== OUTPUT =================================== +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<523d097198aa2ffa2a1209e24ac2a337>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, 
ReaderFragment } from 'relay-runtime'; +import type { Admin } from "AdminTypeResolvers"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: Admin, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<281289c09e9f11b0f0fdf7dd357dbe36>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('AdminTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('UserTypeResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/UserQueryComponentQuery.graphql.js +/** + * SignedSource<<097b4c59a2a87918f0d913802fcac7cb>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { PersonComponentFragment$fragmentType } from "PersonComponentFragment.graphql"; +import {user as queryUserResolverType} from "UserTypeResolvers"; +// Type assertion validating that `queryUserResolverType` 
resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryUserResolverType: () => ?User); +import type { User } from "UserTypeResolvers"; +export type UserQueryComponentQuery$variables = {||}; +export type UserQueryComponentQuery$data = {| + +user: ?{| + +$fragmentSpreads: PersonComponentFragment$fragmentType, + |}, +|}; +export type UserQueryComponentQuery = {| + response: UserQueryComponentQuery$data, + variables: UserQueryComponentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "UserQueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "User", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "user", + "resolverModule": require('UserTypeResolvers').user, + "path": "user", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "User", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "user", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "PersonComponentFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "UserQueryComponentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "user", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').user, + "rootFragment": null + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": 
"user", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('AdminTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "name", + "args": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true, + "resolverInfo": { + "resolverFunction": require('UserTypeResolvers').name, + "rootFragment": null + } + } + ], + "type": "User", + "abstractKey": null + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" + } + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIPerson": [ + "Admin", + "User" + ] + } + }, + "params": { + "cacheID": "a0629573b4435fb5bd73c32fd770efcb", + "id": null, + "metadata": {}, + "name": "UserQueryComponentQuery", + "operationKind": "query", + "text": null + } +}; + +(node/*: any*/).hash = "69ab3801e24e1f83ade42fd674df9ce7"; + +module.exports = ((node/*: any*/)/*: ClientQuery< + UserQueryComponentQuery$variables, + UserQueryComponentQuery$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<9a188c26688bb46f65ed80df4ae938c3>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User } from "UserTypeResolvers"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: User, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: 
User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.input new file mode 100644 index 0000000000000..bb6f633f1c964 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.input @@ -0,0 +1,63 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- UserQueryComponent.js + +graphql`query UserQueryComponentQuery { + user { + ...PersonComponentFragment + } +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson + * @weak + */ + export type User {}; + +/** + * @RelayResolver User.name: String + */ + +/** + * @RelayResolver Query.user: User + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson + * @weak + */ + export type Admin {}; + +/** + * @RelayResolver Admin.name: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + 
"relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + person: IPerson +} + +//- schema-extensions/extension.graphql +interface IPerson { + name: String +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.expected new file mode 100644 index 0000000000000..3c324e56abef8 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.expected @@ -0,0 +1,555 @@ +==================================== INPUT ==================================== +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- ActorComponent.js +graphql`fragment ActorComponentFragment on IActor { + description +}` + +//- SpreadInterfaceFragmentOnConcreteTypeComponent.js +graphql`fragment SpreadInterfaceFragmentOnConcreteTypeComponentFragment on User { + ...PersonComponentFragment + ...ActorComponentFragment +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- IActorResolvers.js +/** + * @RelayResolver IActor.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql + +interface 
IPerson { + id: ID! +} + +interface IActor { + id: ID! +} +==================================== OUTPUT =================================== +//- __generated__/ActorComponentFragment.graphql.js +/** + * SignedSource<<29a6721ae972d24abc6fd33ed4cae7bd>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {description as iActorDescriptionResolverType} from "IActorResolvers"; +// Type assertion validating that `iActorDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iActorDescriptionResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +declare export opaque type ActorComponentFragment$fragmentType: FragmentType; +export type ActorComponentFragment$data = {| + +description: ?string, + +$fragmentType: ActorComponentFragment$fragmentType, +|}; +export type ActorComponentFragment$key = { + +$data?: ActorComponentFragment$data, + +$fragmentSpreads: ActorComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ActorComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IActorResolvers').description, '__relay_model_instance', true), + "path": "description" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IActorResolvers').description, '__relay_model_instance', true), + "path": "description" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IActor", + "abstractKey": "__isIActor" +}; + +(node/*: any*/).hash = "44e992843e41c24ce4cc28290f4c04c3"; + +module.exports = ((node/*: any*/)/*: Fragment< + ActorComponentFragment$fragmentType, + ActorComponentFragment$data, +>*/); + +//- __generated__/Admin____relay_model_instance.graphql.js +/** + * SignedSource<<7fbe3989595c55397f4bcc5c81ec30b2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin__id$data } from "Admin__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Admin as adminRelayModelInstanceResolverType} from 
"AdminTypeResolvers"; +// Type assertion validating that `adminRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(adminRelayModelInstanceResolverType: ( + id: Admin__id$data['id'], +) => mixed); +declare export opaque type Admin____relay_model_instance$fragmentType: FragmentType; +export type Admin____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Admin____relay_model_instance$fragmentType, +|}; +export type Admin____relay_model_instance$key = { + +$data?: Admin____relay_model_instance$data, + +$fragmentSpreads: Admin____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin__id.graphql'), require('AdminTypeResolvers').Admin, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin____relay_model_instance$fragmentType, + Admin____relay_model_instance$data, +>*/); + +//- __generated__/Admin__id.graphql.js +/** + * SignedSource<<29acfbf1d6f559b8b77e9cd1f35218c0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Admin__id$fragmentType: FragmentType; +export type Admin__id$data = {| + +id: string, + +$fragmentType: Admin__id$fragmentType, +|}; +export type Admin__id$key = { + +$data?: 
Admin__id$data, + +$fragmentSpreads: Admin__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Admin__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Admin", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Admin__id$fragmentType, + Admin__id$data, +>*/); + +//- __generated__/PersonComponentFragment.graphql.js +/** + * SignedSource<<4ea82f388eeb21ecbc79f404917c194e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Admin____relay_model_instance$data } from "Admin____relay_model_instance.graphql"; +import type { User____relay_model_instance$data } from "User____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {name as iPersonNameResolverType} from "IPersonResolvers"; +// Type assertion validating that `iPersonNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iPersonNameResolverType: ( + model: Admin____relay_model_instance$data['__relay_model_instance'] | User____relay_model_instance$data['__relay_model_instance'], +) => ?string); +declare export opaque type PersonComponentFragment$fragmentType: FragmentType; +export type PersonComponentFragment$data = {| + +name: ?string, + +$fragmentType: PersonComponentFragment$fragmentType, +|}; +export type PersonComponentFragment$key = { + +$data?: PersonComponentFragment$data, + +$fragmentSpreads: PersonComponentFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PersonComponentFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Admin____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('Admin____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "Admin", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User____relay_model_instance.graphql'), require('IPersonResolvers').name, '__relay_model_instance', true), + "path": "name" + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "type": "IPerson", + "abstractKey": "__isIPerson" +}; + +(node/*: any*/).hash = "a57dd30bd59412781e9566e1553e2d70"; + +module.exports = ((node/*: any*/)/*: Fragment< + PersonComponentFragment$fragmentType, + PersonComponentFragment$data, +>*/); + +//- __generated__/SpreadInterfaceFragmentOnConcreteTypeComponentFragment.graphql.js +/** + * SignedSource<<290409f030b9f36f1caae1e20cc4b354>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { ActorComponentFragment$fragmentType } from "ActorComponentFragment.graphql"; +import type { PersonComponentFragment$fragmentType } from "PersonComponentFragment.graphql"; +import 
type { FragmentType } from "relay-runtime"; +declare export opaque type SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType: FragmentType; +export type SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data = {| + +$fragmentSpreads: ActorComponentFragment$fragmentType & PersonComponentFragment$fragmentType, + +$fragmentType: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, +|}; +export type SpreadInterfaceFragmentOnConcreteTypeComponentFragment$key = { + +$data?: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data, + +$fragmentSpreads: SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "SpreadInterfaceFragmentOnConcreteTypeComponentFragment", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "PersonComponentFragment" + }, + { + "args": null, + "kind": "FragmentSpread", + "name": "ActorComponentFragment" + } + ], + "type": "User", + "abstractKey": null +}; + +(node/*: any*/).hash = "e2b46497d73b1e146810ae14681ce015"; + +module.exports = ((node/*: any*/)/*: Fragment< + SpreadInterfaceFragmentOnConcreteTypeComponentFragment$fragmentType, + SpreadInterfaceFragmentOnConcreteTypeComponentFragment$data, +>*/); + +//- __generated__/User____relay_model_instance.graphql.js +/** + * SignedSource<<7ffabc2a97c3589cbfd20a23b3b608ca>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { User__id$data } from "User__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {User as userRelayModelInstanceResolverType} from "UserTypeResolvers"; +// Type assertion validating that `userRelayModelInstanceResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(userRelayModelInstanceResolverType: ( + id: User__id$data['id'], +) => mixed); +declare export opaque type User____relay_model_instance$fragmentType: FragmentType; +export type User____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: User____relay_model_instance$fragmentType, +|}; +export type User____relay_model_instance$key = { + +$data?: User____relay_model_instance$data, + +$fragmentSpreads: User____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "User__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('User__id.graphql'), require('UserTypeResolvers').User, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User____relay_model_instance$fragmentType, + User____relay_model_instance$data, +>*/); + +//- __generated__/User__id.graphql.js +/** + * SignedSource<<0a0f39eb34bfc882d28378a0b05b3c17>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type User__id$fragmentType: FragmentType; +export type User__id$data = {| + +id: string, + +$fragmentType: User__id$fragmentType, +|}; +export type User__id$key = { + +$data?: User__id$data, + +$fragmentSpreads: User__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "User__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + User__id$fragmentType, + User__id$data, +>*/); diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.input new file mode 100644 index 0000000000000..15a78558fdd68 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.input @@ -0,0 +1,65 @@ +//- PersonComponent.js +graphql`fragment PersonComponentFragment on IPerson { + name +}` + +//- ActorComponent.js +graphql`fragment ActorComponentFragment on IActor { + description +}` + +//- SpreadInterfaceFragmentOnConcreteTypeComponent.js +graphql`fragment SpreadInterfaceFragmentOnConcreteTypeComponentFragment on User { + ...PersonComponentFragment + ...ActorComponentFragment +}` + +//- UserTypeResolvers.js +/** + * @RelayResolver User implements IPerson & IActor + */ + +//- AdminTypeResolvers.js +/** + * @RelayResolver Admin implements IPerson & IActor + */ + +//- IPersonResolvers.js +/** + * @RelayResolver IPerson.name: String + */ + +//- IActorResolvers.js +/** + * @RelayResolver IActor.description: String + */ + +//- relay.config.json +{ + "language": "flow", + "jsModuleFormat": "haste", + "schema": "schema.graphql", + "schemaExtensions": [ + "schema-extensions" + ], + "featureFlags": { + "enable_relay_resolver_transform": true, + "enable_resolver_normalization_ast": true, + 
"relay_resolver_enable_interface_output_type": { "kind": "enabled" } + } +} + +//- schema.graphql +type Query { + greeting: String +} + +//- schema-extensions/extension.graphql + +interface IPerson { + id: ID! +} + +interface IActor { + id: ID! +} diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.expected b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.expected new file mode 100644 index 0000000000000..e8c1d60403663 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.expected @@ -0,0 +1,78 @@ +==================================== INPUT ==================================== +//- foo.ts +/** + * @RelayResolver User.foo: RelayResolverValue + */ + +//- bar.ts +graphql`fragment barFragment on User { + foo +}` + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql +type Query { user: User } +type User { name: String } +==================================== OUTPUT =================================== +//- __generated__/barFragment.graphql.ts +/** + * SignedSource<<6c58dff465555ee63fe212edde3a3e01>> + * @lightSyntaxTransform + * @nogrep + */ + +/* tslint:disable */ +/* eslint-disable */ +// @ts-nocheck + +import { Fragment, ReaderFragment } from 'relay-runtime'; +import { FragmentRefs } from "relay-runtime"; +import { foo as userFooResolverType } from "../foo"; +export type barFragment$data = { + readonly foo: ReturnType | null | undefined; + readonly " $fragmentType": "barFragment"; +}; +export type barFragment$key = { + readonly " $data"?: barFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"barFragment">; +}; + +import {foo as userFooResolver} from './../foo'; + +const node: ReaderFragment = { + "argumentDefinitions": 
[], + "kind": "Fragment", + "metadata": null, + "name": "barFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "foo", + "resolverModule": userFooResolver, + "path": "foo" + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +(node as any).hash = "f60f2dcc6b71a6c9ec170e68dc2c994d"; + +export default node; diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.input b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.input new file mode 100644 index 0000000000000..a8677a89dd860 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration/fixtures/typescript_resolver_type_import.input @@ -0,0 +1,23 @@ +//- foo.ts +/** + * @RelayResolver User.foo: RelayResolverValue + */ + +//- bar.ts +graphql`fragment barFragment on User { + foo +}` + +//- relay.config.json +{ + "language": "typescript", + "schema": "./schema.graphql", + "eagerEsModules": true, + "featureFlags": { + "enable_relay_resolver_transform": true + } +} + +//- schema.graphql +type Query { user: User } +type User { name: String } diff --git a/compiler/crates/relay-compiler/tests/relay_compiler_integration_test.rs b/compiler/crates/relay-compiler/tests/relay_compiler_integration_test.rs new file mode 100644 index 0000000000000..3da02dd3b9004 --- /dev/null +++ b/compiler/crates/relay-compiler/tests/relay_compiler_integration_test.rs @@ -0,0 +1,349 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<<748d236bdfe163a6942c2efcf0cafcab>> + */ + +mod relay_compiler_integration; + +use relay_compiler_integration::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn client_mutation_extension() { + let input = include_str!("relay_compiler_integration/fixtures/client_mutation_extension.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_mutation_extension.expected"); + test_fixture(transform_fixture, file!(), "client_mutation_extension.input", "relay_compiler_integration/fixtures/client_mutation_extension.expected", input, expected).await; +} + +#[tokio::test] +async fn client_mutation_resolver() { + let input = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver.expected"); + test_fixture(transform_fixture, file!(), "client_mutation_resolver.input", "relay_compiler_integration/fixtures/client_mutation_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn client_mutation_resolver_different_mutation_ok() { + let input = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.expected"); + test_fixture(transform_fixture, file!(), "client_mutation_resolver_different_mutation_ok.input", "relay_compiler_integration/fixtures/client_mutation_resolver_different_mutation_ok.expected", input, expected).await; +} + +#[tokio::test] +async fn client_mutation_resolver_invalid_disabled() { + let input = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.expected"); + test_fixture(transform_fixture, file!(), 
"client_mutation_resolver_invalid_disabled.input", "relay_compiler_integration/fixtures/client_mutation_resolver_invalid_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn client_mutation_resolver_invalid_nonscalar() { + let input = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.expected"); + test_fixture(transform_fixture, file!(), "client_mutation_resolver_invalid_nonscalar.input", "relay_compiler_integration/fixtures/client_mutation_resolver_invalid_nonscalar.expected", input, expected).await; +} + +#[tokio::test] +async fn client_schema_extension_interface_uses_resolver_type() { + let input = include_str!("relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.expected"); + test_fixture(transform_fixture, file!(), "client_schema_extension_interface_uses_resolver_type.input", "relay_compiler_integration/fixtures/client_schema_extension_interface_uses_resolver_type.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_variable_default_arg_invalid() { + let input = include_str!("relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.input"); + let expected = include_str!("relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_variable_default_arg.invalid.input", "relay_compiler_integration/fixtures/custom_scalar_variable_default_arg.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn custom_scalar_variable_default_arg_non_strict() { + let input = include_str!("relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.input"); + let 
expected = include_str!("relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.expected"); + test_fixture(transform_fixture, file!(), "custom_scalar_variable_default_arg_non_strict.input", "relay_compiler_integration/fixtures/custom_scalar_variable_default_arg_non_strict.expected", input, expected).await; +} + +#[tokio::test] +async fn error_handling_fragment() { + let input = include_str!("relay_compiler_integration/fixtures/error_handling_fragment.input"); + let expected = include_str!("relay_compiler_integration/fixtures/error_handling_fragment.expected"); + test_fixture(transform_fixture, file!(), "error_handling_fragment.input", "relay_compiler_integration/fixtures/error_handling_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn error_handling_query() { + let input = include_str!("relay_compiler_integration/fixtures/error_handling_query.input"); + let expected = include_str!("relay_compiler_integration/fixtures/error_handling_query.expected"); + test_fixture(transform_fixture, file!(), "error_handling_query.input", "relay_compiler_integration/fixtures/error_handling_query.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_arguments() { + let input = include_str!("relay_compiler_integration/fixtures/fragment_arguments.input"); + let expected = include_str!("relay_compiler_integration/fixtures/fragment_arguments.expected"); + test_fixture(transform_fixture, file!(), "fragment_arguments.input", "relay_compiler_integration/fixtures/fragment_arguments.expected", input, expected).await; +} + +#[tokio::test] +async fn live_resolver_implements_interface_field() { + let input = include_str!("relay_compiler_integration/fixtures/live_resolver_implements_interface_field.input"); + let expected = include_str!("relay_compiler_integration/fixtures/live_resolver_implements_interface_field.expected"); + test_fixture(transform_fixture, file!(), "live_resolver_implements_interface_field.input", 
"relay_compiler_integration/fixtures/live_resolver_implements_interface_field.expected", input, expected).await; +} + +#[tokio::test] +async fn multiple_resolvers_on_interface_of_all_strong_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.expected"); + test_fixture(transform_fixture, file!(), "multiple_resolvers_on_interface_of_all_strong_model_type.input", "relay_compiler_integration/fixtures/multiple_resolvers_on_interface_of_all_strong_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn multiple_resolvers_returns_interfaces_of_all_strong_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.expected"); + test_fixture(transform_fixture, file!(), "multiple_resolvers_returns_interfaces_of_all_strong_model_type.input", "relay_compiler_integration/fixtures/multiple_resolvers_returns_interfaces_of_all_strong_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn preloadable_query_flow() { + let input = include_str!("relay_compiler_integration/fixtures/preloadable_query_flow.input"); + let expected = include_str!("relay_compiler_integration/fixtures/preloadable_query_flow.expected"); + test_fixture(transform_fixture, file!(), "preloadable_query_flow.input", "relay_compiler_integration/fixtures/preloadable_query_flow.expected", input, expected).await; +} + +#[tokio::test] +async fn preloadable_query_javascript() { + let input = include_str!("relay_compiler_integration/fixtures/preloadable_query_javascript.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/preloadable_query_javascript.expected"); + test_fixture(transform_fixture, file!(), "preloadable_query_javascript.input", "relay_compiler_integration/fixtures/preloadable_query_javascript.expected", input, expected).await; +} + +#[tokio::test] +async fn preloadable_query_typescript() { + let input = include_str!("relay_compiler_integration/fixtures/preloadable_query_typescript.input"); + let expected = include_str!("relay_compiler_integration/fixtures/preloadable_query_typescript.expected"); + test_fixture(transform_fixture, file!(), "preloadable_query_typescript.input", "relay_compiler_integration/fixtures/preloadable_query_typescript.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface.input", "relay_compiler_integration/fixtures/resolver_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_does_not_pass_schema_validation_invalid() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface_does_not_pass_schema_validation.invalid.input", "relay_compiler_integration/fixtures/resolver_on_interface_does_not_pass_schema_validation.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface_of_all_strong_model_type.input", "relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type_including_cse() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface_of_all_strong_model_type_including_cse.input", "relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_including_cse.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type_with_root_fragment() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface_of_all_strong_model_type_with_root_fragment.input", "relay_compiler_integration/fixtures/resolver_on_interface_of_all_strong_model_type_with_root_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_weak_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.expected"); + test_fixture(transform_fixture, file!(), 
"resolver_on_interface_of_all_weak_model_type.input", "relay_compiler_integration/fixtures/resolver_on_interface_of_all_weak_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_returns_custom_scalar() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.expected"); + test_fixture(transform_fixture, file!(), "resolver_on_interface_returns_custom_scalar.input", "relay_compiler_integration/fixtures/resolver_on_interface_returns_custom_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_client_schema_extension_enum() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_client_schema_extension_enum.input", "relay_compiler_integration/fixtures/resolver_returns_client_schema_extension_enum.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_enum_with_enum_suffix() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_enum_with_enum_suffix.input", "relay_compiler_integration/fixtures/resolver_returns_enum_with_enum_suffix.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_interface_of_all_live_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_interface_of_all_live_model_type.input", "relay_compiler_integration/fixtures/resolver_returns_interface_of_all_live_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_interface_of_all_strong_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_interface_of_all_strong_model_type.input", "relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_interface_of_all_strong_model_type_including_cse() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_interface_of_all_strong_model_type_including_cse.input", "relay_compiler_integration/fixtures/resolver_returns_interface_of_all_strong_model_type_including_cse.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_interface_of_all_weak_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_interface_of_all_weak_model_type.input", 
"relay_compiler_integration/fixtures/resolver_returns_interface_of_all_weak_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_interface_of_live_and_non_live_strong_model_type() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_interface_of_live_and_non_live_strong_model_type.input", "relay_compiler_integration/fixtures/resolver_returns_interface_of_live_and_non_live_strong_model_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_returns_union() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_returns_union.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_returns_union.expected"); + test_fixture(transform_fixture, file!(), "resolver_returns_union.input", "relay_compiler_integration/fixtures/resolver_returns_union.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_custom_scalar() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_custom_scalar.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_custom_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_live() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_live.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_live.expected"); + 
test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_live.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_live.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_plural() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_plural.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_plural.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_plural_live() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_plural_live.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_plural_live.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_relayresolvervalue() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_relayresolvervalue.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_relayresolvervalue_disabled() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_relayresolvervalue_disabled.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_relayresolvervalue_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_scalar() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_scalar.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_semantic_non_null_scalar_disabled() { + let input = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.expected"); + test_fixture(transform_fixture, file!(), "resolver_semantic_non_null_scalar_disabled.input", "relay_compiler_integration/fixtures/resolver_semantic_non_null_scalar_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn resolvers_non_nullable() { + let input = include_str!("relay_compiler_integration/fixtures/resolvers_non_nullable.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolvers_non_nullable.expected"); + test_fixture(transform_fixture, file!(), "resolvers_non_nullable.input", "relay_compiler_integration/fixtures/resolvers_non_nullable.expected", input, expected).await; +} + +#[tokio::test] +async fn resolvers_schema_module() { + let input = include_str!("relay_compiler_integration/fixtures/resolvers_schema_module.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/resolvers_schema_module.expected"); + test_fixture(transform_fixture, file!(), "resolvers_schema_module.input", "relay_compiler_integration/fixtures/resolvers_schema_module.expected", input, expected).await; +} + +#[tokio::test] +async fn resolvers_schema_module_apply_to_normalization_ast() { + let input = include_str!("relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.input"); + let expected = include_str!("relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.expected"); + test_fixture(transform_fixture, file!(), "resolvers_schema_module_apply_to_normalization_ast.input", "relay_compiler_integration/fixtures/resolvers_schema_module_apply_to_normalization_ast.expected", input, expected).await; +} + +#[tokio::test] +async fn simple_fragment() { + let input = include_str!("relay_compiler_integration/fixtures/simple_fragment.input"); + let expected = include_str!("relay_compiler_integration/fixtures/simple_fragment.expected"); + test_fixture(transform_fixture, file!(), "simple_fragment.input", "relay_compiler_integration/fixtures/simple_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn spread_interface_fragment_on_concrete_type() { + let input = include_str!("relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "spread_interface_fragment_on_concrete_type.input", "relay_compiler_integration/fixtures/spread_interface_fragment_on_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn spread_interface_fragment_on_weak_concrete_type() { + let input = include_str!("relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.input"); + let expected = 
include_str!("relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "spread_interface_fragment_on_weak_concrete_type.input", "relay_compiler_integration/fixtures/spread_interface_fragment_on_weak_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn spread_multiple_interface_fragments_on_concrete_type() { + let input = include_str!("relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.input"); + let expected = include_str!("relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "spread_multiple_interface_fragments_on_concrete_type.input", "relay_compiler_integration/fixtures/spread_multiple_interface_fragments_on_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn typescript_resolver_type_import() { + let input = include_str!("relay_compiler_integration/fixtures/typescript_resolver_type_import.input"); + let expected = include_str!("relay_compiler_integration/fixtures/typescript_resolver_type_import.expected"); + test_fixture(transform_fixture, file!(), "typescript_resolver_type_import.input", "relay_compiler_integration/fixtures/typescript_resolver_type_import.expected", input, expected).await; +} diff --git a/compiler/crates/relay-config/Cargo.toml b/compiler/crates/relay-config/Cargo.toml index e030b7f1e3910..882f42d1c1455 100644 --- a/compiler/crates/relay-config/Cargo.toml +++ b/compiler/crates/relay-config/Cargo.toml @@ -1,19 +1,20 @@ # @generated by autocargo from //relay/oss/crates/relay-config:relay-config + [package] name = "relay-config" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] common = { path = "../common" } fnv = "1.0" -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } 
+indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } pathdiff = "0.2" -regex = "1.6.0" -serde = { version = "1.0.136", features = ["derive", "rc"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } -strum = "0.21" -strum_macros = "0.21" +regex = "1.9.2" +serde = { version = "1.0.185", features = ["derive", "rc"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } +strum = { version = "0.26.2", features = ["derive"] } diff --git a/compiler/crates/relay-config/src/defer_stream_interface.rs b/compiler/crates/relay-config/src/defer_stream_interface.rs new file mode 100644 index 0000000000000..58474cc97be10 --- /dev/null +++ b/compiler/crates/relay-config/src/defer_stream_interface.rs @@ -0,0 +1,37 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::ArgumentName; +use common::DirectiveName; +use intern::string_key::Intern; +use serde::Deserialize; +use serde::Serialize; + +/// Configuration where Relay should expect some fields in the schema. 
+#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct DeferStreamInterface { + pub defer_name: DirectiveName, + pub stream_name: DirectiveName, + pub if_arg: ArgumentName, + pub label_arg: ArgumentName, + pub initial_count_arg: ArgumentName, + pub use_customized_batch_arg: ArgumentName, +} + +impl Default for DeferStreamInterface { + fn default() -> Self { + DeferStreamInterface { + defer_name: DirectiveName("defer".intern()), + stream_name: DirectiveName("stream".intern()), + if_arg: ArgumentName("if".intern()), + label_arg: ArgumentName("label".intern()), + initial_count_arg: ArgumentName("initialCount".intern()), + use_customized_batch_arg: ArgumentName("useCustomizedBatch".intern()), + } + } +} diff --git a/compiler/crates/relay-config/src/js_module_format.rs b/compiler/crates/relay-config/src/js_module_format.rs index 1ceac0327d638..fe6203e4eb0a0 100644 --- a/compiler/crates/relay-config/src/js_module_format.rs +++ b/compiler/crates/relay-config/src/js_module_format.rs @@ -11,15 +11,11 @@ use serde::Serialize; /// Formatting style for generated files. #[derive(Copy, Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "lowercase")] +#[derive(Default)] pub enum JsModuleFormat { /// Common JS style, e.g. `require('../path/MyModule')` + #[default] CommonJS, /// Facebook style, e.g. 
`require('MyModule')` Haste, } - -impl Default for JsModuleFormat { - fn default() -> Self { - JsModuleFormat::CommonJS - } -} diff --git a/compiler/crates/relay-config/src/lib.rs b/compiler/crates/relay-config/src/lib.rs index 90409ed10ff16..51ac59e5baef9 100644 --- a/compiler/crates/relay-config/src/lib.rs +++ b/compiler/crates/relay-config/src/lib.rs @@ -10,30 +10,35 @@ #![deny(clippy::all)] mod connection_interface; +mod defer_stream_interface; mod diagnostic_report_config; mod js_module_format; mod module_import_config; mod non_node_id_fields_config; mod project_config; +mod project_name; +mod resolvers_schema_module_config; mod typegen_config; pub use connection_interface::ConnectionInterface; +pub use defer_stream_interface::DeferStreamInterface; pub use diagnostic_report_config::DiagnosticLevel; pub use diagnostic_report_config::DiagnosticReportConfig; pub use js_module_format::JsModuleFormat; pub use module_import_config::DynamicModuleProvider; pub use module_import_config::ModuleImportConfig; pub use non_node_id_fields_config::NonNodeIdFieldsConfig; +pub use project_config::ExtraArtifactsConfig; pub use project_config::LocalPersistAlgorithm; pub use project_config::LocalPersistConfig; pub use project_config::PersistConfig; pub use project_config::ProjectConfig; -pub use project_config::ProjectName; pub use project_config::RemotePersistConfig; pub use project_config::SchemaConfig; pub use project_config::SchemaLocation; +pub use project_name::ProjectName; +pub use resolvers_schema_module_config::ResolversSchemaModuleConfig; pub use typegen_config::CustomScalarType; pub use typegen_config::CustomScalarTypeImport; -pub use typegen_config::FlowTypegenConfig; pub use typegen_config::TypegenConfig; pub use typegen_config::TypegenLanguage; diff --git a/compiler/crates/relay-config/src/project_config.rs b/compiler/crates/relay-config/src/project_config.rs index d76dcbce01ab3..d1293ed63b40c 100644 --- a/compiler/crates/relay-config/src/project_config.rs +++ 
b/compiler/crates/relay-config/src/project_config.rs @@ -8,7 +8,7 @@ use std::fmt; use std::path::Path; use std::path::PathBuf; -use std::str::FromStr; +use std::path::MAIN_SEPARATOR; use std::sync::Arc; use std::usize; @@ -22,7 +22,6 @@ use fnv::FnvBuildHasher; use indexmap::IndexMap; use intern::string_key::Intern; use intern::string_key::StringKey; -use intern::Lookup; use regex::Regex; use serde::de::Error; use serde::Deserialize; @@ -31,19 +30,20 @@ use serde::Serialize; use serde_json::Value; use crate::connection_interface::ConnectionInterface; +use crate::defer_stream_interface::DeferStreamInterface; use crate::diagnostic_report_config::DiagnosticReportConfig; use crate::module_import_config::ModuleImportConfig; use crate::non_node_id_fields_config::NonNodeIdFieldsConfig; +use crate::resolvers_schema_module_config::ResolversSchemaModuleConfig; use crate::JsModuleFormat; +use crate::ProjectName; use crate::TypegenConfig; use crate::TypegenLanguage; type FnvIndexMap = IndexMap; -pub type ProjectName = StringKey; - #[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] pub struct RemotePersistConfig { /// URL to send a POST request to to persist. 
pub url: String, @@ -62,6 +62,9 @@ pub struct RemotePersistConfig { deserialize_with = "deserialize_semaphore_permits" )] pub semaphore_permits: Option, + + #[serde(default)] + pub include_query_text: bool, } fn deserialize_semaphore_permits<'de, D>(d: D) -> Result, D::Error> @@ -98,6 +101,9 @@ pub struct LocalPersistConfig { #[serde(default)] pub algorithm: LocalPersistAlgorithm, + + #[serde(default)] + pub include_query_text: bool, } #[derive(Debug, Serialize, Clone)] @@ -107,6 +113,15 @@ pub enum PersistConfig { Local(LocalPersistConfig), } +impl PersistConfig { + pub fn include_query_text(&self) -> bool { + match self { + PersistConfig::Remote(remote_config) => remote_config.include_query_text, + PersistConfig::Local(local_config) => local_config.include_query_text, + } + } +} + impl<'de> Deserialize<'de> for PersistConfig { fn deserialize>(deserializer: D) -> std::result::Result { let value = Value::deserialize(deserializer)?; @@ -146,6 +161,20 @@ pub enum SchemaLocation { Directory(PathBuf), } +pub struct ExtraArtifactsConfig { + pub filename_for_artifact: Box String) + Send + Sync>, + pub skip_types_for_artifact: Box bool) + Send + Sync>, +} + +impl Debug for ExtraArtifactsConfig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ExtraArtifactsConfig") + .field("filename_for_artifact", &"Fn") + .field("skip_types_for_artifact", &"Fn") + .finish() + } +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct SchemaConfig { @@ -156,6 +185,13 @@ pub struct SchemaConfig { #[serde(default = "default_node_interface_id_field")] pub node_interface_id_field: StringKey, + #[serde(default)] + pub defer_stream_interface: DeferStreamInterface, + + /// The name of the variable expected by the `node` query. 
+ #[serde(default = "default_node_interface_id_variable_name")] + pub node_interface_id_variable_name: StringKey, + #[serde(default)] pub non_node_id_fields: Option, @@ -168,6 +204,10 @@ fn default_node_interface_id_field() -> StringKey { "id".intern() } +fn default_node_interface_id_variable_name() -> StringKey { + "id".intern() +} + fn default_unselectable_directive_name() -> DirectiveName { DirectiveName("unselectable".intern()) } @@ -176,7 +216,9 @@ impl Default for SchemaConfig { fn default() -> Self { Self { connection_interface: ConnectionInterface::default(), + defer_stream_interface: DeferStreamInterface::default(), node_interface_id_field: default_node_interface_id_field(), + node_interface_id_variable_name: default_node_interface_id_variable_name(), non_node_id_fields: None, unselectable_directive_name: default_unselectable_directive_name(), } @@ -186,8 +228,9 @@ impl Default for SchemaConfig { pub struct ProjectConfig { pub name: ProjectName, pub base: Option, - pub output: Option, pub extra_artifacts_output: Option, + pub extra_artifacts_config: Option, + pub output: Option, pub shard_output: bool, pub shard_strip_regex: Option, pub schema_extensions: Vec, @@ -200,23 +243,23 @@ pub struct ProjectConfig { pub extra: serde_json::Value, pub feature_flags: Arc, pub test_path_regex: Option, - pub filename_for_artifact: - Option String) + Send + Sync>>, - pub skip_types_for_artifact: Option bool) + Send + Sync>>, pub rollout: Rollout, pub js_module_format: JsModuleFormat, pub module_import_config: ModuleImportConfig, pub diagnostic_report_config: DiagnosticReportConfig, + pub resolvers_schema_module: Option, + pub codegen_command: Option, } impl Default for ProjectConfig { fn default() -> Self { Self { - name: "default".intern(), + name: ProjectName::default(), feature_flags: Default::default(), base: None, - output: None, extra_artifacts_output: None, + extra_artifacts_config: None, + output: None, shard_output: false, shard_strip_regex: None, 
schema_extensions: vec![], @@ -228,12 +271,12 @@ impl Default for ProjectConfig { variable_names_comment: false, extra: Default::default(), test_path_regex: None, - filename_for_artifact: None, - skip_types_for_artifact: None, rollout: Default::default(), js_module_format: Default::default(), module_import_config: Default::default(), diagnostic_report_config: Default::default(), + resolvers_schema_module: Default::default(), + codegen_command: Default::default(), } } } @@ -243,8 +286,9 @@ impl Debug for ProjectConfig { let ProjectConfig { name, base, - output, extra_artifacts_output, + extra_artifacts_config, + output, shard_output, shard_strip_regex, schema_extensions, @@ -257,17 +301,18 @@ impl Debug for ProjectConfig { extra, feature_flags, test_path_regex, - filename_for_artifact, - skip_types_for_artifact, rollout, js_module_format, module_import_config, diagnostic_report_config, + resolvers_schema_module, + codegen_command, } = self; f.debug_struct("ProjectConfig") .field("name", name) .field("base", base) .field("output", output) + .field("extra_artifacts_config", extra_artifacts_config) .field("extra_artifacts_output", extra_artifacts_output) .field("shard_output", shard_output) .field("shard_strip_regex", shard_strip_regex) @@ -281,26 +326,12 @@ impl Debug for ProjectConfig { .field("extra", extra) .field("feature_flags", feature_flags) .field("test_path_regex", test_path_regex) - .field( - "filename_for_artifact", - &if filename_for_artifact.is_some() { - "Some" - } else { - "None" - }, - ) - .field( - "skip_types_for_artifact", - &if skip_types_for_artifact.is_some() { - "Some" - } else { - "None" - }, - ) .field("rollout", rollout) .field("js_module_format", js_module_format) .field("module_import_config", module_import_config) .field("diagnostic_report_config", diagnostic_report_config) + .field("resolvers_schema_module", resolvers_schema_module) + .field("codegen_command", codegen_command) .finish() } } @@ -336,71 +367,74 @@ impl ProjectConfig { } } - 
pub fn path_for_artifact( + pub fn artifact_path_for_definition( &self, - source_file: SourceLocationKey, - definition_name: StringKey, + definition_name: WithLocation>, ) -> PathBuf { - let filename = if let Some(filename_for_artifact) = &self.filename_for_artifact { - filename_for_artifact(source_file, definition_name) + let source_location = definition_name.location.source_location(); + let artifact_name = definition_name.item.into(); + if let Some(extra_artifacts_config) = &self.extra_artifacts_config { + let filename = + (extra_artifacts_config.filename_for_artifact)(source_location, artifact_name); + + self.create_path_for_artifact(source_location, filename) } else { - match &self.typegen_config.language { - TypegenLanguage::Flow | TypegenLanguage::JavaScript => { - format!("{}.graphql.js", definition_name) - } - TypegenLanguage::TypeScript => format!("{}.graphql.ts", definition_name), + self.path_for_language_specific_artifact( + source_location, + format!("{}.graphql", artifact_name), + ) + } + } + + pub fn path_for_language_specific_artifact( + &self, + source_file: SourceLocationKey, + artifact_file_name: String, + ) -> PathBuf { + let filename = match &self.typegen_config.language { + TypegenLanguage::Flow | TypegenLanguage::JavaScript => { + format!("{}.js", artifact_file_name) } + TypegenLanguage::TypeScript => format!("{}.ts", artifact_file_name), }; + self.create_path_for_artifact(source_file, filename) } - /// Generates a relative import path in Common JS projects, and a module name in Haste projects. - pub fn js_module_import_path( + /// Generates identifier for importing module at `target_module_path` from module at `importing_artifact_path`. + /// Import Identifier is a relative path in CommonJS projects and a module name in Haste projects. 
+ pub fn js_module_import_identifier( &self, - definition_source_location: WithLocation, - target_module: StringKey, + importing_artifact_path: &PathBuf, + target_module_path: &PathBuf, ) -> StringKey { match self.js_module_format { JsModuleFormat::CommonJS => { - let definition_artifact_location = self.path_for_artifact( - definition_source_location.location.source_location(), - definition_source_location.item, - ); - - let module_location = - PathBuf::from_str(target_module.lookup()).unwrap_or_else(|_| { - panic!( - "expected to be able to build a path from target_module : {}", - target_module.lookup() - ); - }); - - let module_path = module_location.parent().unwrap_or_else(||{ + let importing_artifact_directory = importing_artifact_path.parent().unwrap_or_else(||{ panic!( - "expected module_location: {:?} to have a parent path, maybe it's not a file?", - module_location + "expected importing_artifact_path: {:?} to have a parent path, maybe it's not a file?", + importing_artifact_path ); }); - - let definition_artifact_location_path = definition_artifact_location.parent().unwrap_or_else(||{panic!("expected definition_artifact_location: {:?} to have a parent path, maybe it's not a file?", definition_artifact_location); - }); - - let resolver_module_location = - pathdiff::diff_paths(module_path, definition_artifact_location_path).unwrap(); - - let module_file_name = module_location.file_name().unwrap_or_else(|| { + let target_module_directory = target_module_path.parent().unwrap_or_else(||{ + panic!( + "expected target_module_path: {:?} to have a parent path, maybe it's not a file?", + target_module_path + ); + }); + let target_module_file_name = target_module_path.file_name().unwrap_or_else(|| { panic!( - "expected module_location: {:?} to have a file name", - module_location + "expected target_module_path: {:?} to have a file name", + target_module_path ) }); + let relative_path = + pathdiff::diff_paths(target_module_directory, importing_artifact_directory) + 
.unwrap(); - resolver_module_location - .join(module_file_name) - .to_string_lossy() - .intern() + format_normalized_path(&relative_path.join(target_module_file_name)).intern() } - JsModuleFormat::Haste => Path::new(&target_module.to_string()) + JsModuleFormat::Haste => target_module_path .file_stem() .unwrap() .to_string_lossy() @@ -408,3 +442,10 @@ impl ProjectConfig { } } } + +// Stringify a path such that it is stable across operating systems. +fn format_normalized_path(path: &Path) -> String { + path.to_string_lossy() + .to_string() + .replace(MAIN_SEPARATOR, "/") +} diff --git a/compiler/crates/relay-config/src/project_name.rs b/compiler/crates/relay-config/src/project_name.rs new file mode 100644 index 0000000000000..c6b59747f52e0 --- /dev/null +++ b/compiler/crates/relay-config/src/project_name.rs @@ -0,0 +1,90 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::fmt; + +use intern::string_key::Intern; +use intern::string_key::StringKey; +use intern::Lookup; +use serde::Deserialize; +use serde::Deserializer; +use serde::Serialize; +use serde::Serializer; + +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] +pub enum ProjectName { + Default, + Named(StringKey), +} + +impl ProjectName { + pub fn generate_name_for_object_and_field( + &self, + object_name: StringKey, + field_name: StringKey, + ) -> String { + format!("{}__{}", object_name, field_name) + } +} + +impl Default for ProjectName { + fn default() -> Self { + Self::Default + } +} + +impl fmt::Display for ProjectName { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Self::Named(value) = self { + write!(f, "{}", value) + } else { + write!(f, "default") + } + } +} + +impl From for ProjectName { + fn from(key: StringKey) -> Self { + match key.lookup() { + "default" => Self::Default, + _ => Self::Named(key), + } + } +} + +impl From for StringKey { + fn from(project_name: ProjectName) -> Self { + match project_name { + ProjectName::Default => "default".intern(), + ProjectName::Named(name) => name, + } + } +} + +impl Serialize for ProjectName { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(match self { + ProjectName::Default => "default", + ProjectName::Named(name) => name.lookup(), + }) + } +} + +impl<'de> Deserialize<'de> for ProjectName { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Deserialize::deserialize(deserializer).map(|s: String| match s.as_str() { + "default" => ProjectName::Default, + s => ProjectName::Named(s.intern()), + }) + } +} diff --git a/compiler/crates/relay-config/src/resolvers_schema_module_config.rs b/compiler/crates/relay-config/src/resolvers_schema_module_config.rs new file mode 100644 index 0000000000000..39576387b6afa --- /dev/null +++ b/compiler/crates/relay-config/src/resolvers_schema_module_config.rs @@ -0,0 +1,21 
@@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::path::PathBuf; + +use serde::Deserialize; +use serde::Serialize; + +/// Configuration for resolvers_schema_module generation +#[derive(Default, Serialize, Deserialize, Debug)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct ResolversSchemaModuleConfig { + #[serde(default)] + pub apply_to_normalization_ast: bool, + #[serde(default)] + pub path: PathBuf, +} diff --git a/compiler/crates/relay-config/src/typegen_config.rs b/compiler/crates/relay-config/src/typegen_config.rs index 8f1594fcf7c3b..d5c9e79627336 100644 --- a/compiler/crates/relay-config/src/typegen_config.rs +++ b/compiler/crates/relay-config/src/typegen_config.rs @@ -13,13 +13,13 @@ use indexmap::IndexMap; use intern::string_key::StringKey; use serde::Deserialize; use serde::Serialize; +use strum::EnumIter; use strum::IntoEnumIterator; -use strum_macros::EnumIter; type FnvIndexMap = IndexMap; #[derive( EnumIter, - strum_macros::ToString, + strum::Display, Debug, Copy, Clone, @@ -34,12 +34,6 @@ pub enum TypegenLanguage { Flow, } -impl Default for TypegenLanguage { - fn default() -> Self { - Self::JavaScript - } -} - impl TypegenLanguage { pub fn get_variants_as_string() -> Vec { let mut res = vec![]; @@ -64,7 +58,7 @@ pub struct CustomScalarTypeImport { pub path: PathBuf, } -#[derive(Debug, Serialize, Deserialize, Default)] +#[derive(Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields, rename_all = "camelCase")] pub struct TypegenConfig { /// The desired output language, "flow" or "typescript". 
@@ -101,22 +95,49 @@ pub struct TypegenConfig { #[serde(default)] pub require_custom_scalar_types: bool, - /// Work in progress new Flow type definitions + /// This option controls whether or not a catch-all entry is added to enum type definitions + /// for values that may be added in the future. Enabling this means you will have to update + /// your application whenever the GraphQL server schema adds new enum values to prevent it + /// from breaking. #[serde(default)] - pub flow_typegen: FlowTypegenConfig, + pub no_future_proof_enums: bool, /// This option enables emitting es modules artifacts. #[serde(default)] pub eager_es_modules: bool, -} -#[derive(Default, Debug, Serialize, Deserialize, Clone, Copy)] -#[serde(deny_unknown_fields, tag = "phase")] -pub struct FlowTypegenConfig { - /// This option controls whether or not a catch-all entry is added to enum type definitions - /// for values that may be added in the future. Enabling this means you will have to update - /// your application whenever the GraphQL server schema adds new enum values to prevent it - /// from breaking. + /// Keep the previous compiler behavior by outputting an union + /// of the raw type and null, and not the **correct** behavior + /// of an union with the raw type, null and undefined. #[serde(default)] - pub no_future_proof_enums: bool, + pub typescript_exclude_undefined_from_nullable_union: bool, + + /// EXPERIMENTAL: If your environment is configured to handles errors out of band, either via + /// a network layer which discards responses with errors, or via enabling strict + /// error handling in the runtime, you can enable this flag to have Relay generate + /// non-null types for fields which are marked as semantically non-null in + /// the schema. 
+ /// + /// Currently semantically non-null fields must be specified in your schema + /// using the `@semanticNonNull` directive as specified in: + /// https://github.com/apollographql/specs/pull/42 + #[serde(default)] + pub experimental_emit_semantic_nullability_types: bool, +} + +impl Default for TypegenConfig { + fn default() -> Self { + TypegenConfig { + language: TypegenLanguage::JavaScript, + enum_module_suffix: Default::default(), + optional_input_fields: Default::default(), + use_import_type_syntax: Default::default(), + custom_scalar_types: Default::default(), + require_custom_scalar_types: Default::default(), + no_future_proof_enums: Default::default(), + eager_es_modules: Default::default(), + typescript_exclude_undefined_from_nullable_union: Default::default(), + experimental_emit_semantic_nullability_types: Default::default(), + } + } } diff --git a/compiler/crates/relay-docblock/Cargo.toml b/compiler/crates/relay-docblock/Cargo.toml index e7c1d25d8e6a5..db138bbbc305c 100644 --- a/compiler/crates/relay-docblock/Cargo.toml +++ b/compiler/crates/relay-docblock/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/relay-docblock:[relay-docblock,relay-docblock_parse_test,relay-to_schema_test] + [package] name = "relay-docblock" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -18,6 +20,7 @@ path = "tests/to_schema_test.rs" common = { path = "../common" } docblock-shared = { path = "../docblock-shared" } docblock-syntax = { path = "../docblock-syntax" } +errors = { path = "../errors" } graphql-ir = { path = "../graphql-ir" } graphql-syntax = { path = "../graphql-syntax" } intern = { path = "../intern" } @@ -25,7 +28,8 @@ lazy_static = "1.4" relay-config = { path = "../relay-config" } relay-schema = { path = "../relay-schema" } schema = { path = "../schema" } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } 
+thiserror = "1.0.49" [dev-dependencies] extract-graphql = { path = "../extract-graphql" } @@ -33,3 +37,4 @@ fixture-tests = { path = "../fixture-tests" } graphql-cli = { path = "../graphql-cli" } graphql-test-helpers = { path = "../graphql-test-helpers" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/relay-docblock/src/docblock_ir.rs b/compiler/crates/relay-docblock/src/docblock_ir.rs index f61a98bb15d2b..e4198cae164c3 100644 --- a/compiler/crates/relay-docblock/src/docblock_ir.rs +++ b/compiler/crates/relay-docblock/src/docblock_ir.rs @@ -14,11 +14,12 @@ use common::NamedItem; use common::SourceLocationKey; use common::Span; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use docblock_shared::ARGUMENT_DEFINITIONS; use docblock_shared::ARGUMENT_TYPE; use docblock_shared::DEFAULT_VALUE; +use docblock_shared::KEY_RESOLVER_ID_FIELD; use docblock_shared::PROVIDER_ARG_NAME; -use graphql_ir::reexport::Intern; use graphql_ir::reexport::StringKey; use graphql_ir::FragmentDefinitionName; use graphql_syntax::parse_field_definition; @@ -28,12 +29,18 @@ use graphql_syntax::parse_identifier_and_implements_interfaces; use graphql_syntax::parse_type; use graphql_syntax::ConstantValue; use graphql_syntax::ExecutableDefinition; +use graphql_syntax::FieldDefinition; use graphql_syntax::FragmentDefinition; use graphql_syntax::Identifier; use graphql_syntax::InputValueDefinition; use graphql_syntax::List; +use graphql_syntax::StringNode; +use graphql_syntax::Token; +use graphql_syntax::TokenKind; use graphql_syntax::TypeAnnotation; +use intern::string_key::Intern; use intern::Lookup; +use relay_config::ProjectName; use crate::errors::ErrorMessagesWithData; use crate::errors::IrParsingErrorMessages; @@ -48,11 +55,14 @@ use crate::ir::WeakObjectIr; use crate::untyped_representation::AllowedFieldName; use 
crate::untyped_representation::UntypedDocblockRepresentation; use crate::DocblockIr; +use crate::LegacyVerboseResolverIr; use crate::On; use crate::ParseOptions; -use crate::RelayResolverIr; +use crate::ResolverFieldDocblockIr; +use crate::ResolverTypeDocblockIr; pub(crate) fn parse_docblock_ir( + project_name: &ProjectName, untyped_representation: UntypedDocblockRepresentation, definitions_in_file: Option<&Vec>, parse_options: &ParseOptions<'_>, @@ -80,6 +90,7 @@ pub(crate) fn parse_docblock_ir( let UntypedDocblockRepresentation { description, mut fields, + source_hash, } = untyped_representation; let resolver_field = match fields.remove(&AllowedFieldName::RelayResolverField) { @@ -88,41 +99,60 @@ pub(crate) fn parse_docblock_ir( }; let parsed_docblock_ir = match resolver_field { IrField::UnpopulatedIrField(unpopulated_ir_field) => { - DocblockIr::RelayResolver(parse_relay_resolver_ir( + let legacy_verbose_resolver = parse_relay_resolver_ir( &mut fields, definitions_in_file, description, + None, // This might be necessary for field hack source links docblock_location, unpopulated_ir_field, - parse_options, - )?) + source_hash, + )?; + + DocblockIr::Field(ResolverFieldDocblockIr::LegacyVerboseResolver( + legacy_verbose_resolver, + )) } IrField::PopulatedIrField(populated_ir_field) => { if populated_ir_field.value.item.lookup().contains('.') { - DocblockIr::TerseRelayResolver(parse_terse_relay_resolver_ir( - &mut fields, - populated_ir_field, - definitions_in_file, - docblock_location, - )?) + DocblockIr::Field(ResolverFieldDocblockIr::TerseRelayResolver( + parse_terse_relay_resolver_ir( + &mut fields, + description, + populated_ir_field, + definitions_in_file, + docblock_location, + source_hash, + parse_options, + )?, + )) } else { match get_optional_unpopulated_field_named( &mut fields, AllowedFieldName::WeakField, )? 
{ - Some(weak_field) => DocblockIr::WeakObjectType(parse_weak_object_ir( - &mut fields, - description, - docblock_location, - populated_ir_field, - weak_field, - )?), - None => DocblockIr::StrongObjectResolver(parse_strong_object_ir( - &mut fields, - description, - docblock_location, - populated_ir_field, - )?), + Some(weak_field) => DocblockIr::Type(ResolverTypeDocblockIr::WeakObjectType( + parse_weak_object_ir( + &mut fields, + description, + None, // This might be necessary for field hack source links + docblock_location, + populated_ir_field, + weak_field, + source_hash, + parse_options, + )?, + )), + None => DocblockIr::Type(ResolverTypeDocblockIr::StrongObjectResolver( + parse_strong_object_ir( + project_name, + &mut fields, + description, + docblock_location, + populated_ir_field, + source_hash, + )?, + )), } } } @@ -141,10 +171,11 @@ fn parse_relay_resolver_ir( fields: &mut HashMap, definitions_in_file: Option<&Vec>, description: Option>, + hack_source: Option>, location: Location, _resolver_field: UnpopulatedIrField, - parse_options: &ParseOptions<'_>, -) -> DiagnosticsResult { + source_hash: ResolverSourceHash, +) -> DiagnosticsResult { let root_fragment = get_optional_populated_field_named(fields, AllowedFieldName::RootFragmentField)?; let field_name = @@ -168,17 +199,12 @@ fn parse_relay_resolver_ir( get_optional_populated_field_named(fields, AllowedFieldName::OutputTypeField)?; if let Some(output_type) = output_type_opt { - if !parse_options - .enable_output_type - .is_enabled_for(field_definition_stub.name.value) - { - return Err(vec![Diagnostic::error( - IrParsingErrorMessages::UnexpectedOutputType { - field_name: field_definition_stub.name.value, - }, - output_type.key_location, - )]); - } + return Err(vec![Diagnostic::error( + IrParsingErrorMessages::UnexpectedOutputType { + field_name: field_definition_stub.name.value, + }, + output_type.key_location, + )]); } let output_type = combine_edge_to_and_output_type(edge_to_opt, output_type_opt)?; @@ 
-194,25 +220,30 @@ fn parse_relay_resolver_ir( validate_field_arguments(&field_definition_stub.arguments, location.source_location())?; - Ok(RelayResolverIr { + Ok(LegacyVerboseResolverIr { live: get_optional_unpopulated_field_named(fields, AllowedFieldName::LiveField)?, on, root_fragment: root_fragment .map(|root_fragment| root_fragment.value.map(FragmentDefinitionName)), description, + hack_source, deprecated: fields.remove(&AllowedFieldName::DeprecatedField), location, field: field_definition_stub, output_type, fragment_arguments, + source_hash, + semantic_non_null: None, }) } fn parse_strong_object_ir( + project_name: &ProjectName, fields: &mut HashMap, description: Option>, location: Location, relay_resolver_field: PopulatedIrField, + source_hash: ResolverSourceHash, ) -> DiagnosticsResult { let type_str = relay_resolver_field.value; let (identifier, implements_interfaces) = parse_identifier_and_implements_interfaces( @@ -221,8 +252,11 @@ fn parse_strong_object_ir( type_str.location.span().start, )?; - let fragment_name = FragmentDefinitionName(format!("{}__id", identifier.value).intern()); - + let fragment_name = FragmentDefinitionName( + project_name + .generate_name_for_object_and_field(identifier.value, *KEY_RESOLVER_ID_FIELD) + .intern(), + ); Ok(StrongObjectIr { type_name: identifier, rhs_location: relay_resolver_field.value.location, @@ -232,37 +266,60 @@ fn parse_strong_object_ir( live: get_optional_unpopulated_field_named(fields, AllowedFieldName::LiveField)?, location, implements_interfaces, + source_hash, + semantic_non_null: None, }) } fn parse_weak_object_ir( fields: &mut HashMap, description: Option>, + hack_source: Option>, location: Location, relay_resolver_field: PopulatedIrField, _weak_field: UnpopulatedIrField, + source_hash: ResolverSourceHash, + parse_options: &ParseOptions<'_>, ) -> DiagnosticsResult { // Validate that the right hand side of the @RelayResolver field is a valid identifier - let identifier = 
assert_only_identifier(relay_resolver_field)?; + let (identifier, implements_interfaces) = if parse_options + .enable_interface_output_type + .is_fully_enabled() + { + let type_str = relay_resolver_field.value; + parse_identifier_and_implements_interfaces( + type_str.item.lookup(), + type_str.location.source_location(), + type_str.location.span().start, + )? + } else { + (assert_only_identifier(relay_resolver_field)?, vec![]) + }; Ok(WeakObjectIr { type_name: identifier, rhs_location: relay_resolver_field.value.location, description, + hack_source, deprecated: fields.remove(&AllowedFieldName::DeprecatedField), location, + implements_interfaces, + source_hash, }) } fn parse_terse_relay_resolver_ir( fields: &mut HashMap, + description: Option>, relay_resolver_field: PopulatedIrField, definitions_in_file: Option<&Vec>, location: Location, + source_hash: ResolverSourceHash, + parse_options: &ParseOptions<'_>, ) -> DiagnosticsResult { let root_fragment = get_optional_populated_field_named(fields, AllowedFieldName::RootFragmentField)?; - let type_str = relay_resolver_field.value; + let type_str: WithLocation = relay_resolver_field.value; // Validate that the right hand side of the @RelayResolver field is a valid identifier let type_name = extract_identifier(relay_resolver_field)?; @@ -273,7 +330,7 @@ fn parse_terse_relay_resolver_ir( let span_start = type_str.location.span().start + offset as u32; match remaining_source.chars().next() { - Some(dot) if dot == '.' 
=> {} + Some('.') => {} Some(other) => { return Err(vec![Diagnostic::error( IrParsingErrorMessages::UnexpectedNonDot { found: other }, @@ -289,12 +346,21 @@ fn parse_terse_relay_resolver_ir( } }; - let field = parse_field_definition( + let mut field: graphql_syntax::FieldDefinition = parse_field_definition( &remaining_source[1..], type_str.location.source_location(), span_start + 1, )?; + field.description = description.map(|description| StringNode { + token: Token { + span: description.location.span(), + kind: TokenKind::Empty, + }, + value: description.item, + }); + + validate_field_type_annotation(&field, type_str, parse_options)?; validate_field_arguments(&field.arguments, location.source_location())?; let (fragment_type_condition, fragment_arguments) = parse_fragment_definition( @@ -322,6 +388,8 @@ fn parse_terse_relay_resolver_ir( } } + let semantic_non_null = field.directives.named("semanticNonNull".intern()).cloned(); + Ok(TerseRelayResolverIr { field, type_: WithLocation::new(type_str.location.with_span(type_name.span), type_name.value), @@ -330,7 +398,9 @@ fn parse_terse_relay_resolver_ir( location, deprecated: fields.remove(&AllowedFieldName::DeprecatedField), live: get_optional_unpopulated_field_named(fields, AllowedFieldName::LiveField)?, + semantic_non_null, fragment_arguments, + source_hash, }) } @@ -542,7 +612,13 @@ fn parse_fragment_definition( definitions_in_file: Option<&Vec>, ) -> DiagnosticsResult<(Option>, Option>)> { let fragment_definition = root_fragment - .map(|root_fragment| assert_fragment_definition(root_fragment.value, definitions_in_file)) + .map(|root_fragment| { + assert_fragment_definition( + root_fragment.value, + root_fragment.value.item, + definitions_in_file, + ) + }) .transpose()?; let fragment_arguments = fragment_definition @@ -581,14 +657,15 @@ fn parse_fragment_definition( Ok((fragment_type_condition, fragment_arguments)) } -fn assert_fragment_definition( +pub fn assert_fragment_definition( root_fragment: WithLocation, + 
fragment_name: StringKey, definitions_in_file: Option<&Vec>, ) -> Result { let fragment_definition = definitions_in_file.and_then(|defs| { defs.iter().find(|item| { if let ExecutableDefinition::Fragment(fragment) = item { - fragment.name.value == root_fragment.item + fragment.name.value == fragment_name } else { false } @@ -603,7 +680,7 @@ fn assert_fragment_definition( Err(Diagnostic::error( ErrorMessagesWithData::FragmentNotFound { - fragment_name: root_fragment.item, + fragment_name, suggestions, }, root_fragment.location, @@ -676,6 +753,25 @@ fn extract_fragment_arguments( }) } +fn validate_field_type_annotation( + field: &FieldDefinition, + type_str: WithLocation, + parse_options: &ParseOptions<'_>, +) -> DiagnosticsResult<()> { + if let TypeAnnotation::NonNull(non_null) = &field.type_ { + if !parse_options + .allow_resolver_non_nullable_return_type + .is_enabled_for(field.name.value) + { + return Err(vec![Diagnostic::error( + IrParsingErrorMessages::FieldWithNonNullType, + Location::new(type_str.location.source_location(), non_null.span), + )]); + } + } + Ok(()) +} + fn validate_field_arguments( arguments: &Option>, source_location: SourceLocationKey, @@ -686,7 +782,7 @@ fn validate_field_arguments( if let Some(default_value) = &argument.default_value { errors.push(Diagnostic::error( IrParsingErrorMessages::ArgumentDefaultValuesNoSupported, - Location::new(source_location, default_value.span()), + Location::new(source_location, default_value.value.span()), )); } } diff --git a/compiler/crates/relay-docblock/src/errors.rs b/compiler/crates/relay-docblock/src/errors.rs index 97ce90a2326f6..b782474acede0 100644 --- a/compiler/crates/relay-docblock/src/errors.rs +++ b/compiler/crates/relay-docblock/src/errors.rs @@ -16,7 +16,18 @@ use crate::untyped_representation::AllowedFieldName; use crate::ON_INTERFACE_FIELD; use crate::ON_TYPE_FIELD; -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, 
+ Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum UntypedRepresentationErrorMessages { #[error("Unexpected docblock field `@{field_name}`")] UnknownField { field_name: StringKey }, @@ -30,7 +41,18 @@ pub enum UntypedRepresentationErrorMessages { MultipleDescriptions, } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum IrParsingErrorMessages { #[error("Missing docblock field `@{field_name}`")] MissingField { field_name: AllowedFieldName }, @@ -74,6 +96,11 @@ pub enum IrParsingErrorMessages { #[error("The `@RelayResolver` field `@{field_name}` requires data.")] FieldWithMissingData { field_name: AllowedFieldName }, + #[error( + "Unexpected Relay Resolver field with non-nullable type. Relay expects all Resolver fields to be nullable since errors thrown by Resolvers are turned into `null` values." + )] + FieldWithNonNullType, + #[error( "The compiler attempted to parse this `@RelayResolver` block as a {resolver_type}, but there were unexpected fields: {field_string}." )] @@ -119,7 +146,18 @@ pub enum IrParsingErrorMessages { UnexpectedOutputType { field_name: StringKey }, } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum SchemaValidationErrorMessages { #[error( "Unexpected plural server type in `@edgeTo` field. Currently Relay Resolvers only support plural `@edgeTo` if the type is defined via Client Schema Extensions." @@ -137,14 +175,6 @@ pub enum SchemaValidationErrorMessages { #[error("Relay Resolvers may not be used to implement the `{id_field_name}` field.")] ResolversCantImplementId { id_field_name: StringKey }, - #[error( - "The type `{non_interface_name}` is {variant_name}. 
Please use a client-defined interface instead." - )] - UnexpectedNonInterface { - non_interface_name: StringKey, - variant_name: &'static str, - }, - #[error( "The interface `{interface_name}` is not defined in a client schema extension. Resolver types that implement interfaces can only implement client-defined interfaces." )] @@ -160,9 +190,33 @@ pub enum SchemaValidationErrorMessages { interface_name: InterfaceName, invalid_type_string: String, }, + + #[error( + "Resolvers on the mutation type {mutation_type_name} are disallowed without the enable_relay_resolver_mutations feature flag" + )] + DisallowedMutationResolvers { mutation_type_name: StringKey }, + + #[error( + "Mutation resolver {resolver_field_name} must return a scalar or enum type, got {actual_return_type}" + )] + MutationResolverNonScalarReturn { + resolver_field_name: StringKey, + actual_return_type: StringKey, + }, } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum ErrorMessagesWithData { #[error( "Invalid interface given for `@onInterface`. 
`{interface_name}` is not an existing GraphQL interface.{suggestions}", suggestions = did_you_mean(suggestions))] diff --git a/compiler/crates/relay-docblock/src/ir.rs b/compiler/crates/relay-docblock/src/ir.rs index 22e7cdc220096..631edaf795aa3 100644 --- a/compiler/crates/relay-docblock/src/ir.rs +++ b/compiler/crates/relay-docblock/src/ir.rs @@ -18,15 +18,19 @@ use common::NamedItem; use common::ObjectName; use common::Span; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use docblock_shared::FRAGMENT_KEY_ARGUMENT_NAME; +use docblock_shared::GENERATED_FRAGMENT_ARGUMENT_NAME; use docblock_shared::HAS_OUTPUT_TYPE_ARGUMENT_NAME; use docblock_shared::IMPORT_NAME_ARGUMENT_NAME; use docblock_shared::IMPORT_PATH_ARGUMENT_NAME; use docblock_shared::INJECT_FRAGMENT_DATA_ARGUMENT_NAME; -use docblock_shared::KEY_RESOLVER_ID_FIELD; use docblock_shared::LIVE_ARGUMENT_NAME; use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; use docblock_shared::RELAY_RESOLVER_MODEL_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; +use docblock_shared::RELAY_RESOLVER_SOURCE_HASH; +use docblock_shared::RELAY_RESOLVER_SOURCE_HASH_VALUE; use docblock_shared::RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE; use docblock_shared::RESOLVER_VALUE_SCALAR_NAME; use graphql_ir::FragmentDefinitionName; @@ -34,6 +38,7 @@ use graphql_syntax::BooleanNode; use graphql_syntax::ConstantArgument; use graphql_syntax::ConstantDirective; use graphql_syntax::ConstantValue; +use graphql_syntax::DefaultValue; use graphql_syntax::FieldDefinition; use graphql_syntax::FieldDefinitionStub; use graphql_syntax::Identifier; @@ -54,6 +59,7 @@ use graphql_syntax::TypeSystemDefinition; use intern::string_key::Intern; use intern::string_key::StringKey; use lazy_static::lazy_static; +use relay_config::ProjectName; use relay_config::SchemaConfig; use relay_schema::CUSTOM_SCALAR_DIRECTIVE_NAME; use relay_schema::EXPORT_NAME_CUSTOM_SCALAR_ARGUMENT_NAME; @@ -65,7 +71,6 @@ use schema::ObjectID; use 
schema::SDLSchema; use schema::Schema; use schema::Type; -use schema::TypeReference; use crate::errors::ErrorMessagesWithData; use crate::errors::SchemaValidationErrorMessages; @@ -78,44 +83,118 @@ lazy_static! { static ref DEPRECATED_RESOLVER_DIRECTIVE_NAME: DirectiveName = DirectiveName("deprecated".intern()); static ref DEPRECATED_REASON_ARGUMENT_NAME: ArgumentName = ArgumentName("reason".intern()); - static ref RESOLVER_MODEL_INSTANCE_FIELD_NAME: StringKey = "__relay_model_instance".intern(); static ref MODEL_CUSTOM_SCALAR_TYPE_SUFFIX: StringKey = "Model".intern(); + static ref SEMANTIC_NON_NULL_DIRECTIVE_NAME: DirectiveName = + DirectiveName("semanticNonNull".intern()); } #[derive(Debug, Clone, PartialEq)] -pub enum DocblockIr { - RelayResolver(RelayResolverIr), - TerseRelayResolver(TerseRelayResolverIr), +pub enum ResolverTypeDocblockIr { StrongObjectResolver(StrongObjectIr), WeakObjectType(WeakObjectIr), } +impl ResolverTypeDocblockIr { + pub fn to_graphql_schema_ast(&self, schema_config: &SchemaConfig) -> SchemaDocument { + let definitions = match self { + ResolverTypeDocblockIr::StrongObjectResolver(strong_object_resolver) => { + vec![strong_object_resolver.type_definition(schema_config)] + } + ResolverTypeDocblockIr::WeakObjectType(weak_object) => vec![ + weak_object.instance_scalar_type_definition(), + weak_object.type_definition(schema_config), + ], + }; + SchemaDocument { + location: self.location(), + definitions, + } + } + pub fn location(&self) -> Location { + match self { + Self::StrongObjectResolver(strong_object) => strong_object.location(), + Self::WeakObjectType(weak_object) => weak_object.location(), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub enum ResolverFieldDocblockIr { + LegacyVerboseResolver(LegacyVerboseResolverIr), + TerseRelayResolver(TerseRelayResolverIr), +} + +impl ResolverFieldDocblockIr { + pub fn to_graphql_schema_ast( + self, + project_name: ProjectName, + schema: &SDLSchema, + schema_config: &SchemaConfig, + ) -> 
DiagnosticsResult { + let project_config = ResolverProjectConfig { + project_name, + schema, + schema_config, + }; + + let schema_doc = match self { + ResolverFieldDocblockIr::LegacyVerboseResolver(relay_resolver) => { + relay_resolver.to_graphql_schema_ast(project_config) + } + ResolverFieldDocblockIr::TerseRelayResolver(relay_resolver) => { + relay_resolver.to_graphql_schema_ast(project_config) + } + }?; + Ok(schema_doc) + } + pub fn location(&self) -> Location { + match self { + Self::LegacyVerboseResolver(strong_object) => strong_object.location(), + Self::TerseRelayResolver(weak_object) => weak_object.location(), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub enum DocblockIr { + Type(ResolverTypeDocblockIr), + Field(ResolverFieldDocblockIr), +} + impl DocblockIr { pub(crate) fn get_variant_name(&self) -> &'static str { match self { - DocblockIr::RelayResolver(_) => "legacy resolver declaration", - DocblockIr::TerseRelayResolver(_) => "terse resolver declaration", - DocblockIr::StrongObjectResolver(_) => "strong object type declaration", - DocblockIr::WeakObjectType(_) => "weak object type declaration", + Self::Field(ResolverFieldDocblockIr::LegacyVerboseResolver(_)) => { + "legacy resolver declaration" + } + Self::Field(ResolverFieldDocblockIr::TerseRelayResolver(_)) => { + "terse resolver declaration" + } + Self::Type(ResolverTypeDocblockIr::StrongObjectResolver(_)) => { + "strong object type declaration" + } + Self::Type(ResolverTypeDocblockIr::WeakObjectType(_)) => "weak object type declaration", } } } /// Wrapper over all schema-related values #[derive(Copy, Clone, Debug)] -struct SchemaInfo<'a, 'b> { +struct ResolverProjectConfig<'a, 'b> { + project_name: ProjectName, schema: &'a SDLSchema, - config: &'b SchemaConfig, + schema_config: &'b SchemaConfig, } impl DocblockIr { pub fn to_sdl_string( self, + project_name: ProjectName, schema: &SDLSchema, schema_config: &SchemaConfig, ) -> DiagnosticsResult { Ok(self - .to_graphql_schema_ast(schema, 
schema_config)? + .to_graphql_schema_ast(project_name, schema, schema_config)? .definitions .iter() .map(|definition| format!("{}", definition)) @@ -123,32 +202,28 @@ impl DocblockIr { .join("\n\n")) } + pub fn location(&self) -> Location { + match self { + Self::Field(field) => field.location(), + Self::Type(type_) => type_.location(), + } + } + pub fn to_graphql_schema_ast( self, + project_name: ProjectName, schema: &SDLSchema, schema_config: &SchemaConfig, ) -> DiagnosticsResult { - let schema_info = SchemaInfo { - schema, - config: schema_config, - }; - match self { - DocblockIr::RelayResolver(relay_resolver) => { - relay_resolver.to_graphql_schema_ast(schema_info) - } - DocblockIr::TerseRelayResolver(relay_resolver) => { - relay_resolver.to_graphql_schema_ast(schema_info) - } - DocblockIr::StrongObjectResolver(strong_object) => { - strong_object.to_graphql_schema_ast(schema_info) - } - DocblockIr::WeakObjectType(weak_object) => { - weak_object.to_graphql_schema_ast(schema_info) + DocblockIr::Type(type_) => Ok(type_.to_graphql_schema_ast(schema_config)), + DocblockIr::Field(field) => { + field.to_graphql_schema_ast(project_name, schema, schema_config) } } } } + #[derive(Debug, PartialEq, Clone, Copy)] pub enum IrField { PopulatedIrField(PopulatedIrField), @@ -220,6 +295,15 @@ pub enum On { Interface(PopulatedIrField), } +impl On { + pub fn type_name(&self) -> StringKey { + match self { + On::Type(field) => field.value.item, + On::Interface(field) => field.value.item, + } + } +} + #[derive(Debug, Clone, PartialEq)] pub struct Argument { pub name: Identifier, @@ -260,6 +344,7 @@ pub enum FragmentDataInjectionMode { pub struct RootFragment { fragment: WithLocation, + generated: bool, // For Model resolvers, we need to pass the `id` or `__relay_model_instance` field // from the fragment data to the resolver function inject_fragment_data: Option, @@ -268,41 +353,49 @@ pub struct RootFragment { trait ResolverIr: Sized { /// Validate the ResolverIr against the schema 
and return the TypeSystemDefinition's /// that need to be added to the schema. - fn definitions( + fn field_definitions( self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> DiagnosticsResult>; fn location(&self) -> Location; + + fn root_fragment_name(&self) -> Option>; + fn id_fragment(&self, schema_config: &SchemaConfig) -> Option; fn root_fragment( &self, object: Option<&Object>, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Option; + fn output_type(&self) -> Option; fn deprecated(&self) -> Option; fn live(&self) -> Option; fn named_import(&self) -> Option; + fn source_hash(&self) -> ResolverSourceHash; + fn semantic_non_null(&self) -> Option; fn to_graphql_schema_ast( self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> DiagnosticsResult { Ok(SchemaDocument { location: self.location(), - definitions: self.definitions(schema_info)?, + definitions: self.field_definitions(project_config)?, }) } - fn directives( + fn field_directives( &self, object: Option<&Object>, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Vec { - let location = self.location(); - let span = location.span(); - let mut directives = vec![self.directive(object, schema_info)]; + let mut directives: Vec = vec![ + self.field_relay_resolver_directive(object, project_config), + resolver_source_hash_directive(self.source_hash()), + ]; if let Some(deprecated) = self.deprecated() { + let span = deprecated.key_location().span(); directives.push(ConstantDirective { span, at: dummy_token(span), @@ -316,14 +409,21 @@ trait ResolverIr: Sized { }) } + if let Some(semantic_non_null) = self.semantic_non_null() { + directives.push(semantic_non_null) + } + directives } - fn directive( - &self, - object: Option<&Object>, - schema_info: SchemaInfo<'_, '_>, - ) -> ConstantDirective { + fn type_directives(&self, schema_config: &SchemaConfig) -> Vec { 
+ vec![ + self.type_relay_resolver_directive(schema_config), + resolver_source_hash_directive(self.source_hash()), + ] + } + + fn type_relay_resolver_directive(&self, schema_config: &SchemaConfig) -> ConstantDirective { let location = self.location(); let span = location.span(); let import_path = self.location().source_location().path().intern(); @@ -332,12 +432,23 @@ trait ResolverIr: Sized { WithLocation::new(self.location(), import_path), )]; - if let Some(root_fragment) = self.root_fragment(object, schema_info) { + if let Some(live_field) = self.live() { + arguments.push(true_argument(LIVE_ARGUMENT_NAME.0, live_field.key_location)) + } + + if let Some(root_fragment) = self.id_fragment(schema_config) { arguments.push(string_argument( FRAGMENT_KEY_ARGUMENT_NAME.0, root_fragment.fragment.map(|x| x.0), )); + if root_fragment.generated { + arguments.push(true_argument( + GENERATED_FRAGMENT_ARGUMENT_NAME.0, + Location::generated(), + )) + } + if let Some(inject_fragment_data) = root_fragment.inject_fragment_data { match inject_fragment_data { FragmentDataInjectionMode::Field(field_name) => { @@ -350,11 +461,54 @@ trait ResolverIr: Sized { } } - if let Some(live_field) = self.live() { - arguments.push(true_argument(LIVE_ARGUMENT_NAME.0, live_field.key_location)) + if let Some(name) = self.named_import() { + arguments.push(string_argument( + IMPORT_NAME_ARGUMENT_NAME.0, + WithLocation::new(self.location(), name), + )); + } + arguments.sort(); + ConstantDirective { + span, + at: dummy_token(span), + name: string_key_as_identifier(RELAY_RESOLVER_DIRECTIVE_NAME.0), + arguments: Some(List::generated(arguments)), } + } - let schema = schema_info.schema; + fn field_relay_resolver_directive( + &self, + object: Option<&Object>, + project_config: ResolverProjectConfig<'_, '_>, + ) -> ConstantDirective { + let mut arguments = vec![]; + + if let Some(root_fragment) = self.root_fragment(object, project_config) { + arguments.push(string_argument( + FRAGMENT_KEY_ARGUMENT_NAME.0, + 
root_fragment.fragment.map(|x| x.0), + )); + + if root_fragment.generated { + arguments.push(true_argument( + GENERATED_FRAGMENT_ARGUMENT_NAME.0, + Location::generated(), + )) + } + + if let Some(inject_fragment_data) = root_fragment.inject_fragment_data { + match inject_fragment_data { + FragmentDataInjectionMode::Field(field_name) => { + arguments.push(string_argument( + INJECT_FRAGMENT_DATA_ARGUMENT_NAME.0, + WithLocation::new(root_fragment.fragment.location, field_name), + )); + } + } + } + } + + let schema = project_config.schema; if let Some(output_type) = self.output_type() { match output_type { @@ -374,7 +528,7 @@ trait ResolverIr: Sized { let is_edge_to = fields.map_or(false, |fields| { fields.iter().any(|id| { schema.field(*id).name.item - == schema_info.config.node_interface_id_field + == project_config.schema_config.node_interface_id_field }) }); @@ -394,19 +548,20 @@ trait ResolverIr: Sized { )), } } - if let Some(name) = self.named_import() { - arguments.push(string_argument( - IMPORT_NAME_ARGUMENT_NAME.0, - WithLocation::new(self.location(), name), - )); - } - ConstantDirective { - span, - at: dummy_token(span), - name: string_key_as_identifier(RELAY_RESOLVER_DIRECTIVE_NAME.0), - arguments: Some(List::generated(arguments)), + let mut directive = self.type_relay_resolver_directive(project_config.schema_config); + + match directive.arguments { + Some(ref mut args) => { + args.items.extend(arguments); + args.items.sort(); + } + None => { + arguments.sort(); + directive.arguments = Some(List::generated(arguments)); + } } + directive } } @@ -414,6 +569,7 @@ trait ResolverTypeDefinitionIr: ResolverIr { fn field_name(&self) -> &Identifier; fn field_arguments(&self) -> Option<&List>; fn description(&self) -> Option; + fn hack_source(&self) -> Option; fn fragment_arguments(&self) -> Option<&Vec>; /// Build recursive object/interface extensions to add this field to all @@ -422,12 +578,12 @@ trait ResolverTypeDefinitionIr: ResolverIr { &self, interface_name: 
WithLocation, interface_id: InterfaceID, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Vec { self.interface_definitions_impl( interface_name, interface_id, - schema_info, + project_config, &mut HashSet::default(), &mut HashSet::default(), ) @@ -437,12 +593,12 @@ trait ResolverTypeDefinitionIr: ResolverIr { &self, interface_name: WithLocation, interface_id: InterfaceID, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, seen_objects: &mut HashSet, seen_interfaces: &mut HashSet, ) -> Vec { - let fields = self.fields(None, schema_info); - let schema = schema_info.schema; + let fields = self.fields(None, project_config); + let schema = project_config.schema; // First we extend the interface itself... let mut definitions = vec![TypeSystemDefinition::InterfaceTypeExtension( @@ -451,6 +607,7 @@ trait ResolverTypeDefinitionIr: ResolverIr { interfaces: Vec::new(), directives: vec![], fields: Some(fields), + span: Span::empty(), }, )]; @@ -458,7 +615,10 @@ trait ResolverTypeDefinitionIr: ResolverIr { for object_id in &schema.interface(interface_id).implementing_objects { if !seen_objects.contains(object_id) { seen_objects.insert(*object_id); - definitions.extend(self.object_definitions(schema.object(*object_id), schema_info)); + let object = schema.object(*object_id); + if self.should_extend_interface_field_to_object(project_config, object) { + definitions.extend(self.object_definitions(object, project_config)); + } } } @@ -486,7 +646,7 @@ trait ResolverTypeDefinitionIr: ResolverIr { .unwrap() .get_interface_id() .unwrap(), - schema_info, + project_config, seen_objects, seen_interfaces, ), @@ -496,6 +656,28 @@ trait ResolverTypeDefinitionIr: ResolverIr { definitions } + // To support model resolver fields defined directly on an interface, without @rootFragment: + // e.g. @RelayResolver InterfaceName.fieldName(model) { .. 
} + // + // Objects defined on server or in client schema extensions don't have a + // corresponding model to pass to such resolver fields. Skip extending the object with these + // resolver fields if a field of the same name is already implemented on the object. + // + // Schema validation should ensure the existing field is compatible with the interface definition. + fn should_extend_interface_field_to_object( + &self, + project_config: ResolverProjectConfig<'_, '_>, + object: &Object, + ) -> bool { + // Check @rootFragment on the interface resolver field + if self.root_fragment_name().is_some() { + return true; + } + object + .named_field(self.field_name().value, project_config.schema) + .is_none() + } + // When defining a resolver on an object or interface, we must be sure that this // field is not defined on any parent interface because this could lead to a case where // someone tries to read the field in an fragment on that interface. In order to support @@ -505,10 +687,10 @@ trait ResolverTypeDefinitionIr: ResolverIr { // Until we decide to support that behavior we'll make it a compiler error. 
fn validate_singular_implementation( &self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, interfaces: &[InterfaceID], ) -> DiagnosticsResult<()> { - let schema = schema_info.schema; + let schema = project_config.schema; for interface_id in interfaces { let interface = schema.interface(*interface_id); @@ -531,14 +713,15 @@ trait ResolverTypeDefinitionIr: ResolverIr { fn object_definitions( &self, object: &Object, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Vec { vec![TypeSystemDefinition::ObjectTypeExtension( ObjectTypeExtension { name: obj_as_identifier(object.name), interfaces: vec![], directives: vec![], - fields: Some(self.fields(Some(object), schema_info)), + fields: Some(self.fields(Some(object), project_config)), + span: Span::empty(), }, )] } @@ -546,7 +729,7 @@ trait ResolverTypeDefinitionIr: ResolverIr { fn fields( &self, object: Option<&Object>, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> List { let edge_to = self.output_type().as_ref().map_or_else( || { @@ -568,7 +751,7 @@ trait ResolverTypeDefinitionIr: ResolverIr { (Some(a), Some(b)) => Some(List::generated( a.items .into_iter() - .chain(b.clone().items.into_iter()) + .chain(b.clone().items) .collect::>(), )), }; @@ -577,20 +760,30 @@ trait ResolverTypeDefinitionIr: ResolverIr { name: self.field_name().clone(), type_: edge_to, arguments: args, - directives: self.directives(object, schema_info), + directives: self.field_directives(object, project_config), description: self.description(), + hack_source: self.hack_source(), + span: Span::empty(), }]) } fn fragment_argument_definitions(&self) -> Option> { + let span = Span::empty(); self.fragment_arguments().as_ref().map(|args| { List::generated( args.iter() .map(|arg| InputValueDefinition { name: arg.name.clone(), type_: arg.type_.clone(), - default_value: arg.default_value.clone(), + default_value: 
arg.default_value.as_ref().map(|default_value| { + DefaultValue { + value: default_value.clone(), + equals: dummy_token(span), + span, + } + }), directives: vec![], + span, }) .collect::>(), ) @@ -604,22 +797,24 @@ pub struct TerseRelayResolverIr { pub type_: WithLocation, pub root_fragment: Option>, pub deprecated: Option, + pub semantic_non_null: Option, pub live: Option, pub location: Location, pub fragment_arguments: Option>, + pub source_hash: ResolverSourceHash, } impl ResolverIr for TerseRelayResolverIr { - fn definitions( + fn field_definitions( self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> DiagnosticsResult> { - let schema = schema_info.schema; + let schema = project_config.schema; let name = self .field_name() .name_with_location(self.location.source_location()); - if name.item == schema_info.config.node_interface_id_field { + if name.item == project_config.schema_config.node_interface_id_field { return Err(vec![Diagnostic::error( SchemaValidationErrorMessages::ResolversCantImplementId { id_field_name: name.item, @@ -632,14 +827,14 @@ impl ResolverIr for TerseRelayResolverIr { match type_ { Type::Object(object_id) => { let object = schema.object(object_id); - return Ok(self.object_definitions(object, schema_info)); + return Ok(self.object_definitions(object, project_config)); } Type::Interface(interface_id) => { let interface = schema.interface(interface_id); return Ok(self.interface_definitions( interface.name, interface_id, - schema_info, + project_config, )); } _ => panic!("Terser syntax is only supported on non-input objects or interfaces."), @@ -660,17 +855,26 @@ impl ResolverIr for TerseRelayResolverIr { self.location } + fn root_fragment_name(&self) -> Option> { + self.root_fragment + } + + fn id_fragment(&self, _schema_config: &SchemaConfig) -> Option { + None + } + fn root_fragment( &self, object: Option<&Object>, - _: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Option 
{ - get_root_fragment_for_object(object).or_else(|| { - self.root_fragment.map(|fragment| RootFragment { + self.root_fragment + .map(|fragment| RootFragment { fragment, + generated: false, inject_fragment_data: None, }) - }) + .or_else(|| get_root_fragment_for_object(project_config.project_name, object)) } fn output_type(&self) -> Option { @@ -684,6 +888,10 @@ impl ResolverIr for TerseRelayResolverIr { self.deprecated } + fn semantic_non_null(&self) -> Option { + self.semantic_non_null.clone() + } + fn live(&self) -> Option { self.live } @@ -691,6 +899,10 @@ impl ResolverIr for TerseRelayResolverIr { fn named_import(&self) -> Option { Some(self.field.name.value) } + + fn source_hash(&self) -> ResolverSourceHash { + self.source_hash + } } impl ResolverTypeDefinitionIr for TerseRelayResolverIr { @@ -709,32 +921,39 @@ impl ResolverTypeDefinitionIr for TerseRelayResolverIr { fn fragment_arguments(&self) -> Option<&Vec> { self.fragment_arguments.as_ref() } + + fn hack_source(&self) -> Option { + self.field.hack_source.clone() + } } #[derive(Debug, Clone, PartialEq)] -pub struct RelayResolverIr { +pub struct LegacyVerboseResolverIr { pub field: FieldDefinitionStub, pub on: On, pub root_fragment: Option>, pub output_type: Option, pub description: Option>, + pub hack_source: Option>, pub deprecated: Option, + pub semantic_non_null: Option, pub live: Option, pub location: Location, pub fragment_arguments: Option>, + pub source_hash: ResolverSourceHash, } -impl ResolverIr for RelayResolverIr { - fn definitions( +impl ResolverIr for LegacyVerboseResolverIr { + fn field_definitions( self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> DiagnosticsResult> { - let schema = schema_info.schema; + let schema = project_config.schema; let name = self .field_name() .name_with_location(self.location.source_location()); - if name.item == schema_info.config.node_interface_id_field { + if name.item == 
project_config.schema_config.node_interface_id_field { return Err(vec![Diagnostic::error( SchemaValidationErrorMessages::ResolversCantImplementId { id_field_name: name.item, @@ -765,8 +984,11 @@ impl ResolverIr for RelayResolverIr { match type_ { Type::Object(object_id) => { let object = schema.object(object_id); - self.validate_singular_implementation(schema_info, &object.interfaces)?; - return Ok(self.object_definitions(object, schema_info)); + self.validate_singular_implementation( + project_config, + &object.interfaces, + )?; + return Ok(self.object_definitions(object, project_config)); } Type::Interface(_) => { return Err(vec![Diagnostic::error_with_data( @@ -793,13 +1015,13 @@ impl ResolverIr for RelayResolverIr { if let Some(_type) = schema.get_type(value.item) { if let Some(interface_type) = _type.get_interface_id() { self.validate_singular_implementation( - schema_info, + project_config, &schema.interface(interface_type).interfaces, )?; return Ok(self.interface_definitions( value.map(InterfaceName), interface_type, - schema_info, + project_config, )); } else if _type.is_object() { return Err(vec![Diagnostic::error_with_data( @@ -824,17 +1046,26 @@ impl ResolverIr for RelayResolverIr { self.location } + fn id_fragment(&self, _schema_config: &SchemaConfig) -> Option { + None + } + + fn root_fragment_name(&self) -> Option> { + self.root_fragment + } + fn root_fragment( &self, object: Option<&Object>, - _: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> Option { - get_root_fragment_for_object(object).or_else(|| { - self.root_fragment.map(|fragment| RootFragment { + self.root_fragment + .map(|fragment| RootFragment { fragment, + generated: false, inject_fragment_data: None, }) - }) + .or_else(|| get_root_fragment_for_object(project_config.project_name, object)) } fn output_type(&self) -> Option { @@ -849,12 +1080,20 @@ impl ResolverIr for RelayResolverIr { self.live } + fn semantic_non_null(&self) -> Option { + 
self.semantic_non_null.clone() + } + fn named_import(&self) -> Option { Some(self.field.name.value) } + + fn source_hash(&self) -> ResolverSourceHash { + self.source_hash + } } -impl ResolverTypeDefinitionIr for RelayResolverIr { +impl ResolverTypeDefinitionIr for LegacyVerboseResolverIr { fn field_name(&self) -> &Identifier { &self.field.name } @@ -870,6 +1109,10 @@ impl ResolverTypeDefinitionIr for RelayResolverIr { fn fragment_arguments(&self) -> Option<&Vec> { self.fragment_arguments.as_ref() } + + fn hack_source(&self) -> Option { + self.hack_source.map(as_string_node) + } } /// Relay Resolver ID representing a "model" of a strong object @@ -884,149 +1127,20 @@ pub struct StrongObjectIr { pub description: Option>, pub deprecated: Option, pub live: Option, + pub semantic_non_null: Option, pub location: Location, /// The interfaces which the newly-created object implements pub implements_interfaces: Vec, + pub source_hash: ResolverSourceHash, } impl StrongObjectIr { - /// Validate that each interface that the StrongObjectIr object implements is client - /// defined and contains an id: ID! field. - /// - /// We are implicitly assuming that the only types that implement this interface are - /// defined in strong resolvers! But, it is possible to implement a client interface - /// for types defined in schema extensions and for server types. This is bad, and we - /// should disallow it. 
- pub(crate) fn validate_implements_interfaces_against_schema( - &self, - schema: &SDLSchema, - ) -> DiagnosticsResult<()> { - let location = self.rhs_location; - let mut errors = vec![]; - - let id_type = schema - .field(schema.clientid_field()) - .type_ - .inner() - .get_scalar_id() - .expect("Expected __id field to be a scalar"); - let non_null_id_type = - TypeReference::NonNull(Box::new(TypeReference::Named(Type::Scalar(id_type)))); - - for interface in &self.implements_interfaces { - let interface = match schema.get_type(interface.value) { - Some(Type::Interface(id)) => schema.interface(id), - None => { - let suggester = GraphQLSuggestions::new(schema); - errors.push(Diagnostic::error_with_data( - ErrorMessagesWithData::TypeNotFound { - type_name: interface.value, - suggestions: suggester.interface_type_suggestions(interface.value), - }, - location, - )); - continue; - } - Some(t) => { - errors.push( - Diagnostic::error( - SchemaValidationErrorMessages::UnexpectedNonInterface { - non_interface_name: interface.value, - variant_name: t.get_variant_name(), - }, - location, - ) - .annotate_if_location_exists( - "Defined here", - match t { - Type::Enum(enum_id) => schema.enum_(enum_id).name.location, - Type::InputObject(input_object_id) => { - schema.input_object(input_object_id).name.location - } - Type::Object(object_id) => schema.object(object_id).name.location, - Type::Scalar(scalar_id) => schema.scalar(scalar_id).name.location, - Type::Union(union_id) => schema.union(union_id).name.location, - Type::Interface(_) => { - panic!("Just checked this isn't an interface.") - } - }, - ), - ); - continue; - } - }; - - if !interface.is_extension { - errors.push( - Diagnostic::error( - SchemaValidationErrorMessages::UnexpectedServerInterface { - interface_name: interface.name.item, - }, - location, - ) - .annotate_if_location_exists("Defined here", interface.name.location), - ); - } else { - let found_id_field = interface.fields.iter().find_map(|field_id| { - let field = 
schema.field(*field_id); - if field.name.item == *KEY_RESOLVER_ID_FIELD { - Some(field) - } else { - None - } - }); - match found_id_field { - Some(id_field) => { - if id_field.type_ != non_null_id_type { - let mut invalid_type_string = String::new(); - schema - .write_type_string(&mut invalid_type_string, &id_field.type_) - .expect("Failed to write type to string."); - - errors.push( - Diagnostic::error( - SchemaValidationErrorMessages::InterfaceWithWrongIdField { - interface_name: interface.name.item, - invalid_type_string, - }, - location, - ) - .annotate("Defined here", interface.name.location), - ) - } - } - None => errors.push( - Diagnostic::error( - SchemaValidationErrorMessages::InterfaceWithNoIdField { - interface_name: interface.name.item, - }, - location, - ) - .annotate("Defined here", interface.name.location), - ), - }; - } - } - if errors.is_empty() { - Ok(()) - } else { - Err(errors) - } - } -} - -impl ResolverIr for StrongObjectIr { - fn definitions( - self, - schema_info: SchemaInfo<'_, '_>, - ) -> DiagnosticsResult> { + pub fn type_definition(&self, schema_config: &SchemaConfig) -> TypeSystemDefinition { let span = Span::empty(); - self.validate_implements_interfaces_against_schema(schema_info.schema)?; - let fields = vec![ FieldDefinition { - name: string_key_as_identifier(schema_info.config.node_interface_id_field), + name: string_key_as_identifier(schema_config.node_interface_id_field), type_: TypeAnnotation::NonNull(Box::new(NonNullTypeAnnotation { span, type_: TypeAnnotation::Named(NamedTypeAnnotation { @@ -1037,18 +1151,21 @@ impl ResolverIr for StrongObjectIr { arguments: None, directives: vec![], description: None, + hack_source: None, + span, }, generate_model_instance_field( - schema_info, - *INT_TYPE, + schema_config.unselectable_directive_name, + RESOLVER_VALUE_SCALAR_NAME.0, + None, None, - self.directives(None, schema_info), + self.type_directives(schema_config), self.location(), ), ]; - let type_ = 
TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { + TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name: self.type_name, - interfaces: self.implements_interfaces, + interfaces: self.implements_interfaces.clone(), directives: vec![ConstantDirective { span, at: dummy_token(span), @@ -1056,29 +1173,46 @@ impl ResolverIr for StrongObjectIr { arguments: None, }], fields: Some(List::generated(fields)), - }); + span, + }) + } +} - Ok(vec![type_]) +impl ResolverIr for StrongObjectIr { + fn field_definitions( + self, + _project_config: ResolverProjectConfig<'_, '_>, + ) -> DiagnosticsResult> { + Ok(vec![]) } fn location(&self) -> Location { self.location } - // For Model resolver we always inject the `id` fragment - fn root_fragment( - &self, - _: Option<&Object>, - schema_info: SchemaInfo<'_, '_>, - ) -> Option { + fn root_fragment_name(&self) -> Option> { + Some(self.root_fragment) + } + + fn id_fragment(&self, schema_config: &SchemaConfig) -> Option { Some(RootFragment { fragment: self.root_fragment, + generated: true, inject_fragment_data: Some(FragmentDataInjectionMode::Field( - schema_info.config.node_interface_id_field, + schema_config.node_interface_id_field, )), }) } + // For Model resolver we always inject the `id` fragment + fn root_fragment( + &self, + _: Option<&Object>, + _project_config: ResolverProjectConfig<'_, '_>, + ) -> Option { + None + } + fn output_type(&self) -> Option { None } @@ -1091,9 +1225,17 @@ impl ResolverIr for StrongObjectIr { self.live } + fn semantic_non_null(&self) -> Option { + None + } + fn named_import(&self) -> Option { Some(self.type_name.value) } + + fn source_hash(&self) -> ResolverSourceHash { + self.source_hash + } } /// Relay Resolver docblock representing a "model" type for a weak object @@ -1105,13 +1247,17 @@ pub struct WeakObjectIr { /// It is the location of a longer string, e.g. "Foo implements Bar". 
pub rhs_location: Location, pub description: Option>, + pub hack_source: Option>, pub deprecated: Option, pub location: Location, + /// The interfaces which the newly-created object implements + pub implements_interfaces: Vec, + pub source_hash: ResolverSourceHash, } impl WeakObjectIr { // Generate the named GraphQL type (with an __relay_model_instance field). - fn type_definition(&self, schema_info: SchemaInfo<'_, '_>) -> TypeSystemDefinition { + pub fn type_definition(&self, schema_config: &SchemaConfig) -> TypeSystemDefinition { let span = self.rhs_location.span(); let mut directives = vec![ @@ -1147,22 +1293,27 @@ impl WeakObjectIr { }), }) } + let type_name = self.model_type_name(); + let source_hash = self.source_hash(); + let location = self.location(); TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { name: self.type_name, - interfaces: vec![], + interfaces: self.implements_interfaces.clone(), directives, fields: Some(List::generated(vec![generate_model_instance_field( - schema_info, - self.model_type_name(), + schema_config.unselectable_directive_name, + type_name, self.description.map(as_string_node), - vec![], - self.location(), + self.hack_source.map(as_string_node), + vec![resolver_source_hash_directive(source_hash)], + location, )])), + span, }) } // Generate a custom scalar definition based on the exported type. 
- fn instance_scalar_type_definition(&self) -> TypeSystemDefinition { + pub fn instance_scalar_type_definition(&self) -> TypeSystemDefinition { let span = self.rhs_location.span(); TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { name: Identifier { @@ -1199,6 +1350,7 @@ impl WeakObjectIr { }, ])), }], + span, }) } @@ -1215,13 +1367,13 @@ impl WeakObjectIr { } impl ResolverIr for WeakObjectIr { - fn definitions( + fn field_definitions( self, - schema_info: SchemaInfo<'_, '_>, + project_config: ResolverProjectConfig<'_, '_>, ) -> DiagnosticsResult> { Ok(vec![ self.instance_scalar_type_definition(), - self.type_definition(schema_info), + self.type_definition(project_config.schema_config), ]) } @@ -1229,10 +1381,18 @@ impl ResolverIr for WeakObjectIr { self.location } + fn root_fragment_name(&self) -> Option> { + None + } + + fn id_fragment(&self, _schema_config: &SchemaConfig) -> Option { + None + } + fn root_fragment( &self, _: Option<&Object>, - _schema_info: SchemaInfo<'_, '_>, + _project_config: ResolverProjectConfig<'_, '_>, ) -> Option { None } @@ -1249,9 +1409,17 @@ impl ResolverIr for WeakObjectIr { None } + fn semantic_non_null(&self) -> Option { + None + } + fn named_import(&self) -> Option { None } + + fn source_hash(&self) -> ResolverSourceHash { + self.source_hash + } } fn string_argument(name: StringKey, value: WithLocation) -> ConstantArgument { @@ -1320,7 +1488,10 @@ fn dummy_token(span: Span) -> Token { } } -fn get_root_fragment_for_object(object: Option<&Object>) -> Option { +fn get_root_fragment_for_object( + project_name: ProjectName, + object: Option<&Object>, +) -> Option { if object? 
.directives .named(*RELAY_RESOLVER_MODEL_DIRECTIVE_NAME) @@ -1328,15 +1499,16 @@ fn get_root_fragment_for_object(object: Option<&Object>) -> Option { Some(RootFragment { fragment: WithLocation::generated(FragmentDefinitionName( - format!( - "{}__{}", - object.unwrap().name.item, - *RESOLVER_MODEL_INSTANCE_FIELD_NAME - ) - .intern(), + project_name + .generate_name_for_object_and_field( + object.unwrap().name.item.0, + *RELAY_RESOLVER_MODEL_INSTANCE_FIELD, + ) + .intern(), )), + generated: true, inject_fragment_data: Some(FragmentDataInjectionMode::Field( - *RESOLVER_MODEL_INSTANCE_FIELD_NAME, + *RELAY_RESOLVER_MODEL_INSTANCE_FIELD, )), }) } else { @@ -1346,9 +1518,10 @@ fn get_root_fragment_for_object(object: Option<&Object>) -> Option /// Generate the internal field for weak and strong model types fn generate_model_instance_field( - schema_info: SchemaInfo<'_, '_>, + unselectable_directive_name: DirectiveName, type_name: StringKey, description: Option, + hack_source: Option, mut directives: Vec, location: Location, ) -> FieldDefinition { @@ -1356,7 +1529,7 @@ fn generate_model_instance_field( directives.push(ConstantDirective { span, at: dummy_token(span), - name: string_key_as_identifier(schema_info.config.unselectable_directive_name.0), + name: string_key_as_identifier(unselectable_directive_name.0), arguments: Some(List::generated(vec![string_argument( DEPRECATED_REASON_ARGUMENT_NAME.0, WithLocation::new( @@ -1367,12 +1540,31 @@ fn generate_model_instance_field( }); FieldDefinition { - name: string_key_as_identifier(*RESOLVER_MODEL_INSTANCE_FIELD_NAME), - type_: TypeAnnotation::Named(NamedTypeAnnotation { - name: string_key_as_identifier(type_name), - }), + name: string_key_as_identifier(*RELAY_RESOLVER_MODEL_INSTANCE_FIELD), + type_: TypeAnnotation::NonNull(Box::new(NonNullTypeAnnotation { + span, + type_: TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_as_identifier(type_name), + }), + exclamation: dummy_token(span), + })), arguments: None, 
directives, description, + hack_source, + span, + } +} + +fn resolver_source_hash_directive(source_hash: ResolverSourceHash) -> ConstantDirective { + let span = Span::empty(); + ConstantDirective { + span, + at: dummy_token(span), + name: string_key_as_identifier(RELAY_RESOLVER_SOURCE_HASH.0), + arguments: Some(List::generated(vec![string_argument( + RELAY_RESOLVER_SOURCE_HASH_VALUE.0, + WithLocation::generated(source_hash.value()), + )])), } } diff --git a/compiler/crates/relay-docblock/src/lib.rs b/compiler/crates/relay-docblock/src/lib.rs index 97b59239c7196..48ae22be8c0fe 100644 --- a/compiler/crates/relay-docblock/src/lib.rs +++ b/compiler/crates/relay-docblock/src/lib.rs @@ -9,11 +9,13 @@ mod docblock_ir; mod errors; mod ir; mod untyped_representation; +mod validate_resolver_schema; use common::Diagnostic; use common::DiagnosticsResult; use common::FeatureFlag; use common::Location; +pub use docblock_ir::assert_fragment_definition; use docblock_ir::parse_docblock_ir; use docblock_shared::DEPRECATED_FIELD; use docblock_shared::EDGE_TO_FIELD; @@ -25,31 +27,35 @@ use docblock_shared::ON_TYPE_FIELD; use docblock_shared::OUTPUT_TYPE_FIELD; use docblock_shared::RELAY_RESOLVER_FIELD; use docblock_shared::ROOT_FRAGMENT_FIELD; +use docblock_shared::SEMANTIC_NON_NULL_FIELD; use docblock_shared::WEAK_FIELD; use docblock_syntax::DocblockAST; use graphql_syntax::ExecutableDefinition; use graphql_syntax::TypeSystemDefinition; use intern::Lookup; -pub use ir::DocblockIr; -pub use ir::On; -use ir::RelayResolverIr; +pub use ir::*; +use relay_config::ProjectName; use schema::SDLSchema; use untyped_representation::parse_untyped_docblock_representation; +pub use validate_resolver_schema::validate_resolver_schema; pub struct ParseOptions<'a> { - pub enable_output_type: &'a FeatureFlag, + pub enable_interface_output_type: &'a FeatureFlag, + pub allow_resolver_non_nullable_return_type: &'a FeatureFlag, } pub fn parse_docblock_ast( + project_name: &ProjectName, ast: &DocblockAST, 
definitions: Option<&Vec>, - parse_options: ParseOptions<'_>, + parse_options: &ParseOptions<'_>, ) -> DiagnosticsResult> { let untyped_representation = parse_untyped_docblock_representation(ast)?; parse_docblock_ir( + project_name, untyped_representation, definitions, - &parse_options, + parse_options, ast.location, ) } @@ -89,7 +95,7 @@ pub fn extend_schema_with_resolver_type_system_definition( schema.add_interface_type_extension(extension, location.source_location())?; } _ => panic!( - "Expected docblocks to only expose object and scalar extensions, and object and interface definitions" + "Expected docblocks to only expose object and scalar definitions, and object and interface extensions." ), }) } diff --git a/compiler/crates/relay-docblock/src/untyped_representation.rs b/compiler/crates/relay-docblock/src/untyped_representation.rs index df4d46c6d811c..bf209a3f9db7a 100644 --- a/compiler/crates/relay-docblock/src/untyped_representation.rs +++ b/compiler/crates/relay-docblock/src/untyped_representation.rs @@ -12,6 +12,7 @@ use std::hash::Hash; use common::Diagnostic; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use docblock_syntax::DocblockAST; use docblock_syntax::DocblockSection; use graphql_ir::reexport::StringKey; @@ -28,10 +29,21 @@ use crate::ON_TYPE_FIELD; use crate::OUTPUT_TYPE_FIELD; use crate::RELAY_RESOLVER_FIELD; use crate::ROOT_FRAGMENT_FIELD; +use crate::SEMANTIC_NON_NULL_FIELD; use crate::WEAK_FIELD; /// All fields which are allowed in RelayResolver docblocks. 
-#[derive(Clone, Copy, Eq, Debug, PartialEq, Hash, Ord, PartialOrd)] +#[derive( + Clone, + Copy, + Eq, + Debug, + PartialEq, + Hash, + Ord, + PartialOrd, + serde::Serialize +)] pub enum AllowedFieldName { RelayResolverField, FieldNameField, @@ -43,6 +55,7 @@ pub enum AllowedFieldName { LiveField, OutputTypeField, WeakField, + SemanticNonNullField, } impl Display for AllowedFieldName { @@ -58,6 +71,7 @@ impl Display for AllowedFieldName { AllowedFieldName::LiveField => write!(f, "{}", *LIVE_FIELD), AllowedFieldName::OutputTypeField => write!(f, "{}", *OUTPUT_TYPE_FIELD), AllowedFieldName::WeakField => write!(f, "{}", *WEAK_FIELD), + AllowedFieldName::SemanticNonNullField => write!(f, "{}", *SEMANTIC_NON_NULL_FIELD), } } } @@ -75,6 +89,9 @@ impl TryFrom> for AllowedFieldName { value if value == *EDGE_TO_FIELD => Ok(AllowedFieldName::EdgeToField), value if value == *DEPRECATED_FIELD => Ok(AllowedFieldName::DeprecatedField), value if value == *LIVE_FIELD => Ok(AllowedFieldName::LiveField), + value if value == *SEMANTIC_NON_NULL_FIELD => { + Ok(AllowedFieldName::SemanticNonNullField) + } value if value == *OUTPUT_TYPE_FIELD => Ok(AllowedFieldName::OutputTypeField), value if value == *WEAK_FIELD => Ok(AllowedFieldName::WeakField), invalid_value => Err(Diagnostic::error( @@ -94,13 +111,15 @@ impl TryFrom> for AllowedFieldName { pub(crate) struct UntypedDocblockRepresentation { pub(crate) description: Option>, pub(crate) fields: HashMap, + pub(crate) source_hash: ResolverSourceHash, } impl UntypedDocblockRepresentation { - fn new() -> Self { + fn new(source_hash: ResolverSourceHash) -> Self { Self { description: None, fields: HashMap::new(), + source_hash, } } } @@ -108,7 +127,8 @@ impl UntypedDocblockRepresentation { pub(crate) fn parse_untyped_docblock_representation( ast: &DocblockAST, ) -> Result> { - let mut untyped_repr: UntypedDocblockRepresentation = UntypedDocblockRepresentation::new(); + let mut untyped_repr: UntypedDocblockRepresentation = + 
UntypedDocblockRepresentation::new(ast.source_hash); let mut errors = vec![]; for section in &ast.sections { match section { diff --git a/compiler/crates/relay-docblock/src/validate_resolver_schema.rs b/compiler/crates/relay-docblock/src/validate_resolver_schema.rs new file mode 100644 index 0000000000000..9c6ed78ddbb82 --- /dev/null +++ b/compiler/crates/relay-docblock/src/validate_resolver_schema.rs @@ -0,0 +1,210 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::DiagnosticsResult; +use common::FeatureFlags; +use common::NamedItem; +use docblock_shared::KEY_RESOLVER_ID_FIELD; +use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE; +use errors::try2; +use errors::try_all; +use schema::Object; +use schema::SDLSchema; +use schema::Schema; +use schema::Type; +use schema::TypeReference; + +use crate::errors::SchemaValidationErrorMessages; + +pub fn validate_resolver_schema( + schema: &SDLSchema, + feature_flags: &FeatureFlags, +) -> DiagnosticsResult<()> { + try2( + validate_strong_resolver_types(schema), + validate_mutation_resolvers(schema, feature_flags.enable_relay_resolver_mutations), + )?; + + Ok(()) +} + +fn validate_strong_resolver_types(schema: &SDLSchema) -> DiagnosticsResult<()> { + try_all( + schema + .objects() + .filter(|obj| object_is_strong_model_type(obj)) + .map(|strong_model_object| { + validate_strong_object_implements_client_interface_with_id_field( + strong_model_object, + schema, + ) + }), + )?; + Ok(()) +} + +fn object_is_strong_model_type(object: &Object) -> bool { + if !object.is_extension { + return false; + } + + object + .directives + .named(*RELAY_RESOLVER_MODEL_DIRECTIVE_NAME) + .is_some() + && object + .directives + 
.named(*RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE) + .is_none() +} + +fn validate_mutation_resolvers( + schema: &SDLSchema, + mutation_resolvers_enabled: bool, +) -> DiagnosticsResult<()> { + if let Some(mutation_type) = schema.mutation_type() { + let mutation = match mutation_type { + Type::Object(object_id) => schema.object(object_id), + _ => { + // Someone else will report this error + return Ok(()); + } + }; + + try_all( + mutation + .fields + .iter() + .map(|field_id| -> DiagnosticsResult<()> { + let field = schema.field(*field_id); + if field + .directives + .named(*RELAY_RESOLVER_DIRECTIVE_NAME) + .is_none() + { + return Ok(()); + } + + if !mutation_resolvers_enabled { + return DiagnosticsResult::Err(vec![Diagnostic::error( + SchemaValidationErrorMessages::DisallowedMutationResolvers { + mutation_type_name: mutation.name.item.0, + }, + field.name.location, + )]); + } + let field_type = &field.type_; + if !is_valid_mutation_resolver_return_type(field_type) { + return DiagnosticsResult::Err(vec![Diagnostic::error( + SchemaValidationErrorMessages::MutationResolverNonScalarReturn { + resolver_field_name: field.name.item, + actual_return_type: schema.get_type_name(field_type.inner()), + }, + field.name.location, + )]); + } + Ok(()) + }), + )?; + }; + + Ok(()) +} + +fn is_valid_mutation_resolver_return_type(type_: &TypeReference) -> bool { + match type_ { + TypeReference::Named(named_type) => named_type.is_scalar() || named_type.is_enum(), + TypeReference::List(_) => false, + TypeReference::NonNull(non_null_type) => { + // note: this should be unreachable since we already disallow relay resolvers to return non-nullable types + // - implement this anyway in case that changes in the future + return is_valid_mutation_resolver_return_type(non_null_type.as_ref()); + } + } +} + +/// Validate that each interface that the strong object implements is client +/// defined and contains an id: ID! field. 
+fn validate_strong_object_implements_client_interface_with_id_field( + object: &Object, + schema: &SDLSchema, +) -> DiagnosticsResult<()> { + let location = object.name.location; + let mut errors = vec![]; + + let id_type = schema + .field(schema.clientid_field()) + .type_ + .inner() + .get_scalar_id() + .expect("Expected __id field to be a scalar"); + let non_null_id_type = + TypeReference::NonNull(Box::new(TypeReference::Named(Type::Scalar(id_type)))); + + for interface in &object.interfaces { + let interface = schema.interface(*interface); + + if !interface.is_extension { + errors.push( + Diagnostic::error( + SchemaValidationErrorMessages::UnexpectedServerInterface { + interface_name: interface.name.item, + }, + location, + ) + .annotate_if_location_exists("Defined here", interface.name.location), + ); + } else { + let found_id_field = interface.fields.iter().find_map(|field_id| { + let field = schema.field(*field_id); + if field.name.item == *KEY_RESOLVER_ID_FIELD { + Some(field) + } else { + None + } + }); + match found_id_field { + Some(id_field) => { + if id_field.type_ != non_null_id_type { + let mut invalid_type_string = String::new(); + schema + .write_type_string(&mut invalid_type_string, &id_field.type_) + .expect("Failed to write type to string."); + + errors.push( + Diagnostic::error( + SchemaValidationErrorMessages::InterfaceWithWrongIdField { + interface_name: interface.name.item, + invalid_type_string, + }, + id_field.name.location, + ) + .annotate("required because the interface is implemented by a Relay Resolver type here", object.name.location), + ) + } + } + None => errors.push( + Diagnostic::error( + SchemaValidationErrorMessages::InterfaceWithNoIdField { + interface_name: interface.name.item, + }, + interface.name.location, + ) + .annotate("required because the interface is implemented by a Relay Resolver type here", object.name.location), + ), + }; + } + } + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } +} diff --git 
a/compiler/crates/relay-docblock/tests/parse.rs b/compiler/crates/relay-docblock/tests/parse.rs new file mode 100644 index 0000000000000..df04c5a0f0cd1 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse.rs @@ -0,0 +1,116 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::FeatureFlag; +use common::SourceLocationKey; +use docblock_syntax::parse_docblock; +use extract_graphql::JavaScriptSourceFeature; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::parse_executable; +use graphql_syntax::ExecutableDefinition; +use intern::string_key::Intern; +use relay_config::ProjectName; +use relay_docblock::parse_docblock_ast; +use relay_docblock::ParseOptions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let js_features = extract_graphql::extract(fixture.content); + let executable_documents = js_features + .iter() + .enumerate() + .filter_map(|(i, source)| match source { + JavaScriptSourceFeature::GraphQL(source) => Some( + parse_executable( + &source.text_source().text, + SourceLocationKey::Embedded { + path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), + index: i as u16, + }, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) + .map(|document| document.definitions), + ), + JavaScriptSourceFeature::Docblock(_) => None, + }) + .collect::, String>>()? 
+ .iter() + .flatten() + .cloned() + .collect::>(); + + let irs = js_features + .iter() + .enumerate() + .filter_map(|(i, source)| match source { + JavaScriptSourceFeature::GraphQL(_) => None, + JavaScriptSourceFeature::Docblock(docblock_source) => Some( + parse_docblock( + &docblock_source.text_source().text, + SourceLocationKey::Embedded { + path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), + index: i as u16, + }, + ) + .and_then(|ast| { + parse_docblock_ast( + &ProjectName::default(), + &ast, + Some(&executable_documents), + &ParseOptions { + enable_interface_output_type: if fixture + .content + .contains("// relay:enable_interface_output_type") + { + &FeatureFlag::Enabled + } else { + &FeatureFlag::Disabled + }, + allow_resolver_non_nullable_return_type: if fixture + .content + .contains("// relay:allow_resolver_non_nullable_return_type") + { + &FeatureFlag::Enabled + } else { + &FeatureFlag::Disabled + }, + }, + ) + }) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)), + ), + }) + .collect::, String>>()?; + + let output = irs + .iter() + .flatten() + .map(|ir| format!("{:#?}", ir)) + .collect::>() + .join("\n\n"); + + Ok(output) +} + +pub fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = DiagnosticPrinter::new(|source_location| match source_location { + SourceLocationKey::Embedded { index, .. } => Some( + extract_graphql::extract(source)[index as usize] + .text_source() + .clone(), + ), + SourceLocationKey::Standalone { .. 
} => None, + SourceLocationKey::Generated => None, + }); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.expected index 9cd364d6e06d8..a33415e47836d 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -30,8 +31,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected non-nullable item in list type given in `@edgeTo`. - /path/to/test/fixture/edge-to-non-null-plural-item.invalid.js:15:12 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo [Page!] + /path/to/test/fixture/edge-to-non-null-plural-item.invalid.js:16:12 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo [Page!] 
│ ^^^^^^^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.js index 08f87bc68f426..d2a14c9acf5df 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null-plural-item.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.expected index ac7e711deb006..0dc6429496979 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -30,8 +31,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected non-nullable type given in `@edgeTo`. - /path/to/test/fixture/edge-to-non-null.invalid.js:15:12 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo Page! + /path/to/test/fixture/edge-to-non-null.invalid.js:16:12 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo Page! 
│ ^^^^^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.js index 4a8a746a064f3..5b6e1b058ff4e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-non-null.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.expected index fb3c06696a444..cbd7cb8c970e4 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -30,35 +31,35 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unsupported character - /path/to/test/fixture/edge-to-not-identifier.invalid.js:15:12 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo **LOL** + /path/to/test/fixture/edge-to-not-identifier.invalid.js:16:12 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo **LOL** │ ^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment ✖︎ Unsupported character - /path/to/test/fixture/edge-to-not-identifier.invalid.js:15:13 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo **LOL** + /path/to/test/fixture/edge-to-not-identifier.invalid.js:16:13 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo **LOL** │ ^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment ✖︎ Unsupported character - 
/path/to/test/fixture/edge-to-not-identifier.invalid.js:15:17 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo **LOL** + /path/to/test/fixture/edge-to-not-identifier.invalid.js:16:17 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo **LOL** │ ^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment ✖︎ Unsupported character - /path/to/test/fixture/edge-to-not-identifier.invalid.js:15:18 - 14 │ * @fieldName favorite_page - 15 │ * @edgeTo **LOL** + /path/to/test/fixture/edge-to-not-identifier.invalid.js:16:18 + 15 │ * @fieldName favorite_page + 16 │ * @edgeTo **LOL** │ ^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.js index 2693a1766bc96..876c0bf197929 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/edge-to-not-identifier.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.expected index 16c200c770e0a..386533c78daee 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -31,8 +32,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected `@outputType`. The deprecated `@outputType` option is not enabled for the field `favorite_page`. 
- /path/to/test/fixture/output-type-edge-to-invalid.js:15:5 - 14 │ * @fieldName favorite_page - 15 │ * @outputType ClientPage + /path/to/test/fixture/output-type-edge-to-invalid.js:16:5 + 15 │ * @fieldName favorite_page + 16 │ * @outputType ClientPage │ ^^^^^^^^^^ - 16 │ * @edgeTo Page + 17 │ * @edgeTo Page diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.js index 55910e8ed77c7..aa9c6ab64fb16 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-edge-to-invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.expected index 9584fbeb282a2..3df0fd6cd64ea 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -30,8 +31,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected `@outputType`. The deprecated `@outputType` option is not enabled for the field `favorite_page`. 
- /path/to/test/fixture/output-type-invalid-type.js:15:5 - 14 │ * @fieldName favorite_page - 15 │ * @outputType **LOL** + /path/to/test/fixture/output-type-invalid-type.js:16:5 + 15 │ * @fieldName favorite_page + 16 │ * @outputType **LOL** │ ^^^^^^^^^^ - 16 │ * @rootFragment myRootFragment + 17 │ * @rootFragment myRootFragment diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.js b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.js index f394b4cc19a30..c2fdc34a8d2d7 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/output-type-invalid-type.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.expected index dd91f28cb6b5e..dbe97f4120c91 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -22,65 +24,72 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: Some( - EdgeTo( + ), + root_fragment: Some( WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:78:82, - item: Named( - NamedTypeAnnotation { - name: Identifier { - span: 78:82, - token: Token { + location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: Some( + EdgeTo( + WithLocation { + location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:78:82, + item: Named( + NamedTypeAnnotation { + name: Identifier { span: 78:82, - kind: Identifier, + token: Token { + span: 78:82, + kind: Identifier, + }, + 
value: "Page", }, - value: "Page", }, - }, - ), - }, + ), + }, + ), ), - ), - description: None, - deprecated: Some( - UnpopulatedIrField( - UnpopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:119:129, - }, + description: None, + hack_source: None, + deprecated: Some( + UnpopulatedIrField( + UnpopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:119:129, + }, + ), ), - ), - live: None, - location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:0:130, - fragment_arguments: None, - }, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-deprecated-no-description.js:0:130, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "eb46fd9d0828f318bc5613bca9e67e42", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.js index b1bcec0a6e2b8..9bcc55f45a9c2 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated-no-description.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.expected index 82c8597ab7e3b..ef46f5176bbdf 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -22,69 +24,76 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-deprecated.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-deprecated.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-deprecated.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: Some( - EdgeTo( + ), + root_fragment: Some( WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated.js:78:82, - item: Named( - NamedTypeAnnotation { - name: Identifier { - span: 78:82, - token: Token { + location: /path/to/test/fixture/relay-resolver-deprecated.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: Some( + EdgeTo( + WithLocation { + location: /path/to/test/fixture/relay-resolver-deprecated.js:78:82, + item: Named( + NamedTypeAnnotation { + name: Identifier { span: 78:82, - kind: Identifier, + token: Token { + span: 78:82, + kind: Identifier, + }, + value: "Page", }, - value: "Page", }, - }, - ), - }, + ), + }, + ), ), - ), - description: None, - deprecated: Some( - 
PopulatedIrField( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-deprecated.js:119:129, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-deprecated.js:130:159, - item: "This one is not used any more", + description: None, + hack_source: None, + deprecated: Some( + PopulatedIrField( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-deprecated.js:119:129, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-deprecated.js:130:159, + item: "This one is not used any more", + }, }, - }, + ), + ), + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-deprecated.js:0:160, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "01183f3ee51f5cb85eec42335548ae0f", ), - ), - live: None, - location: /path/to/test/fixture/relay-resolver-deprecated.js:0:160, - fragment_arguments: None, - }, + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.js index 3dc6cfabb9a5a..3fc5fc9b6a103 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-deprecated.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.expected index b0972b372c5be..e6b91e7d776a7 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,8 +26,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected docblock field `@notValid` - /path/to/test/fixture/relay-resolver-invalid-field.invalid.js:16:5 - 15 │ * @rootFragment myRootFragment - 16 │ * @notValid myRootFragment + /path/to/test/fixture/relay-resolver-invalid-field.invalid.js:17:5 + 16 │ * @rootFragment myRootFragment + 17 │ * @notValid myRootFragment │ ^^^^^^^^ - 17 │ + 18 │ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.js index f63ed662ba59e..245d748ab5776 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-invalid-field.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.expected index 71900cc360586..39d36ec2fe447 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.expected +++ 
b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -26,8 +27,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ The `@RelayResolver` field `@live` does not accept data. Remove everything after `@live`. - /path/to/test/fixture/relay-resolver-live-with-text.invalid.js:17:5 - 16 │ * @rootFragment myRootFragment - 17 │ * @live in the moment + /path/to/test/fixture/relay-resolver-live-with-text.invalid.js:18:5 + 17 │ * @rootFragment myRootFragment + 18 │ * @live in the moment │ ^^^^ - 18 │ + 19 │ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.js index d92f50bc3954e..435e84def590b 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live-with-text.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.expected index 735edd35ae2f8..0e9e80df3477e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -22,63 +24,70 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-live.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-live.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-live.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-live.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-live.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: Some( - EdgeTo( + ), + root_fragment: Some( WithLocation { - location: /path/to/test/fixture/relay-resolver-live.js:78:82, - item: Named( - NamedTypeAnnotation { - name: Identifier { - span: 78:82, - token: Token { + location: /path/to/test/fixture/relay-resolver-live.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: Some( + EdgeTo( + WithLocation { + location: /path/to/test/fixture/relay-resolver-live.js:78:82, + item: Named( + NamedTypeAnnotation { + name: Identifier { span: 78:82, - kind: Identifier, + token: Token { + span: 78:82, + kind: Identifier, + }, + value: "Page", }, - value: "Page", }, - }, - ), + ), + }, + ), + ), + description: None, + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: 
Some( + UnpopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-live.js:119:123, }, ), - ), - description: None, - deprecated: None, - live: Some( - UnpopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-live.js:119:123, - }, - ), - location: /path/to/test/fixture/relay-resolver-live.js:0:124, - fragment_arguments: None, - }, + location: /path/to/test/fixture/relay-resolver-live.js:0:124, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "e77e16ef96fd749a5894db7bcb685060", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.js index 598ee5de40351..4685e27732597 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-live.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.expected index b9bc8692d7d03..692f20af18925 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -25,46 +27,53 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-named-export.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-named-export.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-named-export.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-named-export.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-named-export.js:84:98, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: None, - description: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-named-export.js:101:392, - item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. 
A lot.", - }, - ), - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-named-export.js:0:393, - fragment_arguments: None, - }, + ), + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-named-export.js:84:98, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: None, + description: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-named-export.js:101:392, + item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. A lot.", + }, + ), + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-named-export.js:0:393, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "0a13488b7b321b8d7770f691a668ba44", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.js index 60a25f38e5207..34026ec50e5cd 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-named-export.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected index 8f0aa18ac7584..c397549c40664 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,16 +26,16 @@ graphql` ==================================== ERROR ==================================== ✖︎ The type specified in the fragment (`User`) and the type specified in `@onInterface` (`Node`) are different. Please make sure these are exactly the same. - /path/to/test/fixture/relay-resolver-on-interface-mismatch.invalid.js:13:17 - 12 │ * - 13 │ * @onInterface Node + /path/to/test/fixture/relay-resolver-on-interface-mismatch.invalid.js:14:17 + 13 │ * + 14 │ * @onInterface Node │ ^^^^ - 14 │ * @fieldName favorite_page + 15 │ * @fieldName favorite_page ℹ︎ with fragment type condition - /path/to/test/fixture/relay-resolver-on-interface-mismatch.invalid.js:20:27 - 19 │ - 20 │ fragment myRootFragment on User { + /path/to/test/fixture/relay-resolver-on-interface-mismatch.invalid.js:21:27 + 20 │ + 21 │ fragment myRootFragment on User { │ ^^^^^^^ - 21 │ name + 22 │ name diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.js index 58990b2dd252a..224e2078fcfb9 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.js +++ 
b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-mismatch.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected index 2031e632a55fb..2c828765379e0 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,8 +26,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Expected docblock field `@onInterface` to have specified a value. - /path/to/test/fixture/relay-resolver-on-interface-no-value.invalid.js:13:5 - 12 │ * - 13 │ * @onInterface + /path/to/test/fixture/relay-resolver-on-interface-no-value.invalid.js:14:5 + 13 │ * + 14 │ * @onInterface │ ^^^^^^^^^^^ - 14 │ * @fieldName favorite_page + 15 │ * @fieldName favorite_page diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.js index 472c374cc7370..5fb2e84d933fb 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-interface-no-value.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected 
b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected index a9e6726f8527f..114a5dbae172f 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + // expected-to-throw /** @@ -26,16 +28,16 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected `@onType` and `@onInterface`. Only one of these docblock fields should be defined on a given `@RelayResolver`. - /path/to/test/fixture/relay-resolver-on-type-and-on-interface.invalid.js:13:5 - 12 │ * - 13 │ * @onType User + /path/to/test/fixture/relay-resolver-on-type-and-on-interface.invalid.js:15:5 + 14 │ * + 15 │ * @onType User │ ^^^^^^ - 14 │ * @onInterface Node + 16 │ * @onInterface Node ℹ︎ @onInterface - /path/to/test/fixture/relay-resolver-on-type-and-on-interface.invalid.js:14:5 - 13 │ * @onType User - 14 │ * @onInterface Node + /path/to/test/fixture/relay-resolver-on-type-and-on-interface.invalid.js:16:5 + 15 │ * @onType User + 16 │ * @onInterface Node │ ^^^^^^^^^^^ - 15 │ * @fieldName favorite_page + 17 │ * @fieldName favorite_page diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.js index 89da6eb96048f..331e0cb6c7e2a 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + // expected-to-throw /** diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected index 2decf0eeef7dc..15c34c0e5274f 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,16 +26,16 @@ graphql` ==================================== ERROR ==================================== ✖︎ The type specified in the fragment (`User`) and the type specified in `@onType` (`Query`) are different. Please make sure these are exactly the same. - /path/to/test/fixture/relay-resolver-on-type-mismatch.invalid.js:13:12 - 12 │ * - 13 │ * @onType Query + /path/to/test/fixture/relay-resolver-on-type-mismatch.invalid.js:14:12 + 13 │ * + 14 │ * @onType Query │ ^^^^^ - 14 │ * @fieldName favorite_page + 15 │ * @fieldName favorite_page ℹ︎ with fragment type condition - /path/to/test/fixture/relay-resolver-on-type-mismatch.invalid.js:20:27 - 19 │ - 20 │ fragment myRootFragment on User { + /path/to/test/fixture/relay-resolver-on-type-mismatch.invalid.js:21:27 + 20 │ + 21 │ fragment myRootFragment on User { │ ^^^^^^^ - 21 │ name + 22 │ name diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.js index b1f14546977c5..87fdc6082c1d4 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-mismatch.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// 
relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.expected index 78af772c446dd..40eef300f9a2e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,8 +26,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Expected docblock field `@onType` to have specified a value. - /path/to/test/fixture/relay-resolver-on-type-no-value.invalid.js:13:5 - 12 │ * - 13 │ * @onType + /path/to/test/fixture/relay-resolver-on-type-no-value.invalid.js:14:5 + 13 │ * + 14 │ * @onType │ ^^^^^^ - 14 │ * @fieldName favorite_page + 15 │ * @fieldName favorite_page diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.js index 76f830aec9e93..b17234c51a92c 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-on-type-no-value.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object-with-implements.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object-with-implements.expected index 1a450f568b6b4..5a4836f9b094d 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object-with-implements.expected 
+++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object-with-implements.expected @@ -10,36 +10,42 @@ * @RelayResolver ClientUser implements Foo */ ==================================== OUTPUT =================================== -StrongObjectResolver( - StrongObjectIr { - type_name: Identifier { - span: 20:30, - token: Token { +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { span: 20:30, - kind: Identifier, - }, - value: "ClientUser", - }, - rhs_location: /path/to/test/fixture/relay-resolver-strong-object-with-implements.js:20:45, - root_fragment: WithLocation { - location: :0:0, - item: FragmentDefinitionName( - "ClientUser__id", - ), - }, - description: None, - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-strong-object-with-implements.js:0:46, - implements_interfaces: [ - Identifier { - span: 42:45, token: Token { - span: 42:45, + span: 20:30, kind: Identifier, }, - value: "Foo", + value: "ClientUser", }, - ], - }, + rhs_location: /path/to/test/fixture/relay-resolver-strong-object-with-implements.js:20:45, + root_fragment: WithLocation { + location: :0:0, + item: FragmentDefinitionName( + "ClientUser__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: /path/to/test/fixture/relay-resolver-strong-object-with-implements.js:0:46, + implements_interfaces: [ + Identifier { + span: 42:45, + token: Token { + span: 42:45, + kind: Identifier, + }, + value: "Foo", + }, + ], + source_hash: ResolverSourceHash( + "cbb959799c8b4d36262edfc0c4e8ad7f", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object.expected index 5365e0791f40e..4f1815eeab14f 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object.expected +++ 
b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-strong-object.expected @@ -10,27 +10,33 @@ * @RelayResolver ClientUser */ ==================================== OUTPUT =================================== -StrongObjectResolver( - StrongObjectIr { - type_name: Identifier { - span: 20:30, - token: Token { +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { span: 20:30, - kind: Identifier, + token: Token { + span: 20:30, + kind: Identifier, + }, + value: "ClientUser", }, - value: "ClientUser", - }, - rhs_location: /path/to/test/fixture/relay-resolver-strong-object.js:20:30, - root_fragment: WithLocation { - location: :0:0, - item: FragmentDefinitionName( - "ClientUser__id", + rhs_location: /path/to/test/fixture/relay-resolver-strong-object.js:20:30, + root_fragment: WithLocation { + location: :0:0, + item: FragmentDefinitionName( + "ClientUser__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: /path/to/test/fixture/relay-resolver-strong-object.js:0:31, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "b1c8ae1937aed7425f5a87a4762ad83d", ), }, - description: None, - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-strong-object.js:0:31, - implements_interfaces: [], - }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.expected new file mode 100644 index 0000000000000..56934f73d4748 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.expected @@ -0,0 +1,47 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// relay:enable_interface_output_type + +/** + * @RelayResolver ClientUser implements Foo + * @weak + */ +==================================== OUTPUT =================================== +Type( + WeakObjectType( + WeakObjectIr { + type_name: Identifier { + span: 20:30, + token: Token { + span: 20:30, + kind: Identifier, + }, + value: "ClientUser", + }, + rhs_location: /path/to/test/fixture/relay-resolver-weak-object-with-implements.js:20:45, + description: None, + hack_source: None, + deprecated: None, + location: /path/to/test/fixture/relay-resolver-weak-object-with-implements.js:0:55, + implements_interfaces: [ + Identifier { + span: 42:45, + token: Token { + span: 42:45, + kind: Identifier, + }, + value: "Foo", + }, + ], + source_hash: ResolverSourceHash( + "26ae5e285958dda3877b78aaf5a82b65", + ), + }, + ), +) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.js new file mode 100644 index 0000000000000..7086b3ffa608a --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-weak-object-with-implements.js @@ -0,0 +1,13 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// relay:enable_interface_output_type + +/** + * @RelayResolver ClientUser implements Foo + * @weak + */ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.expected index 3c1dee099a1e9..bdd509eba9f9f 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -16,8 +17,8 @@ ==================================== ERROR ==================================== ✖︎ Defining arguments with default values for resolver fields is not supported, yet. - /path/to/test/fixture/relay-resolver-with-args-default-value.invalid.js:13:37 - 12 │ * @onType User - 13 │ * @fieldName hello(world: String = "World") + /path/to/test/fixture/relay-resolver-with-args-default-value.invalid.js:14:37 + 13 │ * @onType User + 14 │ * @fieldName hello(world: String = "World") │ ^^^^^^^ - 14 │ + 15 │ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.js index 7075f7be25750..d21c59bc05f22 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-default-value.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected 
b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected index d439e0e9f9382..6057534b8e0bd 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,8 +26,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Expected a colon (':') - /path/to/test/fixture/relay-resolver-with-args-syntax-error.invalid.js:14:34 - 13 │ * @onType User - 14 │ * @fieldName greeting(salutation) + /path/to/test/fixture/relay-resolver-with-args-syntax-error.invalid.js:15:34 + 14 │ * @onType User + 15 │ * @fieldName greeting(salutation) │ ^ - 15 │ * @rootFragment myRootFragment + 16 │ * @rootFragment myRootFragment diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.js index ad274ddd328f4..dec048133aea8 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args-syntax-error.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.expected index f1382907480cd..a603fc772eadc 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the 
root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -25,114 +27,121 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-with-args.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-with-args.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-with-args.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-with-args.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-args.js:84:98, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: None, - description: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-args.js:101:392, - item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. 
A lot.", - }, - ), - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-with-args.js:0:393, - fragment_arguments: Some( - [ - Argument { - name: Identifier { - span: 56:61, - token: Token { + ), + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-with-args.js:84:98, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: None, + description: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-with-args.js:101:392, + item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. A lot.", + }, + ), + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-with-args.js:0:393, + fragment_arguments: Some( + [ + Argument { + name: Identifier { span: 56:61, - kind: Identifier, + token: Token { + span: 56:61, + kind: Identifier, + }, + value: "first", }, - value: "first", - }, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 0:3, - token: Token { + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 0:3, - kind: Identifier, + token: Token { + span: 0:3, + kind: Identifier, + }, + value: "Int", }, - value: "Int", }, - }, - ), - default_value: Some( - Int( - IntNode { - token: Token { - span: 91:93, - kind: IntegerLiteral, + ), + default_value: Some( + Int( + IntNode { + token: Token { + span: 91:93, + kind: IntegerLiteral, + }, + value: 10, }, - value: 10, - }, + ), ), - ), - }, - Argument { - name: Identifier { - span: 96:102, - token: Token { + }, + Argument { + name: Identifier { span: 96:102, - kind: Identifier, + token: Token { + span: 96:102, + kind: Identifier, + }, + value: "userID", }, - value: "userID", - }, - type_: NonNull( - 
NonNullTypeAnnotation { - span: 0:3, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 0:2, - token: Token { + type_: NonNull( + NonNullTypeAnnotation { + span: 0:3, + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 0:2, - kind: Identifier, + token: Token { + span: 0:2, + kind: Identifier, + }, + value: "ID", }, - value: "ID", }, + ), + exclamation: Token { + span: 2:3, + kind: Exclamation, }, - ), - exclamation: Token { - span: 2:3, - kind: Exclamation, }, - }, - ), - default_value: None, - }, - ], - ), - }, + ), + default_value: None, + }, + ], + ), + source_hash: ResolverSourceHash( + "0a13488b7b321b8d7770f691a668ba44", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.js index bc914b394cc15..1a2ded6a8df1f 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected index fd77d0a1ae234..44a71a86f3248 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver @@ -25,16 +26,16 @@ graphql` ==================================== ERROR ==================================== ✖︎ Unexpected conflicting argument name. 
This field argument - /path/to/test/fixture/relay-resolver-with-conflicting-args.invalid.js:14:24 - 13 │ * @onType User - 14 │ * @fieldName greeting(salutation: String!) + /path/to/test/fixture/relay-resolver-with-conflicting-args.invalid.js:15:24 + 14 │ * @onType User + 15 │ * @fieldName greeting(salutation: String!) │ ^^^^^^^^^^ - 15 │ * @rootFragment myRootFragment + 16 │ * @rootFragment myRootFragment ℹ︎ conflicts with this fragment argument - /path/to/test/fixture/relay-resolver-with-conflicting-args.invalid.js:20:56 - 19 │ - 20 │ fragment myRootFragment on User @argumentDefinitions(salutation: {type: "String!"}) { + /path/to/test/fixture/relay-resolver-with-conflicting-args.invalid.js:21:56 + 20 │ + 21 │ fragment myRootFragment on User @argumentDefinitions(salutation: {type: "String!"}) { │ ^^^^^^^^^^ - 21 │ name + 22 │ name diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.js index 5de99a6a38393..cde25a724db64 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-conflicting-args.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.expected index 3ff9dd363ef2e..ecc7f0fade9a9 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -21,156 +23,164 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:61, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:61, - kind: Identifier, - }, - value: "greeting", - }, - arguments: Some( - List { - span: 61:82, - start: Token { - span: 61:62, - kind: OpenParen, + token: Token { + span: 53:61, + kind: Identifier, }, - items: [ - InputValueDefinition { - name: Identifier { - span: 62:72, - token: Token { + value: "greeting", + }, + arguments: Some( + List { + span: 61:82, + start: Token { + span: 61:62, + kind: OpenParen, + }, + items: [ + InputValueDefinition { + name: Identifier { span: 62:72, - kind: Identifier, + token: Token { + span: 62:72, + kind: Identifier, + }, + value: "salutation", }, - value: "salutation", - }, - type_: NonNull( - NonNullTypeAnnotation { - span: 74:81, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 74:80, - token: Token { + type_: NonNull( + NonNullTypeAnnotation { + span: 74:81, + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 74:80, - kind: Identifier, + token: Token { + span: 74:80, + kind: Identifier, + }, + value: "String", }, - value: "String", }, + ), + exclamation: Token { + span: 80:81, + kind: Exclamation, }, - ), - exclamation: Token { - span: 80:81, - kind: Exclamation, }, - }, - ), - default_value: None, - directives: [], + ), + default_value: None, + directives: [], + span: 62:81, + }, + ], + end: Token { + span: 81:82, + kind: CloseParen, }, - ], - end: Token { - span: 81:82, - kind: CloseParen, + }, + ), + }, + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:27:33, + value: WithLocation { + location: 
/path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:34:38, + item: "User", }, }, ), - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:34:38, - item: "User", + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: None, - description: None, - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:0:118, - fragment_arguments: Some( - [ - Argument { - name: Identifier { - span: 56:61, - token: Token { + ), + output_type: None, + description: None, + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js:0:118, + fragment_arguments: Some( + [ + Argument { + name: Identifier { span: 56:61, - kind: Identifier, + token: Token { + span: 56:61, + kind: Identifier, + }, + value: "first", }, - value: "first", - }, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 0:3, - token: Token { + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 0:3, - kind: Identifier, + token: Token { + span: 0:3, + kind: Identifier, + }, + value: "Int", }, - value: "Int", }, - }, - ), - default_value: Some( - Int( - IntNode { - token: Token { - span: 91:93, - kind: IntegerLiteral, + ), + default_value: Some( + Int( + IntNode { + token: Token { + span: 91:93, + kind: IntegerLiteral, + }, + value: 10, }, - value: 10, - }, + ), ), - ), - }, - Argument { - name: 
Identifier { - span: 96:102, - token: Token { + }, + Argument { + name: Identifier { span: 96:102, - kind: Identifier, + token: Token { + span: 96:102, + kind: Identifier, + }, + value: "userID", }, - value: "userID", - }, - type_: NonNull( - NonNullTypeAnnotation { - span: 0:3, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 0:2, - token: Token { + type_: NonNull( + NonNullTypeAnnotation { + span: 0:3, + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 0:2, - kind: Identifier, + token: Token { + span: 0:2, + kind: Identifier, + }, + value: "ID", }, - value: "ID", }, + ), + exclamation: Token { + span: 2:3, + kind: Exclamation, }, - ), - exclamation: Token { - span: 2:3, - kind: Exclamation, }, - }, - ), - default_value: None, - }, - ], - ), - }, + ), + default_value: None, + }, + ], + ), + source_hash: ResolverSourceHash( + "74022d4ca184b7d99e312af76420f20f", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.js index 71b0530919e17..8bbf6b791db3e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-and-fragment-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.expected index 2865797fa8053..b76db617b3564 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -21,88 +23,96 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:61, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:61, - kind: Identifier, - }, - value: "greeting", - }, - arguments: Some( - List { - span: 61:82, - start: Token { - span: 61:62, - kind: OpenParen, + token: Token { + span: 53:61, + kind: Identifier, }, - items: [ - InputValueDefinition { - name: Identifier { - span: 62:72, - token: Token { + value: "greeting", + }, + arguments: Some( + List { + span: 61:82, + start: Token { + span: 61:62, + kind: OpenParen, + }, + items: [ + InputValueDefinition { + name: Identifier { span: 62:72, - kind: Identifier, + token: Token { + span: 62:72, + kind: Identifier, + }, + value: "salutation", }, - value: "salutation", - }, - type_: NonNull( - NonNullTypeAnnotation { - span: 74:81, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 74:80, - token: Token { + type_: NonNull( + NonNullTypeAnnotation { + span: 74:81, + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 74:80, - kind: Identifier, + token: Token { + span: 74:80, + kind: Identifier, + }, + value: "String", 
}, - value: "String", }, + ), + exclamation: Token { + span: 80:81, + kind: Exclamation, }, - ), - exclamation: Token { - span: 80:81, - kind: Exclamation, }, - }, - ), - default_value: None, - directives: [], + ), + default_value: None, + directives: [], + span: 62:81, + }, + ], + end: Token { + span: 81:82, + kind: CloseParen, }, - ], - end: Token { - span: 81:82, - kind: CloseParen, + }, + ), + }, + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-with-field-args.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-with-field-args.js:34:38, + item: "User", }, }, ), - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-with-field-args.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-with-field-args.js:34:38, - item: "User", + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-with-field-args.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-field-args.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: None, - description: None, - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-with-field-args.js:0:118, - fragment_arguments: None, - }, + ), + output_type: None, + description: None, + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-with-field-args.js:0:118, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "74022d4ca184b7d99e312af76420f20f", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.js index 4c65c814c0d79..ef416893d314d 100644 --- 
a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-field-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.expected index 418c4fa297caf..d75427d1c6bfa 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -20,41 +22,48 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:61, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:61, - kind: Identifier, + token: Token { + span: 53:61, + kind: Identifier, + }, + value: "my_field", }, - value: "my_field", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-with-fragment.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-with-fragment.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver-with-fragment.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver-with-fragment.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-fragment.js:79:93, - 
item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: None, - description: None, - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-with-fragment.js:0:94, - fragment_arguments: None, - }, + ), + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver-with-fragment.js:79:93, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: None, + description: None, + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver-with-fragment.js:0:94, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "04fa2c09e68fca302832f1acfb74e2d3", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.expected index 8b61c84eed469..03bdbe1258f9e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax graphql` fragment myRootFragment on User { @@ -24,8 +25,8 @@ graphql` ==================================== ERROR ==================================== ✖︎ Fragment `missingFragment` not found. Did you mean `myRootFragment`? 
- /path/to/test/fixture/relay-resolver-with-fragment.invalid.js:21:18 - 20 │ * @fieldName my_field - 21 │ * @rootFragment missingFragment + /path/to/test/fixture/relay-resolver-with-fragment.invalid.js:22:18 + 21 │ * @fieldName my_field + 22 │ * @rootFragment missingFragment │ ^^^^^^^^^^^^^^^ - 22 │ + 23 │ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.js index 700dfdb135adb..b793b7e0ec1a1 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax graphql` fragment myRootFragment on User { diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.js index 603d79a39c580..08005c34078e8 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-fragment.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.expected deleted file mode 100644 index 399baa04f143b..0000000000000 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.expected +++ /dev/null @@ -1,91 +0,0 @@ -==================================== INPUT ==================================== -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -// relay:enable_output_type - -/** - * @RelayResolver - * - * @onType User - * @fieldName favorite_page - * @outputType ClientPage - * @rootFragment myRootFragment - * - * The user's favorite page! They probably clicked something in the UI - * to tell us that it was their favorite page and then we put that in a - * database or something. Then we got that info out again and put it out - * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. - */ - -graphql` - fragment myRootFragment on User { - name - } -` -==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { - span: 53:66, - kind: Identifier, - }, - value: "favorite_page", - }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver-with-output-type.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver-with-output-type.js:34:38, - item: "User", - }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-output-type.js:110:124, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: Some( - Output( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-output-type.js:82:92, - item: Named( - NamedTypeAnnotation { - name: Identifier { - span: 82:92, - token: Token { - span: 82:92, - kind: Identifier, - }, - value: "ClientPage", - }, - }, - ), - }, - ), - ), - description: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver-with-output-type.js:127:418, - item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. 
Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. A lot.", - }, - ), - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver-with-output-type.js:0:419, - fragment_arguments: None, - }, -) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.js deleted file mode 100644 index 1faff444d1b61..0000000000000 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver-with-output-type.js +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -// relay:enable_output_type - -/** - * @RelayResolver - * - * @onType User - * @fieldName favorite_page - * @outputType ClientPage - * @rootFragment myRootFragment - * - * The user's favorite page! They probably clicked something in the UI - * to tell us that it was their favorite page and then we put that in a - * database or something. Then we got that info out again and put it out - * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. - */ - -graphql` - fragment myRootFragment on User { - name - } -` diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.expected index 3db03f82fc685..8b00370acc9a2 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -26,64 +28,71 @@ graphql` } ` ==================================== OUTPUT =================================== -RelayResolver( - RelayResolverIr { - field: FieldDefinitionStub { - name: Identifier { - span: 53:66, - token: Token { +Field( + LegacyVerboseResolver( + LegacyVerboseResolverIr { + field: FieldDefinitionStub { + name: Identifier { span: 53:66, - kind: Identifier, + token: Token { + span: 53:66, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", + arguments: None, }, - arguments: None, - }, - on: Type( - PopulatedIrField { - key_location: /path/to/test/fixture/relay-resolver.js:27:33, - value: WithLocation { - location: /path/to/test/fixture/relay-resolver.js:34:38, - item: "User", + on: Type( + PopulatedIrField { + key_location: /path/to/test/fixture/relay-resolver.js:27:33, + value: WithLocation { + location: /path/to/test/fixture/relay-resolver.js:34:38, + item: "User", + }, }, - }, - ), - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver.js:100:114, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - output_type: Some( - EdgeTo( + ), + root_fragment: Some( WithLocation { - location: /path/to/test/fixture/relay-resolver.js:78:82, - item: Named( - NamedTypeAnnotation { - name: Identifier { - span: 78:82, - token: Token { + location: /path/to/test/fixture/relay-resolver.js:100:114, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + output_type: Some( + EdgeTo( + WithLocation { + location: /path/to/test/fixture/relay-resolver.js:78:82, + item: Named( + NamedTypeAnnotation { + name: Identifier { span: 78:82, - kind: Identifier, + token: Token { + span: 78:82, + kind: Identifier, + }, + value: "Page", }, - value: "Page", }, - }, - ), + ), + }, + ), + ), + description: Some( + WithLocation { + location: /path/to/test/fixture/relay-resolver.js:117:408, + item: "\nThe user's favorite page! 
They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. A lot.", }, ), - ), - description: Some( - WithLocation { - location: /path/to/test/fixture/relay-resolver.js:117:408, - item: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. A lot.", - }, - ), - deprecated: None, - live: None, - location: /path/to/test/fixture/relay-resolver.js:0:409, - fragment_arguments: None, - }, + hack_source: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/relay-resolver.js:0:409, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "eeac1664eff365a817d8a2446abe76ce", + ), + }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.js b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.js index acaf5a0b9ae9c..9e61999d4fd25 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/relay-resolver.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.expected new file mode 100644 index 0000000000000..130216aede9ed --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.expected @@ -0,0 +1,132 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page(id: ID!): Page + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + graphql` + fragment myRootFragment on User { + name + } +` +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 25:38, + token: Token { + span: 25:38, + kind: Identifier, + }, + value: "favorite_page", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 49:53, + token: Token { + span: 49:53, + kind: Identifier, + }, + value: "Page", + }, + }, + ), + arguments: Some( + List { + span: 38:47, + start: Token { + span: 38:39, + kind: OpenParen, + }, + items: [ + InputValueDefinition { + name: Identifier { + span: 39:41, + token: Token { + span: 39:41, + kind: Identifier, + }, + value: "id", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 43:46, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 43:45, + token: Token { + span: 43:45, + kind: Identifier, + }, + value: "ID", + }, + }, + ), + exclamation: Token { + span: 45:46, + kind: Exclamation, + }, + }, + ), + default_value: None, + directives: [], + span: 39:46, + }, + ], + end: Token { + span: 46:47, + kind: CloseParen, + }, + }, + ), + directives: [], + description: Some( + StringNode { + token: Token { + span: 88:379, + kind: Empty, + }, + value: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. 
A lot.", + }, + ), + hack_source: None, + span: 25:53, + }, + type_: WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-args.js:20:24, + item: "User", + }, + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-args.js:71:85, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/terse-relay-resolver-args.js:0:380, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "ff0b47b51f0011ae9def59af3e3792a3", + ), + }, + ), +) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.js new file mode 100644 index 0000000000000..aaae8c910eabe --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-args.js @@ -0,0 +1,21 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page(id: ID!): Page + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + graphql` + fragment myRootFragment on User { + name + } +` diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.expected new file mode 100644 index 0000000000000..b6d463cc63298 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.expected @@ -0,0 +1,89 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.mandatory_greeting: [String!] + * Non-nullable + */ +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 25:43, + token: Token { + span: 25:43, + kind: Identifier, + }, + value: "mandatory_greeting", + }, + type_: List( + ListTypeAnnotation { + span: 45:54, + open: Token { + span: 45:46, + kind: OpenBracket, + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 46:53, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 46:52, + token: Token { + span: 46:52, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 52:53, + kind: Exclamation, + }, + }, + ), + close: Token { + span: 53:54, + kind: CloseBracket, + }, + }, + ), + arguments: None, + directives: [], + description: Some( + StringNode { + token: Token { + span: 58:70, + kind: Empty, + }, + value: "Non-nullable", + }, + ), + hack_source: None, + span: 25:54, + }, + type_: WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-disallow-non-nullable-list-item.js:20:24, + item: "User", + }, + 
root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/terse-relay-resolver-disallow-non-nullable-list-item.js:0:71, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "12e28b8739ff3ab018186a28de3ca726", + ), + }, + ), +) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.js new file mode 100644 index 0000000000000..39a212060c704 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.js @@ -0,0 +1,11 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.mandatory_greeting: [String!] + * Non-nullable + */ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.expected new file mode 100644 index 0000000000000..52f6a928f4eef --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// expected-to-throw + +/** + * @RelayResolver User.mandatory_greeting: [String]! + * Non-nullable + */ +==================================== ERROR ==================================== +✖︎ Unexpected Relay Resolver field with non-nullable type. 
Relay expects all Resolver fields to be nullable since errors thrown by Resolvers are turned into `null` values. + + /path/to/test/fixture/terse-relay-resolver-disallow-non-nullable-list.js:11:44 + 10 │ * + 11 │ * @RelayResolver User.mandatory_greeting: [String]! + │ ^^^^^^^^^ + 12 │ * Non-nullable diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.js new file mode 100644 index 0000000000000..c3f96f333b764 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.js @@ -0,0 +1,13 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// expected-to-throw + +/** + * @RelayResolver User.mandatory_greeting: [String]! + * Non-nullable + */ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.expected new file mode 100644 index 0000000000000..8a45b8f42fcf7 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// expected-to-throw + +/** + * @RelayResolver User.mandatory_greeting: String! + * Non-nullable + */ +==================================== ERROR ==================================== +✖︎ Unexpected Relay Resolver field with non-nullable type. 
Relay expects all Resolver fields to be nullable since errors thrown by Resolvers are turned into `null` values. + + /path/to/test/fixture/terse-relay-resolver-disallow-non-nullable.js:11:44 + 10 │ * + 11 │ * @RelayResolver User.mandatory_greeting: String! + │ ^^^^^^^ + 12 │ * Non-nullable diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.js new file mode 100644 index 0000000000000..fea3863cbc09e --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-disallow-non-nullable.js @@ -0,0 +1,13 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// expected-to-throw + +/** + * @RelayResolver User.mandatory_greeting: String! + * Non-nullable + */ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected index 37a185056ec9b..b49dbfea2e950 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver User.favorite_page: Page @@ -31,56 +32,56 @@ ==================================== ERROR ==================================== ✖︎ The compiler attempted to parse this `@RelayResolver` block as a terse resolver declaration, but there were unexpected fields: @fieldName, @onType, @onInterface, @edgeTo, @outputType, and @weak. 
- /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:11:5 - 10 │ * - 11 │ * @RelayResolver User.favorite_page: Page + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:12:5 + 11 │ * + 12 │ * @RelayResolver User.favorite_page: Page │ ^^^^^^^^^^^^^ - 12 │ * @rootFragment myRootFragment + 13 │ * @rootFragment myRootFragment ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:13:5 - 12 │ * @rootFragment myRootFragment - 13 │ * @onType User + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:14:5 + 13 │ * @rootFragment myRootFragment + 14 │ * @onType User │ ^^^^^^ - 14 │ * @edgeTo User + 15 │ * @edgeTo User ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:14:5 - 13 │ * @onType User - 14 │ * @edgeTo User + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:15:5 + 14 │ * @onType User + 15 │ * @edgeTo User │ ^^^^^^ - 15 │ * @onInterface User + 16 │ * @onInterface User ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:15:5 - 14 │ * @edgeTo User - 15 │ * @onInterface User + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:16:5 + 15 │ * @edgeTo User + 16 │ * @onInterface User │ ^^^^^^^^^^^ - 16 │ * @outputType User + 17 │ * @outputType User ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:16:5 - 15 │ * @onInterface User - 16 │ * @outputType User + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:17:5 + 16 │ * @onInterface User + 17 │ * @outputType User │ ^^^^^^^^^^ - 17 │ * @fieldName my_field + 18 │ * @fieldName my_field ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:17:5 - 16 │ * @outputType User - 17 │ * @fieldName my_field + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:18:5 + 17 │ * @outputType User + 18 │ * 
@fieldName my_field │ ^^^^^^^^^ - 18 │ * @weak + 19 │ * @weak ℹ︎ Unexpected field - /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:18:5 - 17 │ * @fieldName my_field - 18 │ * @weak + /path/to/test/fixture/terse-relay-resolver-forbidden-fields.invalid.js:19:5 + 18 │ * @fieldName my_field + 19 │ * @weak │ ^^^^ - 19 │ * + 20 │ * diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.js index e4e91469f3cd9..72e99e458f28e 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.js +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver User.favorite_page: Page diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.expected new file mode 100644 index 0000000000000..59c4b2f1d96ea --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.expected @@ -0,0 +1,78 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// relay:allow_resolver_non_nullable_return_type + +/** + * @RelayResolver User.mandatory_greeting: String! 
+ * Non-nullable + */ +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 25:43, + token: Token { + span: 25:43, + kind: Identifier, + }, + value: "mandatory_greeting", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 45:52, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 45:51, + token: Token { + span: 45:51, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 51:52, + kind: Exclamation, + }, + }, + ), + arguments: None, + directives: [], + description: Some( + StringNode { + token: Token { + span: 56:68, + kind: Empty, + }, + value: "Non-nullable", + }, + ), + hack_source: None, + span: 25:52, + }, + type_: WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-non-nullable.js:20:24, + item: "User", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/terse-relay-resolver-non-nullable.js:0:69, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "ac789e28bceef3eeaab77ae5203f43a6", + ), + }, + ), +) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.js new file mode 100644 index 0000000000000..7d8175f59a95e --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-non-nullable.js @@ -0,0 +1,13 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// relay:allow_resolver_non_nullable_return_type + +/** + * @RelayResolver User.mandatory_greeting: String! 
+ * Non-nullable + */ diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.expected new file mode 100644 index 0000000000000..1d89f1b6dcbc2 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.expected @@ -0,0 +1,118 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page: Page @semanticNonNull + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + graphql` + fragment myRootFragment on User { + name + } +` +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 25:38, + token: Token { + span: 25:38, + kind: Identifier, + }, + value: "favorite_page", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 40:44, + token: Token { + span: 40:44, + kind: Identifier, + }, + value: "Page", + }, + }, + ), + arguments: None, + directives: [ + ConstantDirective { + span: 45:61, + at: Token { + span: 45:46, + kind: At, + }, + name: Identifier { + span: 46:61, + token: Token { + span: 46:61, + kind: Identifier, + }, + value: "semanticNonNull", + }, + arguments: None, + }, + ], + description: Some( + StringNode { + token: Token { + span: 96:387, + kind: Empty, + }, + value: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. 
A lot.", + }, + ), + hack_source: None, + span: 25:61, + }, + type_: WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-semantic-non-null.js:20:24, + item: "User", + }, + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/terse-relay-resolver-semantic-non-null.js:79:93, + item: FragmentDefinitionName( + "myRootFragment", + ), + }, + ), + deprecated: None, + semantic_non_null: Some( + ConstantDirective { + span: 45:61, + at: Token { + span: 45:46, + kind: At, + }, + name: Identifier { + span: 46:61, + token: Token { + span: 46:61, + kind: Identifier, + }, + value: "semanticNonNull", + }, + arguments: None, + }, + ), + live: None, + location: /path/to/test/fixture/terse-relay-resolver-semantic-non-null.js:0:388, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "ba2a3b6d7c4294fef33f921df3b20065", + ), + }, + ), +) diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.js b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.js new file mode 100644 index 0000000000000..d8dab8f328b30 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver-semantic-non-null.js @@ -0,0 +1,21 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page: Page @semanticNonNull + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + graphql` + fragment myRootFragment on User { + name + } +` diff --git a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver.expected b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver.expected index ce6facb35197a..4422526527074 100644 --- a/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver.expected +++ b/compiler/crates/relay-docblock/tests/parse/fixtures/terse-relay-resolver.expected @@ -21,48 +21,64 @@ } ` ==================================== OUTPUT =================================== -TerseRelayResolver( - TerseRelayResolverIr { - field: FieldDefinition { - name: Identifier { - span: 25:38, - token: Token { +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { span: 25:38, - kind: Identifier, + token: Token { + span: 25:38, + kind: Identifier, + }, + value: "favorite_page", }, - value: "favorite_page", - }, - type_: Named( - NamedTypeAnnotation { - name: Identifier { - span: 40:44, - token: Token { + type_: Named( + NamedTypeAnnotation { + name: Identifier { span: 40:44, - kind: Identifier, + token: Token { + span: 40:44, + kind: Identifier, + }, + value: "Page", + }, + }, + ), + arguments: None, + directives: [], + description: Some( + StringNode { + token: Token { + span: 79:370, + kind: Empty, }, - value: "Page", + value: "\nThe user's favorite page! They probably clicked something in the UI\nto tell us that it was their favorite page and then we put that in a\ndatabase or something. Then we got that info out again and put it out\nagain. Anyway, I'm rambling now. Its a page that the user likes. 
A lot.", }, + ), + hack_source: None, + span: 25:44, + }, + type_: WithLocation { + location: /path/to/test/fixture/terse-relay-resolver.js:20:24, + item: "User", + }, + root_fragment: Some( + WithLocation { + location: /path/to/test/fixture/terse-relay-resolver.js:62:76, + item: FragmentDefinitionName( + "myRootFragment", + ), }, ), - arguments: None, - directives: [], - description: None, - }, - type_: WithLocation { - location: /path/to/test/fixture/terse-relay-resolver.js:20:24, - item: "User", + deprecated: None, + semantic_non_null: None, + live: None, + location: /path/to/test/fixture/terse-relay-resolver.js:0:371, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "0a9950ad1f952f5777b27604738fcf91", + ), }, - root_fragment: Some( - WithLocation { - location: /path/to/test/fixture/terse-relay-resolver.js:62:76, - item: FragmentDefinitionName( - "myRootFragment", - ), - }, - ), - deprecated: None, - live: None, - location: /path/to/test/fixture/terse-relay-resolver.js:0:371, - fragment_arguments: None, - }, + ), ) diff --git a/compiler/crates/relay-docblock/tests/parse/mod.rs b/compiler/crates/relay-docblock/tests/parse/mod.rs deleted file mode 100644 index 78da7e895e191..0000000000000 --- a/compiler/crates/relay-docblock/tests/parse/mod.rs +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::Diagnostic; -use common::FeatureFlag; -use common::SourceLocationKey; -use docblock_syntax::parse_docblock; -use extract_graphql::JavaScriptSourceFeature; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_syntax::parse_executable; -use graphql_syntax::ExecutableDefinition; -use intern::string_key::Intern; -use relay_docblock::parse_docblock_ast; -use relay_docblock::ParseOptions; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let js_features = extract_graphql::extract(fixture.content); - let executable_documents = js_features - .iter() - .enumerate() - .filter_map(|(i, source)| match source { - JavaScriptSourceFeature::GraphQL(source) => Some( - parse_executable( - &source.text_source().text, - SourceLocationKey::Embedded { - path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), - index: i as u16, - }, - ) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) - .map(|document| document.definitions), - ), - JavaScriptSourceFeature::Docblock(_) => None, - }) - .collect::, String>>()? 
- .iter() - .flatten() - .cloned() - .collect::>(); - - let irs = js_features - .iter() - .enumerate() - .filter_map(|(i, source)| match source { - JavaScriptSourceFeature::GraphQL(_) => None, - JavaScriptSourceFeature::Docblock(docblock_source) => Some( - parse_docblock( - &docblock_source.text_source().text, - SourceLocationKey::Embedded { - path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), - index: i as u16, - }, - ) - .and_then(|ast| { - parse_docblock_ast( - &ast, - Some(&executable_documents), - ParseOptions { - enable_output_type: if fixture - .content - .contains("// relay:enable_output_type") - { - &FeatureFlag::Enabled - } else { - &FeatureFlag::Disabled - }, - }, - ) - }) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)), - ), - }) - .collect::, String>>()?; - - let output = irs - .iter() - .flatten() - .map(|ir| format!("{:#?}", ir)) - .collect::>() - .join("\n\n"); - - Ok(output) -} - -pub fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { - let printer = DiagnosticPrinter::new(|source_location| match source_location { - SourceLocationKey::Embedded { index, .. } => Some( - extract_graphql::extract(source)[index as usize] - .text_source() - .clone(), - ), - SourceLocationKey::Standalone { .. } => None, - SourceLocationKey::Generated => None, - }); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/relay-docblock/tests/parse_test.rs b/compiler/crates/relay-docblock/tests/parse_test.rs index de28f34aacb42..3fed4d407e14a 100644 --- a/compiler/crates/relay-docblock/tests/parse_test.rs +++ b/compiler/crates/relay-docblock/tests/parse_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<808ccebf97208f983fbfd157e12804fe>> + * @generated SignedSource<<15998457fc663de32a7f93c6a8f510e2>> */ mod parse; @@ -12,268 +12,310 @@ mod parse; use parse::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn edge_to_non_null_invalid() { +#[tokio::test] +async fn edge_to_non_null_invalid() { let input = include_str!("parse/fixtures/edge-to-non-null.invalid.js"); let expected = include_str!("parse/fixtures/edge-to-non-null.invalid.expected"); - test_fixture(transform_fixture, "edge-to-non-null.invalid.js", "parse/fixtures/edge-to-non-null.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "edge-to-non-null.invalid.js", "parse/fixtures/edge-to-non-null.invalid.expected", input, expected).await; } -#[test] -fn edge_to_non_null_plural_item_invalid() { +#[tokio::test] +async fn edge_to_non_null_plural_item_invalid() { let input = include_str!("parse/fixtures/edge-to-non-null-plural-item.invalid.js"); let expected = include_str!("parse/fixtures/edge-to-non-null-plural-item.invalid.expected"); - test_fixture(transform_fixture, "edge-to-non-null-plural-item.invalid.js", "parse/fixtures/edge-to-non-null-plural-item.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "edge-to-non-null-plural-item.invalid.js", "parse/fixtures/edge-to-non-null-plural-item.invalid.expected", input, expected).await; } -#[test] -fn edge_to_not_identifier_invalid() { +#[tokio::test] +async fn edge_to_not_identifier_invalid() { let input = include_str!("parse/fixtures/edge-to-not-identifier.invalid.js"); let expected = include_str!("parse/fixtures/edge-to-not-identifier.invalid.expected"); - test_fixture(transform_fixture, "edge-to-not-identifier.invalid.js", "parse/fixtures/edge-to-not-identifier.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "edge-to-not-identifier.invalid.js", "parse/fixtures/edge-to-not-identifier.invalid.expected", input, expected).await; } 
-#[test] -fn output_type_edge_to_invalid() { +#[tokio::test] +async fn output_type_edge_to_invalid() { let input = include_str!("parse/fixtures/output-type-edge-to-invalid.js"); let expected = include_str!("parse/fixtures/output-type-edge-to-invalid.expected"); - test_fixture(transform_fixture, "output-type-edge-to-invalid.js", "parse/fixtures/output-type-edge-to-invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-edge-to-invalid.js", "parse/fixtures/output-type-edge-to-invalid.expected", input, expected).await; } -#[test] -fn output_type_invalid_type() { +#[tokio::test] +async fn output_type_invalid_type() { let input = include_str!("parse/fixtures/output-type-invalid-type.js"); let expected = include_str!("parse/fixtures/output-type-invalid-type.expected"); - test_fixture(transform_fixture, "output-type-invalid-type.js", "parse/fixtures/output-type-invalid-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-invalid-type.js", "parse/fixtures/output-type-invalid-type.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn relay_resolver() { let input = include_str!("parse/fixtures/relay-resolver.js"); let expected = include_str!("parse/fixtures/relay-resolver.expected"); - test_fixture(transform_fixture, "relay-resolver.js", "parse/fixtures/relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver.js", "parse/fixtures/relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_deprecated() { +#[tokio::test] +async fn relay_resolver_deprecated() { let input = include_str!("parse/fixtures/relay-resolver-deprecated.js"); let expected = include_str!("parse/fixtures/relay-resolver-deprecated.expected"); - test_fixture(transform_fixture, "relay-resolver-deprecated.js", "parse/fixtures/relay-resolver-deprecated.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"relay-resolver-deprecated.js", "parse/fixtures/relay-resolver-deprecated.expected", input, expected).await; } -#[test] -fn relay_resolver_deprecated_no_description() { +#[tokio::test] +async fn relay_resolver_deprecated_no_description() { let input = include_str!("parse/fixtures/relay-resolver-deprecated-no-description.js"); let expected = include_str!("parse/fixtures/relay-resolver-deprecated-no-description.expected"); - test_fixture(transform_fixture, "relay-resolver-deprecated-no-description.js", "parse/fixtures/relay-resolver-deprecated-no-description.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-deprecated-no-description.js", "parse/fixtures/relay-resolver-deprecated-no-description.expected", input, expected).await; } -#[test] -fn relay_resolver_invalid_field_invalid() { +#[tokio::test] +async fn relay_resolver_invalid_field_invalid() { let input = include_str!("parse/fixtures/relay-resolver-invalid-field.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-invalid-field.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-invalid-field.invalid.js", "parse/fixtures/relay-resolver-invalid-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-invalid-field.invalid.js", "parse/fixtures/relay-resolver-invalid-field.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_live() { +#[tokio::test] +async fn relay_resolver_live() { let input = include_str!("parse/fixtures/relay-resolver-live.js"); let expected = include_str!("parse/fixtures/relay-resolver-live.expected"); - test_fixture(transform_fixture, "relay-resolver-live.js", "parse/fixtures/relay-resolver-live.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-live.js", "parse/fixtures/relay-resolver-live.expected", input, expected).await; } -#[test] -fn relay_resolver_live_with_text_invalid() { +#[tokio::test] +async fn 
relay_resolver_live_with_text_invalid() { let input = include_str!("parse/fixtures/relay-resolver-live-with-text.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-live-with-text.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-live-with-text.invalid.js", "parse/fixtures/relay-resolver-live-with-text.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-live-with-text.invalid.js", "parse/fixtures/relay-resolver-live-with-text.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_missing_field_invalid() { +#[tokio::test] +async fn relay_resolver_missing_field_invalid() { let input = include_str!("parse/fixtures/relay-resolver-missing-field.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-missing-field.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-missing-field.invalid.js", "parse/fixtures/relay-resolver-missing-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-missing-field.invalid.js", "parse/fixtures/relay-resolver-missing-field.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_missing_multiple_fields_invalid() { +#[tokio::test] +async fn relay_resolver_missing_multiple_fields_invalid() { let input = include_str!("parse/fixtures/relay-resolver-missing-multiple-fields.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-missing-multiple-fields.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-missing-multiple-fields.invalid.js", "parse/fixtures/relay-resolver-missing-multiple-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-missing-multiple-fields.invalid.js", "parse/fixtures/relay-resolver-missing-multiple-fields.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_named_export() { +#[tokio::test] +async fn 
relay_resolver_named_export() { let input = include_str!("parse/fixtures/relay-resolver-named-export.js"); let expected = include_str!("parse/fixtures/relay-resolver-named-export.expected"); - test_fixture(transform_fixture, "relay-resolver-named-export.js", "parse/fixtures/relay-resolver-named-export.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-named-export.js", "parse/fixtures/relay-resolver-named-export.expected", input, expected).await; } -#[test] -fn relay_resolver_on_interface_mismatch_invalid() { +#[tokio::test] +async fn relay_resolver_on_interface_mismatch_invalid() { let input = include_str!("parse/fixtures/relay-resolver-on-interface-mismatch.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-interface-mismatch.invalid.js", "parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-interface-mismatch.invalid.js", "parse/fixtures/relay-resolver-on-interface-mismatch.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_interface_no_value_invalid() { +#[tokio::test] +async fn relay_resolver_on_interface_no_value_invalid() { let input = include_str!("parse/fixtures/relay-resolver-on-interface-no-value.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-interface-no-value.invalid.js", "parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-interface-no-value.invalid.js", "parse/fixtures/relay-resolver-on-interface-no-value.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_type_and_on_interface_invalid() { +#[tokio::test] +async fn 
relay_resolver_on_type_and_on_interface_invalid() { let input = include_str!("parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-type-and-on-interface.invalid.js", "parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-type-and-on-interface.invalid.js", "parse/fixtures/relay-resolver-on-type-and-on-interface.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_type_mismatch_invalid() { +#[tokio::test] +async fn relay_resolver_on_type_mismatch_invalid() { let input = include_str!("parse/fixtures/relay-resolver-on-type-mismatch.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-type-mismatch.invalid.js", "parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-type-mismatch.invalid.js", "parse/fixtures/relay-resolver-on-type-mismatch.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_type_no_value_invalid() { +#[tokio::test] +async fn relay_resolver_on_type_no_value_invalid() { let input = include_str!("parse/fixtures/relay-resolver-on-type-no-value.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-on-type-no-value.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-type-no-value.invalid.js", "parse/fixtures/relay-resolver-on-type-no-value.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-type-no-value.invalid.js", "parse/fixtures/relay-resolver-on-type-no-value.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object() { 
+#[tokio::test] +async fn relay_resolver_strong_object() { let input = include_str!("parse/fixtures/relay-resolver-strong-object.js"); let expected = include_str!("parse/fixtures/relay-resolver-strong-object.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object.js", "parse/fixtures/relay-resolver-strong-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object.js", "parse/fixtures/relay-resolver-strong-object.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_gibberish_invalid() { +#[tokio::test] +async fn relay_resolver_strong_object_with_gibberish_invalid() { let input = include_str!("parse/fixtures/relay-resolver-strong-object-with-gibberish.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-strong-object-with-gibberish.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-gibberish.invalid.js", "parse/fixtures/relay-resolver-strong-object-with-gibberish.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-gibberish.invalid.js", "parse/fixtures/relay-resolver-strong-object-with-gibberish.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements() { let input = include_str!("parse/fixtures/relay-resolver-strong-object-with-implements.js"); let expected = include_str!("parse/fixtures/relay-resolver-strong-object-with-implements.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements.js", "parse/fixtures/relay-resolver-strong-object-with-implements.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements.js", "parse/fixtures/relay-resolver-strong-object-with-implements.expected", input, expected).await; } -#[test] -fn 
relay_resolver_terse_invalid_type_name() { +#[tokio::test] +async fn relay_resolver_terse_invalid_type_name() { let input = include_str!("parse/fixtures/relay-resolver-terse-invalid-type-name.js"); let expected = include_str!("parse/fixtures/relay-resolver-terse-invalid-type-name.expected"); - test_fixture(transform_fixture, "relay-resolver-terse-invalid-type-name.js", "parse/fixtures/relay-resolver-terse-invalid-type-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-terse-invalid-type-name.js", "parse/fixtures/relay-resolver-terse-invalid-type-name.expected", input, expected).await; } -#[test] -fn relay_resolver_weak_object_with_gibberish_invalid() { +#[tokio::test] +async fn relay_resolver_weak_object_with_gibberish_invalid() { let input = include_str!("parse/fixtures/relay-resolver-weak-object-with-gibberish.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-weak-object-with-gibberish.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-weak-object-with-gibberish.invalid.js", "parse/fixtures/relay-resolver-weak-object-with-gibberish.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-weak-object-with-gibberish.invalid.js", "parse/fixtures/relay-resolver-weak-object-with-gibberish.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args() { +#[tokio::test] +async fn relay_resolver_weak_object_with_implements() { + let input = include_str!("parse/fixtures/relay-resolver-weak-object-with-implements.js"); + let expected = include_str!("parse/fixtures/relay-resolver-weak-object-with-implements.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-weak-object-with-implements.js", "parse/fixtures/relay-resolver-weak-object-with-implements.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_args() { let input = include_str!("parse/fixtures/relay-resolver-with-args.js"); 
let expected = include_str!("parse/fixtures/relay-resolver-with-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args.js", "parse/fixtures/relay-resolver-with-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args.js", "parse/fixtures/relay-resolver-with-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args_default_value_invalid() { +#[tokio::test] +async fn relay_resolver_with_args_default_value_invalid() { let input = include_str!("parse/fixtures/relay-resolver-with-args-default-value.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-args-default-value.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args-default-value.invalid.js", "parse/fixtures/relay-resolver-with-args-default-value.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args-default-value.invalid.js", "parse/fixtures/relay-resolver-with-args-default-value.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args_syntax_error_invalid() { +#[tokio::test] +async fn relay_resolver_with_args_syntax_error_invalid() { let input = include_str!("parse/fixtures/relay-resolver-with-args-syntax-error.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args-syntax-error.invalid.js", "parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args-syntax-error.invalid.js", "parse/fixtures/relay-resolver-with-args-syntax-error.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_conflicting_args_invalid() { +#[tokio::test] +async fn relay_resolver_with_conflicting_args_invalid() { let input = 
include_str!("parse/fixtures/relay-resolver-with-conflicting-args.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-conflicting-args.invalid.js", "parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-conflicting-args.invalid.js", "parse/fixtures/relay-resolver-with-conflicting-args.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_field_and_fragment_args() { +#[tokio::test] +async fn relay_resolver_with_field_and_fragment_args() { let input = include_str!("parse/fixtures/relay-resolver-with-field-and-fragment-args.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-field-and-fragment-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-field-and-fragment-args.js", "parse/fixtures/relay-resolver-with-field-and-fragment-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-field-and-fragment-args.js", "parse/fixtures/relay-resolver-with-field-and-fragment-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_field_args() { +#[tokio::test] +async fn relay_resolver_with_field_args() { let input = include_str!("parse/fixtures/relay-resolver-with-field-args.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-field-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-field-args.js", "parse/fixtures/relay-resolver-with-field-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-field-args.js", "parse/fixtures/relay-resolver-with-field-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_fragment() { +#[tokio::test] +async fn relay_resolver_with_fragment() { let input = 
include_str!("parse/fixtures/relay-resolver-with-fragment.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-fragment.expected"); - test_fixture(transform_fixture, "relay-resolver-with-fragment.js", "parse/fixtures/relay-resolver-with-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-fragment.js", "parse/fixtures/relay-resolver-with-fragment.expected", input, expected).await; } -#[test] -fn relay_resolver_with_fragment_invalid() { +#[tokio::test] +async fn relay_resolver_with_fragment_invalid() { let input = include_str!("parse/fixtures/relay-resolver-with-fragment.invalid.js"); let expected = include_str!("parse/fixtures/relay-resolver-with-fragment.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-fragment.invalid.js", "parse/fixtures/relay-resolver-with-fragment.invalid.expected", input, expected); -} - -#[test] -fn relay_resolver_with_output_type() { - let input = include_str!("parse/fixtures/relay-resolver-with-output-type.js"); - let expected = include_str!("parse/fixtures/relay-resolver-with-output-type.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type.js", "parse/fixtures/relay-resolver-with-output-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-fragment.invalid.js", "parse/fixtures/relay-resolver-with-fragment.invalid.expected", input, expected).await; } -#[test] -fn terse_relay_resolver() { +#[tokio::test] +async fn terse_relay_resolver() { let input = include_str!("parse/fixtures/terse-relay-resolver.js"); let expected = include_str!("parse/fixtures/terse-relay-resolver.expected"); - test_fixture(transform_fixture, "terse-relay-resolver.js", "parse/fixtures/terse-relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver.js", "parse/fixtures/terse-relay-resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn 
terse_relay_resolver_args() { + let input = include_str!("parse/fixtures/terse-relay-resolver-args.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-args.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-args.js", "parse/fixtures/terse-relay-resolver-args.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_disallow_non_nullable() { + let input = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-disallow-non-nullable.js", "parse/fixtures/terse-relay-resolver-disallow-non-nullable.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_forbidden_fields_invalid() { +#[tokio::test] +async fn terse_relay_resolver_disallow_non_nullable_list() { + let input = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-disallow-non-nullable-list.js", "parse/fixtures/terse-relay-resolver-disallow-non-nullable-list.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_disallow_non_nullable_list_item() { + let input = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-disallow-non-nullable-list-item.js", "parse/fixtures/terse-relay-resolver-disallow-non-nullable-list-item.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_forbidden_fields_invalid() { let input = 
include_str!("parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.js"); let expected = include_str!("parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-forbidden-fields.invalid.js", "parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-forbidden-fields.invalid.js", "parse/fixtures/terse-relay-resolver-forbidden-fields.invalid.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_fragment_type_does_not_match_parent_invalid() { +#[tokio::test] +async fn terse_relay_resolver_fragment_type_does_not_match_parent_invalid() { let input = include_str!("parse/fixtures/terse-relay-resolver-fragment-type-does-not-match-parent.invalid.js"); let expected = include_str!("parse/fixtures/terse-relay-resolver-fragment-type-does-not-match-parent.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-fragment-type-does-not-match-parent.invalid.js", "parse/fixtures/terse-relay-resolver-fragment-type-does-not-match-parent.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-fragment-type-does-not-match-parent.invalid.js", "parse/fixtures/terse-relay-resolver-fragment-type-does-not-match-parent.invalid.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_no_dot_2_invalid() { +#[tokio::test] +async fn terse_relay_resolver_no_dot_2_invalid() { let input = include_str!("parse/fixtures/terse-relay-resolver-no-dot-2.invalid.js"); let expected = include_str!("parse/fixtures/terse-relay-resolver-no-dot-2.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-no-dot-2.invalid.js", "parse/fixtures/terse-relay-resolver-no-dot-2.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-no-dot-2.invalid.js", 
"parse/fixtures/terse-relay-resolver-no-dot-2.invalid.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_no_dot_invalid() { +#[tokio::test] +async fn terse_relay_resolver_no_dot_invalid() { let input = include_str!("parse/fixtures/terse-relay-resolver-no-dot.invalid.js"); let expected = include_str!("parse/fixtures/terse-relay-resolver-no-dot.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-no-dot.invalid.js", "parse/fixtures/terse-relay-resolver-no-dot.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-no-dot.invalid.js", "parse/fixtures/terse-relay-resolver-no-dot.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_non_nullable() { + let input = include_str!("parse/fixtures/terse-relay-resolver-non-nullable.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-non-nullable.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-non-nullable.js", "parse/fixtures/terse-relay-resolver-non-nullable.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_semantic_non_null() { + let input = include_str!("parse/fixtures/terse-relay-resolver-semantic-non-null.js"); + let expected = include_str!("parse/fixtures/terse-relay-resolver-semantic-non-null.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-semantic-non-null.js", "parse/fixtures/terse-relay-resolver-semantic-non-null.expected", input, expected).await; } diff --git a/compiler/crates/relay-docblock/tests/to_schema.rs b/compiler/crates/relay-docblock/tests/to_schema.rs new file mode 100644 index 0000000000000..9669b8e3f0993 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema.rs @@ -0,0 +1,139 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::DiagnosticsResult; +use common::FeatureFlag; +use common::SourceLocationKey; +use docblock_syntax::parse_docblock; +use docblock_syntax::DocblockSource; +use extract_graphql::JavaScriptSourceFeature; +use fixture_tests::Fixture; +use graphql_syntax::parse_executable; +use graphql_syntax::ExecutableDefinition; +use graphql_test_helpers::diagnostics_to_sorted_string; +use intern::string_key::Intern; +use relay_config::ProjectName; +use relay_docblock::extend_schema_with_resolver_type_system_definition; +use relay_docblock::parse_docblock_ast; +use relay_docblock::validate_resolver_schema; +use relay_docblock::ParseOptions; +use relay_test_schema::get_test_schema_with_extensions; +use schema::SDLSchema; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + let (base, mut schema) = match parts.as_slice() { + [base, extensions] => (base, extract_schema_from_js(extensions)), + [base] => (base, get_test_schema_with_extensions("")), + _ => panic!("Invalid fixture input {}", fixture.content), + }; + + let js_features = extract_graphql::extract(base); + let project_name = ProjectName::default(); + + let executable_documents = js_features + .iter() + .enumerate() + .filter_map(|(_, source)| match source { + JavaScriptSourceFeature::GraphQL(source) => Some( + parse_executable(&source.text_source().text, SourceLocationKey::Generated) + .map_err(|diagnostics| { + diagnostics_to_sorted_string(&source.text_source().text, &diagnostics) + }) + .map(|document| document.definitions), + ), + JavaScriptSourceFeature::Docblock(_) => None, + }) + .collect::, String>>()? 
+ .iter() + .flatten() + .cloned() + .collect::>(); + + let mut stringify = |i: usize, source: &DocblockSource| -> DiagnosticsResult { + let ast = parse_docblock( + &source.text_source().text, + SourceLocationKey::Embedded { + path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), + index: i as u16, + }, + )?; + let ir = parse_docblock_ast( + &project_name, + &ast, + Some(&executable_documents), + &ParseOptions { + enable_interface_output_type: if fixture + .content + .contains("// relay:enable_interface_output_type") + { + &FeatureFlag::Enabled + } else { + &FeatureFlag::Disabled + }, + allow_resolver_non_nullable_return_type: if fixture + .content + .contains("// relay:allow_resolver_non_nullable_return_type") + { + &FeatureFlag::Enabled + } else { + &FeatureFlag::Disabled + }, + }, + )? + .unwrap(); + + // In non-tests, this function (correctly) consumes TypeSystemDefinition when modifying the + // schema. + // In tests, we need to clone, because we **also** want to print the schema changes. 
+ let schema_document = + ir.clone() + .to_graphql_schema_ast(project_name, &schema, &Default::default())?; + for definition in &schema_document.definitions { + extend_schema_with_resolver_type_system_definition( + definition.clone(), + Arc::get_mut(&mut schema) + .expect("Expected to be able to get mutable reference to schema"), + schema_document.location, + )?; + } + + validate_resolver_schema(&schema, &Default::default())?; + + ir.to_sdl_string(project_name, &schema, &Default::default()) + }; + + let schema_strings = js_features + .iter() + .enumerate() + .filter_map(|(i, source)| match source { + JavaScriptSourceFeature::GraphQL(_) => None, + JavaScriptSourceFeature::Docblock(docblock_source) => Some((i, docblock_source)), + }) + .map(|(i, source)| { + stringify(i, source).map_err(|diagnostics| { + diagnostics_to_sorted_string(&source.text_source().text, &diagnostics) + }) + }) + .collect::, String>>()?; + + Ok(schema_strings.join("\n\n")) +} + +fn extract_schema_from_js(js: &str) -> Arc { + let js_features = extract_graphql::extract(js); + let sdl_text = match js_features.as_slice() { + [JavaScriptSourceFeature::GraphQL(source)] => &source.text_source().text, + _ => { + panic!("Expected %extensions% to contain exactly 1 graphql`` tagged template literal.") + } + }; + + get_test_schema_with_extensions(sdl_text) +} diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.expected index 52a00581b9078..8e876ae8663df 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -27,5 +29,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: Page @relay_resolver(import_path: "/path/to/test/fixture/client-edge-relay-resolver.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: Page @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/client-edge-relay-resolver.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "eeac1664eff365a817d8a2446abe76ce") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.js index 01d61ceeeea8d..933ac69a77164 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-relay-resolver.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected index 6da7e125f81f0..a4ed54f64b40d 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js index 5d991ef9903c1..018ad38b2acfa 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected index e4a79288aa1f0..be5022c578352 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected +++ 
b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.js index e509ae0908e0a..3a8d627a3ea45 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.expected new file mode 100644 index 0000000000000..f3a1b885e0429 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.expected @@ -0,0 +1,36 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// relay:allow_legacy_verbose_syntax + +/** + * @RelayResolver MyType + */ + +/** + * @RelayResolver + * @onType MyType + * @fieldName my_field + * @rootFragment myRootFragment + */ + +graphql` + fragment myRootFragment on MyType { + id + } +` +==================================== OUTPUT =================================== +type MyType @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: RelayResolverValue! @relay_resolver(fragment_name: "MyType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "MyType", import_path: "/path/to/test/fixture/legacy-relay-resolver-with-root-fragment-on-model.js") @resolver_source_hash(value: "b81f253a757aaba36955be6d8e224c2a") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +extend type MyType { + my_field: RelayResolverValue @relay_resolver(import_name: "my_field", import_path: "/path/to/test/fixture/legacy-relay-resolver-with-root-fragment-on-model.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "6ea8e78f94a38ef98a44161e39f85f8d") +} diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.js new file mode 100644 index 0000000000000..40396ae7b0ad9 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.js @@ -0,0 +1,25 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// relay:allow_legacy_verbose_syntax + +/** + * @RelayResolver MyType + */ + +/** + * @RelayResolver + * @onType MyType + * @fieldName my_field + * @rootFragment myRootFragment + */ + +graphql` + fragment myRootFragment on MyType { + id + } +` diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.expected index 12a1e152cfff5..5798bf244889e 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -23,5 +25,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: Page @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-deprecated-no-description.js", fragment_name: "myRootFragment", import_name: "favorite_page") @deprecated + favorite_page: Page @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-deprecated-no-description.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "eb46fd9d0828f318bc5613bca9e67e42") @deprecated } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.js index 6323335391023..03221053de3df 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated-no-description.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this 
source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.expected index 82f503aacc921..43ce921a782c1 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -24,5 +26,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: Page @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-deprecated.js", fragment_name: "myRootFragment", import_name: "favorite_page") @deprecated(reason: "This one is not used any more") + favorite_page: Page @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-deprecated.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "01183f3ee51f5cb85eec42335548ae0f") @deprecated(reason: "This one is not used any more") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.js index 06d951851e16c..c25c198020c0c 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-deprecated.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.expected index d0714e313cda2..8c9d7b473c9a4 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.js index cbbdf08466eab..6a273167a3860 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-id.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected index c76eec706553e..0e8d038fb2d52 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.js 
b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.js index ff7f2a5c5d85c..0d314ff855825 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected index 041547f4351a4..8d0ac6ad758d3 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.js index 543fd50dc72b0..0906ab3c7b85e 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.expected 
b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.expected index 8dd639991fd35..46618c17b3ea7 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -26,5 +28,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-named-export.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-named-export.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "0a13488b7b321b8d7770f691a668ba44") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.js index 60a25f38e5207..34026ec50e5cd 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-named-export.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected index 5997de85870ca..09620f98909f2 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js index 6c7aa1671f20a..aa4930bfcc5bb 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected index b76802016c3c4..ce3780f6ff1a5 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected +++ 
b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.js index 5b95735507768..37dbc6df0a2ee 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.expected index 2923b568da8d2..7796b1195bc36 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -27,30 +29,30 @@ graphql` ` ==================================== OUTPUT =================================== extend interface UserNameRenderable { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } extend type PlainUserNameRenderer { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } extend type MarkdownUserNameRenderer { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } extend type ImplementsImplementsUserNameRenderableAndUserNameRenderable { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: 
"/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } extend interface ImplementsUserNameRenderable { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } extend type ImplementsImplementsUserNameRenderable { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-on-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "a70ac21f287c26454cea41d6b9c1feee") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.js index d06c197d5135d..535e566a649dc 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-interface.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected index bd3f2b99672f5..666ad19bb50bd 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.js index 5b95735507768..37dbc6df0a2ee 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected index 1180bbdba5df0..74b5722668abf 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.js 
b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.js index 7349ca9b5b909..aeb297090a483 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-invalid-type.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected index edda06b165368..3ae8428960451 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected @@ -7,6 +7,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.js index 866bfa805edcb..5e01f9e335a12 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.js @@ -6,6 +6,7 @@ */ // expected-to-throw +// relay:allow_legacy_verbose_syntax /** * @RelayResolver diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected index 0ddb5820e78aa..bb5ced1b47d1e 100644 --- 
a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected @@ -23,16 +23,16 @@ interface IFoo { ==================================== ERROR ==================================== ✖︎ Expected interface `IFoo` to define an `id: ID!` field. It defines an id field, but its type is `ID`. - /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.js:2:19 + :2:20 1 │ * 2 │ * @RelayResolver ClientUser implements IFoo - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ ^^ 3 │ - ℹ︎ Defined here + ℹ︎ required because the interface is implemented by a Relay Resolver type here - :2:11 + /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.js:2:19 1 │ * 2 │ * @RelayResolver ClientUser implements IFoo - │ ^^^^ + │ ^^^^^^^^^^ 3 │ diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected index 4a14d99ab6dc1..1770ad49a317f 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected @@ -23,16 +23,16 @@ interface IFoo { ==================================== ERROR ==================================== ✖︎ Expected interface `IFoo` to define an `id: ID!` field. 
- /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-no-id.invalid.js:2:19 + :2:11 1 │ * 2 │ * @RelayResolver ClientUser implements IFoo - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ ^^^^ 3 │ - ℹ︎ Defined here + ℹ︎ required because the interface is implemented by a Relay Resolver type here - :2:11 + /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-no-id.invalid.js:2:19 1 │ * 2 │ * @RelayResolver ClientUser implements IFoo - │ ^^^^ + │ ^^^^^^^^^^ 3 │ diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected index 9b10e4f8c6de3..1a864a7a4a5d0 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected @@ -21,10 +21,15 @@ interface IFoo { } ` ==================================== ERROR ==================================== -✖︎ The type `User` is an object. Please use a client-defined interface instead. +✖︎ Expected an interface type for name 'User', got an object. - /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-non-interface.js:2:19 + /path/to/test/fixture/relay-resolver-strong-object-with-implements-interface-non-interface.js:2:41 1 │ * 2 │ * @RelayResolver ClientUser implements User - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ - 3 │ + │ ^^^^ + 3 │ + + ℹ︎ the other type is defined here + + :1:1 +Internal error: Unable to print source, start index (19502) out of range. 
diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected index 02450f8d9e2e4..abab03d5831c9 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected @@ -12,10 +12,10 @@ * @RelayResolver FriendGroup implements TweedUnit */ ==================================== ERROR ==================================== -✖︎ The type `TweedUnit` is not an existing GraphQL type. Did you mean `FeedUnit`? +✖︎ Reference to undefined type 'TweedUnit'. - /path/to/test/fixture/relay-resolver-strong-object-with-implements-non-existing-type.js:2:19 + /path/to/test/fixture/relay-resolver-strong-object-with-implements-non-existing-type.js:2:42 1 │ * 2 │ * @RelayResolver FriendGroup implements TweedUnit - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ ^^^^^^^^^ 3 │ diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected index 66753964ea476..712518d5a3a18 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected @@ -17,5 +17,5 @@ /path/to/test/fixture/relay-resolver-strong-object-with-implements-server-interface.invalid.js:2:19 1 │ * 2 │ * @RelayResolver ClientUser implements Node - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ ^^^^^^^^^^ 3 │ 
diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements.expected index c66ef9e7178ee..5c2c1ede03bd3 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-implements.expected @@ -21,5 +21,5 @@ interface IFoo { ==================================== OUTPUT =================================== type ClientUser implements IFoo @__RelayResolverModel { id: ID! - __relay_model_instance: Int @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-strong-object-with-implements.js", fragment_name: "ClientUser__id", inject_fragment_data: "id", import_name: "ClientUser") @unselectable(reason: "This field is intended only for Relay's internal use") + __relay_model_instance: RelayResolverValue! 
@relay_resolver(fragment_name: "ClientUser__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientUser", import_path: "/path/to/test/fixture/relay-resolver-strong-object-with-implements.js") @resolver_source_hash(value: "76be3b85f11135352a0d3a5726418956") @unselectable(reason: "This field is intended only for Relay's internal use") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected index d7a93d2fdd681..9c25186d33d87 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected @@ -27,5 +27,5 @@ interface IBar { ==================================== OUTPUT =================================== type ClientUser implements IFoo & IBar @__RelayResolverModel { id: ID! - __relay_model_instance: Int @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-strong-object-with-multiple-implements.js", fragment_name: "ClientUser__id", inject_fragment_data: "id", import_name: "ClientUser") @unselectable(reason: "This field is intended only for Relay's internal use") + __relay_model_instance: RelayResolverValue! 
@relay_resolver(fragment_name: "ClientUser__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientUser", import_path: "/path/to/test/fixture/relay-resolver-strong-object-with-multiple-implements.js") @resolver_source_hash(value: "1b7346b6155a43514be2946721ff59fb") @unselectable(reason: "This field is intended only for Relay's internal use") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object.expected index ef80c5ecfdbf7..4ef88ea2cb954 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-strong-object.expected @@ -12,5 +12,5 @@ ==================================== OUTPUT =================================== type ClientUser @__RelayResolverModel { id: ID! - __relay_model_instance: Int @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-strong-object.js", fragment_name: "ClientUser__id", inject_fragment_data: "id", import_name: "ClientUser") @unselectable(reason: "This field is intended only for Relay's internal use") + __relay_model_instance: RelayResolverValue! 
@relay_resolver(fragment_name: "ClientUser__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientUser", import_path: "/path/to/test/fixture/relay-resolver-strong-object.js") @resolver_source_hash(value: "b1c8ae1937aed7425f5a87a4762ad83d") @unselectable(reason: "This field is intended only for Relay's internal use") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.expected index 802db4376ecbb..ddd35d0d27870 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -26,5 +28,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page(first: Int = 10, userID: ID!): RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-with-args.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page(first: Int = 10, userID: ID!): RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver-with-args.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "0a13488b7b321b8d7770f691a668ba44") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.js index bc914b394cc15..1a2ded6a8df1f 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this 
source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected index 405d4c90dcd9e..12def0880663d 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -22,5 +24,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - greeting(first: Int = 10, userID: ID!, salutation: String!): RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js", fragment_name: "myRootFragment", import_name: "greeting") + greeting(first: Int = 10, userID: ID!, salutation: String!): RelayResolverValue @relay_resolver(import_name: "greeting", import_path: "/path/to/test/fixture/relay-resolver-with-field-and-fragment-args.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "74022d4ca184b7d99e312af76420f20f") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.js index 71b0530919e17..8bbf6b791db3e 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-and-fragment-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.expected index aa90b021292b9..b13add14f888f 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -22,5 +24,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - greeting(salutation: String!): RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-with-field-args.js", fragment_name: "myRootFragment", import_name: "greeting") + greeting(salutation: String!): RelayResolverValue @relay_resolver(import_name: "greeting", import_path: "/path/to/test/fixture/relay-resolver-with-field-args.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "74022d4ca184b7d99e312af76420f20f") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.js index 4c65c814c0d79..ef416893d314d 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-field-args.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.expected deleted file mode 100644 index 096b353a9ddef..0000000000000 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.expected +++ /dev/null @@ -1,41 +0,0 @@ -==================================== INPUT ==================================== -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -// relay:enable_output_type - -/** - * @RelayResolver - * - * @onType User - * @fieldName favorite_page - * @outputType ClientPage - * @rootFragment myRootFragment - * - * The user's favorite page! They probably clicked something in the UI - * to tell us that it was their favorite page and then we put that in a - * database or something. Then we got that info out again and put it out - * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
- */ - -graphql` - fragment myRootFragment on User { - name - } -` - -// %extensions% - -graphql` - type ClientPage { - foo: String - } -` -==================================== OUTPUT =================================== -extend type User { - favorite_page: ClientPage @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver-with-output-type.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "favorite_page") -} diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.js deleted file mode 100644 index 37614f7b4dd87..0000000000000 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver-with-output-type.js +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -// relay:enable_output_type - -/** - * @RelayResolver - * - * @onType User - * @fieldName favorite_page - * @outputType ClientPage - * @rootFragment myRootFragment - * - * The user's favorite page! They probably clicked something in the UI - * to tell us that it was their favorite page and then we put that in a - * database or something. Then we got that info out again and put it out - * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
- */ - -graphql` - fragment myRootFragment on User { - name - } -` - -// %extensions% - -graphql` - type ClientPage { - foo: String - } -` \ No newline at end of file diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.expected index e29cb6a1cd4b2..f75d0ed52fad8 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.expected @@ -6,6 +6,8 @@ * LICENSE file in the root directory of this source tree. */ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * @@ -26,5 +28,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: RelayResolverValue @relay_resolver(import_path: "/path/to/test/fixture/relay-resolver.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: RelayResolverValue @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/relay-resolver.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "0a13488b7b321b8d7770f691a668ba44") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.js index 60a25f38e5207..34026ec50e5cd 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.js +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/relay-resolver.js @@ -5,6 +5,8 @@ * LICENSE file in the root directory of this source tree. 
*/ +// relay:allow_legacy_verbose_syntax + /** * @RelayResolver * diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-interface.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-interface.expected index 5c867983c8d05..806ba570e3fe1 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-interface.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-interface.expected @@ -35,35 +35,40 @@ ` ==================================== OUTPUT =================================== extend interface Node { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type Comment { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type Feedback { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", 
fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type Page { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type PhotoStory { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type Story { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } extend type User { - importantField: String @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "importantField") + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") 
@resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") +} + + +extend type FetchableType { + importantField: String @relay_resolver(has_output_type: true, import_name: "importantField", import_path: "/path/to/test/fixture/terse-relay-resolver-interface.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "27c88753c4af8233f1e6e0c276cb42e9") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.expected new file mode 100644 index 0000000000000..529b2f226a0f5 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page: Page @semanticNonNull + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + +graphql` + fragment myRootFragment on User { + id + } +` +==================================== OUTPUT =================================== +extend type User { + favorite_page: Page @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/terse-relay-resolver-semantic-non-null.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "ba2a3b6d7c4294fef33f921df3b20065") @semanticNonNull +} diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.js new file mode 100644 index 0000000000000..e597830d51ab2 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-semantic-non-null.js @@ -0,0 +1,22 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +/** + * @RelayResolver User.favorite_page: Page @semanticNonNull + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. 
+ */ + +graphql` + fragment myRootFragment on User { + id + } +` diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-output-type.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-output-type.expected index c770be034c81c..09202a14a709a 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-output-type.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-output-type.expected @@ -31,5 +31,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: ClientPage @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver-with-output-type.js", fragment_name: "myRootFragment", has_output_type: true, import_name: "favorite_page") + favorite_page: ClientPage @relay_resolver(has_output_type: true, import_name: "favorite_page", import_path: "/path/to/test/fixture/terse-relay-resolver-with-output-type.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "6debf0d3b679b66e8d0c58dbdb4d422d") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.expected new file mode 100644 index 0000000000000..ac003540d6f49 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.expected @@ -0,0 +1,32 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +/** + * @RelayResolver MyType + */ + +/** + * @RelayResolver MyType.my_field: String + * @rootFragment myRootFragment + */ + +graphql` + fragment myRootFragment on MyType { + id + } +` +==================================== OUTPUT =================================== +type MyType @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(fragment_name: "MyType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "MyType", import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js") @resolver_source_hash(value: "b81f253a757aaba36955be6d8e224c2a") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +extend type MyType { + my_field: String @relay_resolver(has_output_type: true, import_name: "my_field", import_path: "/path/to/test/fixture/terse-relay-resolver-with-root-fragment-on-model.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "c8ed0d9035e933a3b6544323724f5323") +} diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.js new file mode 100644 index 0000000000000..5f810c1ba9477 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.js @@ -0,0 +1,21 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +/** + * @RelayResolver MyType + */ + +/** + * @RelayResolver MyType.my_field: String + * @rootFragment myRootFragment + */ + +graphql` + fragment myRootFragment on MyType { + id + } +` diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver.expected index 44453d04c6b4c..b7bbd5a4dc4e6 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-relay-resolver.expected @@ -23,5 +23,5 @@ graphql` ` ==================================== OUTPUT =================================== extend type User { - favorite_page: Page @relay_resolver(import_path: "/path/to/test/fixture/terse-relay-resolver.js", fragment_name: "myRootFragment", import_name: "favorite_page") + favorite_page: Page @relay_resolver(import_name: "favorite_page", import_path: "/path/to/test/fixture/terse-relay-resolver.js", fragment_name: "myRootFragment") @resolver_source_hash(value: "0a9950ad1f952f5777b27604738fcf91") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.expected new file mode 100644 index 0000000000000..457b27be26186 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.expected @@ -0,0 +1,55 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +// expected-to-throw + +/** + * @RelayResolver User.favorite_page: Page + * @rootFragment myRootFragment + * + * The user's favorite page! 
They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. + */ +graphql` +fragment myRootFragment on User { + name +} +` + +/** +* @RelayResolver User.favorite_page: Page +* @rootFragment myRootFragment2 +* +* The user's favorite page! They probably clicked something in the UI +* to tell us that it was their favorite page and then we put that in a +* database or something. Then we got that info out again and put it out +* again. Anyway, I'm rambling now. Its a page that the user likes. A lot. +*/ +graphql` +fragment myRootFragment2 on User { + name +} +` +==================================== ERROR ==================================== +✖︎ Duplicate field definition 'favorite_page' found. + + /path/to/test/fixture/terse-resolver-duplicated.invalid.js:2:23 + 1 │ * + 2 │ * @RelayResolver User.favorite_page: Page + │ ^^^^^^^^^^^^^ + 3 │ * @rootFragment myRootFragment2 + + ℹ︎ previously defined here + + /path/to/test/fixture/terse-resolver-duplicated.invalid.js:2:24 + 1 │ * + 2 │ * @RelayResolver User.favorite_page: Page + │ ^^^^^^^^^^^^^ + 3 │ * @rootFragment myRootFragment2 diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.js b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.js new file mode 100644 index 0000000000000..a638752e7b309 --- /dev/null +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/terse-resolver-duplicated.invalid.js @@ -0,0 +1,38 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// expected-to-throw + +/** + * @RelayResolver User.favorite_page: Page + * @rootFragment myRootFragment + * + * The user's favorite page! They probably clicked something in the UI + * to tell us that it was their favorite page and then we put that in a + * database or something. Then we got that info out again and put it out + * again. Anyway, I'm rambling now. Its a page that the user likes. A lot. + */ +graphql` +fragment myRootFragment on User { + name +} +` + +/** +* @RelayResolver User.favorite_page: Page +* @rootFragment myRootFragment2 +* +* The user's favorite page! They probably clicked something in the UI +* to tell us that it was their favorite page and then we put that in a +* database or something. Then we got that info out again and put it out +* again. Anyway, I'm rambling now. Its a page that the user likes. A lot. +*/ +graphql` +fragment myRootFragment2 on User { + name +} +` diff --git a/compiler/crates/relay-docblock/tests/to_schema/fixtures/weak-type.expected b/compiler/crates/relay-docblock/tests/to_schema/fixtures/weak-type.expected index 4ba9eeb522032..3dde58df6c626 100644 --- a/compiler/crates/relay-docblock/tests/to_schema/fixtures/weak-type.expected +++ b/compiler/crates/relay-docblock/tests/to_schema/fixtures/weak-type.expected @@ -23,5 +23,5 @@ scalar MyClientTypeModel @__RelayCustomScalar(path: "/path/to/test/fixture/weak- type MyClientType @__RelayResolverModel @RelayOutputType @__RelayWeakObject @deprecated(reason: "Don't use this any more") { - __relay_model_instance: MyClientTypeModel @unselectable(reason: "This field is intended only for Relay's internal use") + __relay_model_instance: MyClientTypeModel! 
@resolver_source_hash(value: "fdcf9d39b12f481b2f5a83f2aec8d922") @unselectable(reason: "This field is intended only for Relay's internal use") } diff --git a/compiler/crates/relay-docblock/tests/to_schema/mod.rs b/compiler/crates/relay-docblock/tests/to_schema/mod.rs deleted file mode 100644 index a88d4a33c8574..0000000000000 --- a/compiler/crates/relay-docblock/tests/to_schema/mod.rs +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::DiagnosticsResult; -use common::FeatureFlag; -use common::SourceLocationKey; -use docblock_syntax::parse_docblock; -use docblock_syntax::DocblockSource; -use extract_graphql::JavaScriptSourceFeature; -use fixture_tests::Fixture; -use graphql_syntax::parse_executable; -use graphql_syntax::ExecutableDefinition; -use graphql_test_helpers::diagnostics_to_sorted_string; -use intern::string_key::Intern; -use relay_docblock::extend_schema_with_resolver_type_system_definition; -use relay_docblock::parse_docblock_ast; -use relay_docblock::ParseOptions; -use relay_test_schema::get_test_schema_with_extensions; -use schema::SDLSchema; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - let (base, mut schema) = match parts.as_slice() { - [base, extensions] => (base, extract_schema_from_js(extensions)), - [base] => (base, get_test_schema_with_extensions("")), - _ => panic!("Invalid fixture input {}", fixture.content), - }; - - let js_features = extract_graphql::extract(base); - - let executable_documents = js_features - .iter() - .enumerate() - .filter_map(|(_, source)| match source { - JavaScriptSourceFeature::GraphQL(source) => Some( - parse_executable(&source.text_source().text, SourceLocationKey::Generated) - .map_err(|diagnostics| { - 
diagnostics_to_sorted_string(&source.text_source().text, &diagnostics) - }) - .map(|document| document.definitions), - ), - JavaScriptSourceFeature::Docblock(_) => None, - }) - .collect::, String>>()? - .iter() - .flatten() - .cloned() - .collect::>(); - - let mut stringify = |i: usize, source: &DocblockSource| -> DiagnosticsResult { - let ast = parse_docblock( - &source.text_source().text, - SourceLocationKey::Embedded { - path: format!("/path/to/test/fixture/{}", fixture.file_name).intern(), - index: i as u16, - }, - )?; - let ir = parse_docblock_ast( - &ast, - Some(&executable_documents), - ParseOptions { - enable_output_type: if fixture.content.contains("// relay:enable_output_type") { - &FeatureFlag::Enabled - } else { - &FeatureFlag::Disabled - }, - }, - )? - .unwrap(); - - // In non-tests, this function (correctly) consumes TypeSystemDefinition when modifying the - // schema. - // In tests, we need to clone, because we **also** want to print the schema changes. - let schema_document = ir - .clone() - .to_graphql_schema_ast(&schema, &Default::default())?; - for definition in &schema_document.definitions { - extend_schema_with_resolver_type_system_definition( - definition.clone(), - Arc::get_mut(&mut schema) - .expect("Expected to be able to get mutable reference to schema"), - schema_document.location, - )?; - } - - ir.to_sdl_string(&schema, &Default::default()) - }; - - let schema_strings = js_features - .iter() - .enumerate() - .filter_map(|(i, source)| match source { - JavaScriptSourceFeature::GraphQL(_) => None, - JavaScriptSourceFeature::Docblock(docblock_source) => Some((i, docblock_source)), - }) - .map(|(i, source)| { - stringify(i, source).map_err(|diagnostics| { - diagnostics_to_sorted_string(&source.text_source().text, &diagnostics) - }) - }) - .collect::, String>>()?; - - Ok(schema_strings.join("\n\n")) -} - -fn extract_schema_from_js(js: &str) -> Arc { - let js_features = extract_graphql::extract(js); - let sdl_text = match js_features.as_slice() 
{ - [JavaScriptSourceFeature::GraphQL(source)] => &source.text_source().text, - _ => { - panic!("Expected %extensions% to contain exactly 1 graphql`` tagged template literal.") - } - }; - - get_test_schema_with_extensions(sdl_text) -} diff --git a/compiler/crates/relay-docblock/tests/to_schema_test.rs b/compiler/crates/relay-docblock/tests/to_schema_test.rs index c25aab8b520ee..2e16619e17378 100644 --- a/compiler/crates/relay-docblock/tests/to_schema_test.rs +++ b/compiler/crates/relay-docblock/tests/to_schema_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<8cf2bea87a87f4c48d8c0b53b4e0c616>> + * @generated SignedSource<<19bffd76c064c76bef1f2108353724b1>> */ mod to_schema; @@ -12,240 +12,261 @@ mod to_schema; use to_schema::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_edge_relay_resolver() { +#[tokio::test] +async fn client_edge_relay_resolver() { let input = include_str!("to_schema/fixtures/client-edge-relay-resolver.js"); let expected = include_str!("to_schema/fixtures/client-edge-relay-resolver.expected"); - test_fixture(transform_fixture, "client-edge-relay-resolver.js", "to_schema/fixtures/client-edge-relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-relay-resolver.js", "to_schema/fixtures/client-edge-relay-resolver.expected", input, expected).await; } -#[test] -fn client_edge_to_non_null_plural_server_object_relay_resolver_invalid() { +#[tokio::test] +async fn client_edge_to_non_null_plural_server_object_relay_resolver_invalid() { let input = include_str!("to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js"); let expected = include_str!("to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected"); - test_fixture(transform_fixture, 
"client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js", "to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-non-null-plural-server-object-relay-resolver.invalid.js", "to_schema/fixtures/client-edge-to-non-null-plural-server-object-relay-resolver.invalid.expected", input, expected).await; } -#[test] -fn client_edge_to_plural_server_object_relay_resolver_invalid() { +#[tokio::test] +async fn client_edge_to_plural_server_object_relay_resolver_invalid() { let input = include_str!("to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.js"); let expected = include_str!("to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected"); - test_fixture(transform_fixture, "client-edge-to-plural-server-object-relay-resolver.invalid.js", "to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-plural-server-object-relay-resolver.invalid.js", "to_schema/fixtures/client-edge-to-plural-server-object-relay-resolver.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn legacy_relay_resolver_with_root_fragment_on_model() { + let input = include_str!("to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.js"); + let expected = include_str!("to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.expected"); + test_fixture(transform_fixture, file!(), "legacy-relay-resolver-with-root-fragment-on-model.js", "to_schema/fixtures/legacy-relay-resolver-with-root-fragment-on-model.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver() { let input = include_str!("to_schema/fixtures/relay-resolver.js"); let expected = include_str!("to_schema/fixtures/relay-resolver.expected"); - 
test_fixture(transform_fixture, "relay-resolver.js", "to_schema/fixtures/relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver.js", "to_schema/fixtures/relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_deprecated() { +#[tokio::test] +async fn relay_resolver_deprecated() { let input = include_str!("to_schema/fixtures/relay-resolver-deprecated.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-deprecated.expected"); - test_fixture(transform_fixture, "relay-resolver-deprecated.js", "to_schema/fixtures/relay-resolver-deprecated.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-deprecated.js", "to_schema/fixtures/relay-resolver-deprecated.expected", input, expected).await; } -#[test] -fn relay_resolver_deprecated_no_description() { +#[tokio::test] +async fn relay_resolver_deprecated_no_description() { let input = include_str!("to_schema/fixtures/relay-resolver-deprecated-no-description.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-deprecated-no-description.expected"); - test_fixture(transform_fixture, "relay-resolver-deprecated-no-description.js", "to_schema/fixtures/relay-resolver-deprecated-no-description.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-deprecated-no-description.js", "to_schema/fixtures/relay-resolver-deprecated-no-description.expected", input, expected).await; } -#[test] -fn relay_resolver_id_invalid() { +#[tokio::test] +async fn relay_resolver_id_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-id.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-id.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-id.invalid.js", "to_schema/fixtures/relay-resolver-id.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-id.invalid.js", 
"to_schema/fixtures/relay-resolver-id.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_implementing_a_field_defined_by_grandparent_interface() { +#[tokio::test] +async fn relay_resolver_implementing_a_field_defined_by_grandparent_interface() { let input = include_str!("to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-implementing-a-field-defined-by-grandparent-interface.js", "to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-implementing-a-field-defined-by-grandparent-interface.js", "to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-grandparent-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_implementing_a_field_defined_by_parent_interface() { +#[tokio::test] +async fn relay_resolver_implementing_a_field_defined_by_parent_interface() { let input = include_str!("to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-implementing-a-field-defined-by-parent-interface.js", "to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-implementing-a-field-defined-by-parent-interface.js", "to_schema/fixtures/relay-resolver-implementing-a-field-defined-by-parent-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_named_export() { +#[tokio::test] +async fn relay_resolver_named_export() { let input = 
include_str!("to_schema/fixtures/relay-resolver-named-export.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-named-export.expected"); - test_fixture(transform_fixture, "relay-resolver-named-export.js", "to_schema/fixtures/relay-resolver-named-export.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-named-export.js", "to_schema/fixtures/relay-resolver-named-export.expected", input, expected).await; } -#[test] -fn relay_resolver_on_interface() { +#[tokio::test] +async fn relay_resolver_on_interface() { let input = include_str!("to_schema/fixtures/relay-resolver-on-interface.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-on-interface.js", "to_schema/fixtures/relay-resolver-on-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-interface.js", "to_schema/fixtures/relay-resolver-on-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_on_interface_implementing_a_field_defined_by_parent_interface() { +#[tokio::test] +async fn relay_resolver_on_interface_implementing_a_field_defined_by_parent_interface() { let input = include_str!("to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js", "to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.js", "to_schema/fixtures/relay-resolver-on-interface-implementing-a-field-defined-by-parent-interface.expected", input, 
expected).await; } -#[test] -fn relay_resolver_on_interface_with_type_invalid() { +#[tokio::test] +async fn relay_resolver_on_interface_with_type_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-interface-with-type.invalid.js", "to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-interface-with-type.invalid.js", "to_schema/fixtures/relay-resolver-on-interface-with-type.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_invalid_interface_invalid() { +#[tokio::test] +async fn relay_resolver_on_invalid_interface_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-invalid-interface.invalid.js", "to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-invalid-interface.invalid.js", "to_schema/fixtures/relay-resolver-on-invalid-interface.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_invalid_type_invalid() { +#[tokio::test] +async fn relay_resolver_on_invalid_type_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-on-invalid-type.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-invalid-type.invalid.js", "to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"relay-resolver-on-invalid-type.invalid.js", "to_schema/fixtures/relay-resolver-on-invalid-type.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_on_type_with_interface_invalid() { +#[tokio::test] +async fn relay_resolver_on_type_with_interface_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-on-type-with-interface.invalid.js", "to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-on-type-with-interface.invalid.js", "to_schema/fixtures/relay-resolver-on-type-with-interface.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object() { +#[tokio::test] +async fn relay_resolver_strong_object() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object.js", "to_schema/fixtures/relay-resolver-strong-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object.js", "to_schema/fixtures/relay-resolver-strong-object.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "relay-resolver-strong-object-with-implements.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements_interface_bad_id_invalid() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements_interface_bad_id_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements-interface-bad-id.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements-interface-bad-id.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-bad-id.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements_interface_no_id_invalid() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements_interface_no_id_invalid() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements-interface-no-id.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements-interface-no-id.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-no-id.invalid.expected", input, expected).await; } -#[test] -fn 
relay_resolver_strong_object_with_implements_interface_non_interface() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements_interface_non_interface() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements-interface-non-interface.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements-interface-non-interface.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-interface-non-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements_non_existing_type() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements_non_existing_type() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements-non-existing-type.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements-non-existing-type.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-non-existing-type.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_implements_server_interface_invalid() { +#[tokio::test] +async fn relay_resolver_strong_object_with_implements_server_interface_invalid() { let input = 
include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-implements-server-interface.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-implements-server-interface.invalid.js", "to_schema/fixtures/relay-resolver-strong-object-with-implements-server-interface.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_strong_object_with_multiple_implements() { +#[tokio::test] +async fn relay_resolver_strong_object_with_multiple_implements() { let input = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected"); - test_fixture(transform_fixture, "relay-resolver-strong-object-with-multiple-implements.js", "to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-strong-object-with-multiple-implements.js", "to_schema/fixtures/relay-resolver-strong-object-with-multiple-implements.expected", input, expected).await; } -#[test] -fn relay_resolver_with_args() { +#[tokio::test] +async fn relay_resolver_with_args() { let input = include_str!("to_schema/fixtures/relay-resolver-with-args.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-with-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-args.js", "to_schema/fixtures/relay-resolver-with-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-args.js", 
"to_schema/fixtures/relay-resolver-with-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_field_and_fragment_args() { +#[tokio::test] +async fn relay_resolver_with_field_and_fragment_args() { let input = include_str!("to_schema/fixtures/relay-resolver-with-field-and-fragment-args.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-field-and-fragment-args.js", "to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-field-and-fragment-args.js", "to_schema/fixtures/relay-resolver-with-field-and-fragment-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_field_args() { +#[tokio::test] +async fn relay_resolver_with_field_args() { let input = include_str!("to_schema/fixtures/relay-resolver-with-field-args.js"); let expected = include_str!("to_schema/fixtures/relay-resolver-with-field-args.expected"); - test_fixture(transform_fixture, "relay-resolver-with-field-args.js", "to_schema/fixtures/relay-resolver-with-field-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-field-args.js", "to_schema/fixtures/relay-resolver-with-field-args.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type() { - let input = include_str!("to_schema/fixtures/relay-resolver-with-output-type.js"); - let expected = include_str!("to_schema/fixtures/relay-resolver-with-output-type.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type.js", "to_schema/fixtures/relay-resolver-with-output-type.expected", input, expected); -} - -#[test] -fn terse_relay_resolver() { +#[tokio::test] +async fn terse_relay_resolver() { let input = include_str!("to_schema/fixtures/terse-relay-resolver.js"); let expected = 
include_str!("to_schema/fixtures/terse-relay-resolver.expected"); - test_fixture(transform_fixture, "terse-relay-resolver.js", "to_schema/fixtures/terse-relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver.js", "to_schema/fixtures/terse-relay-resolver.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_id_invalid() { +#[tokio::test] +async fn terse_relay_resolver_id_invalid() { let input = include_str!("to_schema/fixtures/terse-relay-resolver-id.invalid.js"); let expected = include_str!("to_schema/fixtures/terse-relay-resolver-id.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-id.invalid.js", "to_schema/fixtures/terse-relay-resolver-id.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-id.invalid.js", "to_schema/fixtures/terse-relay-resolver-id.invalid.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_interface() { +#[tokio::test] +async fn terse_relay_resolver_interface() { let input = include_str!("to_schema/fixtures/terse-relay-resolver-interface.js"); let expected = include_str!("to_schema/fixtures/terse-relay-resolver-interface.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-interface.js", "to_schema/fixtures/terse-relay-resolver-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-interface.js", "to_schema/fixtures/terse-relay-resolver-interface.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_non_existent_type_invalid() { +#[tokio::test] +async fn terse_relay_resolver_non_existent_type_invalid() { let input = include_str!("to_schema/fixtures/terse-relay-resolver-non-existent-type.invalid.js"); let expected = include_str!("to_schema/fixtures/terse-relay-resolver-non-existent-type.invalid.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-non-existent-type.invalid.js", 
"to_schema/fixtures/terse-relay-resolver-non-existent-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-non-existent-type.invalid.js", "to_schema/fixtures/terse-relay-resolver-non-existent-type.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_semantic_non_null() { + let input = include_str!("to_schema/fixtures/terse-relay-resolver-semantic-non-null.js"); + let expected = include_str!("to_schema/fixtures/terse-relay-resolver-semantic-non-null.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-semantic-non-null.js", "to_schema/fixtures/terse-relay-resolver-semantic-non-null.expected", input, expected).await; } -#[test] -fn terse_relay_resolver_with_output_type() { +#[tokio::test] +async fn terse_relay_resolver_with_output_type() { let input = include_str!("to_schema/fixtures/terse-relay-resolver-with-output-type.js"); let expected = include_str!("to_schema/fixtures/terse-relay-resolver-with-output-type.expected"); - test_fixture(transform_fixture, "terse-relay-resolver-with-output-type.js", "to_schema/fixtures/terse-relay-resolver-with-output-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-with-output-type.js", "to_schema/fixtures/terse-relay-resolver-with-output-type.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_relay_resolver_with_root_fragment_on_model() { + let input = include_str!("to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.js"); + let expected = include_str!("to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.expected"); + test_fixture(transform_fixture, file!(), "terse-relay-resolver-with-root-fragment-on-model.js", "to_schema/fixtures/terse-relay-resolver-with-root-fragment-on-model.expected", input, expected).await; +} + +#[tokio::test] +async fn terse_resolver_duplicated_invalid() { + let input = 
include_str!("to_schema/fixtures/terse-resolver-duplicated.invalid.js"); + let expected = include_str!("to_schema/fixtures/terse-resolver-duplicated.invalid.expected"); + test_fixture(transform_fixture, file!(), "terse-resolver-duplicated.invalid.js", "to_schema/fixtures/terse-resolver-duplicated.invalid.expected", input, expected).await; } -#[test] -fn weak_type() { +#[tokio::test] +async fn weak_type() { let input = include_str!("to_schema/fixtures/weak-type.js"); let expected = include_str!("to_schema/fixtures/weak-type.expected"); - test_fixture(transform_fixture, "weak-type.js", "to_schema/fixtures/weak-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "weak-type.js", "to_schema/fixtures/weak-type.expected", input, expected).await; } diff --git a/compiler/crates/relay-lsp/Cargo.toml b/compiler/crates/relay-lsp/Cargo.toml index 0f8feae980ed4..26a8e7623f455 100644 --- a/compiler/crates/relay-lsp/Cargo.toml +++ b/compiler/crates/relay-lsp/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/relay-lsp:[find_field_usages_test,hover,relay-lsp] + [package] name = "relay-lsp" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -13,7 +15,7 @@ path = "tests/hover_test.rs" [dependencies] common = { path = "../common" } crossbeam = "0.8" -dashmap = { version = "5.4", features = ["raw-api", "rayon", "serde"] } +dashmap = { version = "5.5.3", features = ["rayon", "serde"] } docblock-shared = { path = "../docblock-shared" } docblock-syntax = { path = "../docblock-syntax" } dunce = "1.0.2" @@ -24,23 +26,24 @@ graphql-syntax = { path = "../graphql-syntax" } graphql-text-printer = { path = "../graphql-text-printer" } graphql-watchman = { path = "../graphql-watchman" } intern = { path = "../intern" } -itertools = "0.10.3" +itertools = "0.11.0" log = { version = "0.4.17", features = ["kv_unstable", "kv_unstable_std"] } -lsp-server = "0.5.2" 
-lsp-types = "0.93.2" +lsp-server = "0.7.2" +lsp-types = "0.94.1" percent-encoding = "2.1" rand = { version = "0.8", features = ["small_rng"] } -rayon = "1.2" +rayon = "1.9.0" relay-compiler = { path = "../relay-compiler" } relay-docblock = { path = "../relay-docblock" } relay-transforms = { path = "../relay-transforms" } resolution-path = { path = "../resolution-path" } schema = { path = "../schema" } +schema-diff = { path = "../schema-diff" } schema-documentation = { path = "../schema-documentation" } schema-print = { path = "../schema-print" } -serde = { version = "1.0.136", features = ["derive", "rc"] } -serde_json = { version = "1.0.79", features = ["float_roundtrip", "unbounded_depth"] } -tokio = { version = "1.25.0", features = ["full", "test-util", "tracing"] } +serde = { version = "1.0.185", features = ["derive", "rc"] } +serde_json = { version = "1.0.100", features = ["float_roundtrip", "unbounded_depth"] } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } [dev-dependencies] fixture-tests = { path = "../fixture-tests" } diff --git a/compiler/crates/relay-lsp/src/code_action.rs b/compiler/crates/relay-lsp/src/code_action.rs new file mode 100644 index 0000000000000..d6d41a708249d --- /dev/null +++ b/compiler/crates/relay-lsp/src/code_action.rs @@ -0,0 +1,352 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod create_name_suggestion; + +use std::collections::HashMap; +use std::collections::HashSet; + +use common::Span; +use create_name_suggestion::create_default_name; +use create_name_suggestion::create_default_name_with_index; +use create_name_suggestion::create_impactful_name; +use create_name_suggestion::create_name_wrapper; +use create_name_suggestion::DefinitionNameSuffix; +use graphql_syntax::ExecutableDefinition; +use intern::Lookup; +use lsp_types::request::CodeActionRequest; +use lsp_types::request::Request; +use lsp_types::CodeAction; +use lsp_types::CodeActionOrCommand; +use lsp_types::Diagnostic; +use lsp_types::Position; +use lsp_types::Range; +use lsp_types::TextDocumentPositionParams; +use lsp_types::TextEdit; +use lsp_types::Url; +use lsp_types::WorkspaceEdit; +use resolution_path::FragmentDefinitionPath; +use resolution_path::IdentParent; +use resolution_path::IdentPath; +use resolution_path::OperationDefinitionPath; +use resolution_path::ResolutionPath; +use resolution_path::ResolvePosition; +use serde_json::Value; + +use self::create_name_suggestion::create_default_fragment_name; +use self::create_name_suggestion::create_default_fragment_name_with_index; +use crate::lsp_runtime_error::LSPRuntimeError; +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::GlobalState; +use crate::utils::is_file_uri_in_dir; + +pub(crate) fn on_code_action( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let uri = params.text_document.uri.clone(); + + if !is_file_uri_in_dir(state.root_dir(), &uri) { + return Err(LSPRuntimeError::ExpectedError); + } + + if let Some(diagnostic) = state.get_diagnostic_for_range(&uri, params.range) { + let code_actions = get_code_actions_from_diagnostics(&uri, diagnostic); + if code_actions.is_some() { + return Ok(code_actions); + } + } + + let definitions = state.resolve_executable_definitions(¶ms.text_document.uri)?; + + let text_document_position_params = 
TextDocumentPositionParams { + text_document: params.text_document, + position: params.range.start, + }; + let (document, position_span) = + state.extract_executable_document_from_text(&text_document_position_params, 1)?; + + let path = document.resolve((), position_span); + + let used_definition_names = get_definition_names(&definitions); + let result = get_code_actions(path, used_definition_names, uri, params.range) + .ok_or(LSPRuntimeError::ExpectedError)?; + Ok(Some(result)) +} + +fn get_code_actions_from_diagnostics( + url: &Url, + diagnostic: Diagnostic, +) -> Option> { + let code_actions = if let Some(Value::Array(data)) = &diagnostic.data { + data.iter() + .filter_map(|item| match item { + Value::String(suggestion) => Some(create_code_action( + "Fix Error", + suggestion.to_string(), + url, + diagnostic.range, + )), + _ => None, + }) + .collect::<_>() + } else { + vec![] + }; + + if !code_actions.is_empty() { + Some(code_actions) + } else { + None + } +} + +struct FragmentAndOperationNames { + operation_names: HashSet, + fragment_names: HashSet, +} + +fn get_definition_names(definitions: &[ExecutableDefinition]) -> FragmentAndOperationNames { + let mut operation_names = HashSet::new(); + let mut fragment_names = HashSet::new(); + for definition in definitions.iter() { + match definition { + ExecutableDefinition::Operation(operation) => { + if let Some(name) = &operation.name { + operation_names.insert(name.value.lookup().to_string()); + } + } + ExecutableDefinition::Fragment(fragment) => { + fragment_names.insert(fragment.name.value.lookup().to_string()); + } + } + } + + FragmentAndOperationNames { + operation_names, + fragment_names, + } +} + +fn get_code_actions( + path: ResolutionPath<'_>, + used_definition_names: FragmentAndOperationNames, + url: Url, + range: Range, +) -> Option> { + match path { + ResolutionPath::Ident(IdentPath { + inner: _, + parent: + IdentParent::OperationDefinitionName(OperationDefinitionPath { + inner: operation_definition, + 
parent: _, + }), + }) => { + let suffix = if let Some((_, operation_kind)) = operation_definition.operation { + DefinitionNameSuffix::from(&operation_kind) + } else { + return None; + }; + + let operation_name = if let Some(operation_name) = &operation_definition.name { + operation_name + } else { + return None; + }; + + let code_action_range = get_code_action_range(range, operation_name.span); + Some(create_rename_operation_code_actions( + operation_name.value.lookup(), + used_definition_names.operation_names, + suffix, + &url, + code_action_range, + )) + } + ResolutionPath::Ident(IdentPath { + inner: _, + parent: + IdentParent::FragmentDefinitionName(FragmentDefinitionPath { + inner: fragment_definition, + parent: _, + }), + }) => { + let code_action_range = get_code_action_range(range, fragment_definition.name.span); + Some(create_rename_fragment_code_actions( + fragment_definition.name.value.lookup(), + used_definition_names.fragment_names, + &url, + code_action_range, + )) + } + _ => None, + } +} + +fn create_rename_fragment_code_actions( + _original_name: &str, + used_names: HashSet, + url: &Url, + range: Range, +) -> Vec { + let mut suggested_names = Vec::with_capacity(2); + suggested_names.push(create_default_fragment_name(url.path())); + suggested_names.push(create_default_fragment_name_with_index( + url.path(), + &used_names, + )); + + suggested_names + .iter() + .filter_map(|suggested_name| { + if let Some(name) = suggested_name { + if used_names.contains(name) { + return None; + } + + Some(create_code_action( + "Rename Fragment", + name.clone(), + url, + range, + )) + } else { + None + } + }) + .collect::>() +} + +fn create_rename_operation_code_actions( + original_name: &str, + used_names: HashSet, + suffix: DefinitionNameSuffix, + url: &Url, + range: Range, +) -> Vec { + let mut suggested_names = Vec::with_capacity(4); + suggested_names.push(create_default_name(url.path(), suffix)); + suggested_names.push(create_default_name_with_index( + url.path(), 
+ suffix, + &used_names, + )); + suggested_names.push(create_name_wrapper(original_name, url.path(), suffix)); + suggested_names.push(create_impactful_name(url.path(), suffix)); + suggested_names + .iter() + .filter_map(|suggested_name| { + if let Some(name) = suggested_name { + if used_names.contains(name) { + return None; + } + + Some(create_code_action( + "Rename Operation", + name.clone(), + url, + range, + )) + } else { + None + } + }) + .collect::>() +} + +fn get_code_action_range(range: Range, span: Span) -> Range { + Range { + start: Position { + line: range.start.line, + character: (span.start - 1), + }, + end: Position { + line: range.start.line, + character: (span.end - 1), + }, + } +} + +fn create_code_action( + title: &str, + new_name: String, + url: &Url, + range: Range, +) -> CodeActionOrCommand { + let mut changes = HashMap::new(); + let title = format!("{}: '{}'", title, &new_name); + let text_edit = TextEdit { + range, + new_text: new_name, + }; + changes.insert(url.clone(), vec![text_edit]); + + CodeActionOrCommand::CodeAction(CodeAction { + title, + kind: Some(lsp_types::CodeActionKind::QUICKFIX), + diagnostics: None, + edit: Some(WorkspaceEdit { + changes: Some(changes), + document_changes: None, + ..Default::default() + }), + command: None, + is_preferred: Some(false), + ..Default::default() + }) +} + +#[cfg(test)] +mod tests { + use lsp_types::CodeActionOrCommand; + use lsp_types::Diagnostic; + use lsp_types::Position; + use lsp_types::Range; + use lsp_types::Url; + use serde_json::json; + + use crate::code_action::get_code_actions_from_diagnostics; + + #[test] + fn test_get_code_actions_from_diagnostics() { + let diagnostic = Diagnostic { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + message: "Error Message".to_string(), + data: Some(json!(vec!["item1", "item2"])), + ..Default::default() + }; + let url = Url::parse("file://relay.js").unwrap(); + let code_actions 
= get_code_actions_from_diagnostics(&url, diagnostic); + + assert_eq!( + code_actions + .unwrap() + .iter() + .map(|item| { + match item { + CodeActionOrCommand::CodeAction(action) => action.title.clone(), + _ => panic!("unexpected case"), + } + }) + .collect::>(), + vec![ + "Fix Error: 'item1'".to_string(), + "Fix Error: 'item2'".to_string(), + ] + ); + } +} diff --git a/compiler/crates/relay-lsp/src/code_action/create_name_suggestion.rs b/compiler/crates/relay-lsp/src/code_action/create_name_suggestion.rs index d542179dc56a2..6d1f621ad462b 100644 --- a/compiler/crates/relay-lsp/src/code_action/create_name_suggestion.rs +++ b/compiler/crates/relay-lsp/src/code_action/create_name_suggestion.rs @@ -35,7 +35,7 @@ impl fmt::Display for DefinitionNameSuffix { } } -/// This function will create a default name suggestion for operation/fragment in a file. +/// This function will create a default name suggestion for an operation in a file. /// Default name is {prefix}{Query|Mutation|Subscription}, /// where {prefix} is a cameCased base file stem, without extension and suffix (like .react.js, .jsx, etc..) pub fn create_default_name(file_name: &str, suffix: DefinitionNameSuffix) -> Option { @@ -47,6 +47,10 @@ pub fn create_default_name(file_name: &str, suffix: DefinitionNameSuffix) -> Opt } } +pub fn create_default_fragment_name(file_name: &str) -> Option { + extract_module_name(file_name) +} + /// This function will create a name suggestion for operation/fragment /// in a file adding an incremental index. 
/// Suggested name is {prefix}{index}{Query|Mutation|Subscription}, @@ -69,6 +73,22 @@ pub fn create_default_name_with_index( } } +pub fn create_default_fragment_name_with_index( + file_name: &str, + used_names: &HashSet, +) -> Option { + let module_name = extract_module_name(file_name)?; + let mut index = 1; + loop { + let new_name = format!("{}{}", module_name, index); + if used_names.contains(&new_name) { + index += 1; + } else { + return Some(new_name); + } + } +} + /// If you already have a name for your fragment/operation, /// it doesn't start/end with correct prefixes/suffixes - /// this function will return a correctly wrapped name. diff --git a/compiler/crates/relay-lsp/src/code_action/mod.rs b/compiler/crates/relay-lsp/src/code_action/mod.rs deleted file mode 100644 index f83a51a43fc84..0000000000000 --- a/compiler/crates/relay-lsp/src/code_action/mod.rs +++ /dev/null @@ -1,302 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -mod create_name_suggestion; - -use std::collections::HashMap; -use std::collections::HashSet; - -use common::Span; -use create_name_suggestion::create_default_name; -use create_name_suggestion::create_default_name_with_index; -use create_name_suggestion::create_impactful_name; -use create_name_suggestion::create_name_wrapper; -use create_name_suggestion::DefinitionNameSuffix; -use graphql_syntax::ExecutableDefinition; -use intern::Lookup; -use lsp_types::request::CodeActionRequest; -use lsp_types::request::Request; -use lsp_types::CodeAction; -use lsp_types::CodeActionOrCommand; -use lsp_types::Diagnostic; -use lsp_types::Position; -use lsp_types::Range; -use lsp_types::TextDocumentPositionParams; -use lsp_types::TextEdit; -use lsp_types::Url; -use lsp_types::WorkspaceEdit; -use resolution_path::IdentParent; -use resolution_path::IdentPath; -use resolution_path::OperationDefinitionPath; -use resolution_path::ResolutionPath; -use resolution_path::ResolvePosition; -use serde_json::Value; - -use crate::lsp_runtime_error::LSPRuntimeError; -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::server::GlobalState; -use crate::utils::is_file_uri_in_dir; - -pub(crate) fn on_code_action( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let uri = params.text_document.uri.clone(); - - if !is_file_uri_in_dir(state.root_dir(), &uri) { - return Err(LSPRuntimeError::ExpectedError); - } - - if let Some(js_server) = state.get_js_language_sever() { - if let Ok(result) = js_server.on_code_action(¶ms, state) { - return Ok(result); - } - } - - if let Some(diagnostic) = state.get_diagnostic_for_range(&uri, params.range) { - let code_actions = get_code_actions_from_diagnostics(&uri, diagnostic); - if code_actions.is_some() { - return Ok(code_actions); - } - } - - let definitions = state.resolve_executable_definitions(¶ms.text_document.uri)?; - - let text_document_position_params = TextDocumentPositionParams { - text_document: 
params.text_document, - position: params.range.start, - }; - let (document, position_span) = - state.extract_executable_document_from_text(&text_document_position_params, 1)?; - - let path = document.resolve((), position_span); - - let used_definition_names = get_definition_names(&definitions); - let result = get_code_actions(path, used_definition_names, uri, params.range) - .ok_or(LSPRuntimeError::ExpectedError)?; - Ok(Some(result)) -} - -fn get_code_actions_from_diagnostics( - url: &Url, - diagnostic: Diagnostic, -) -> Option> { - let code_actions = if let Some(Value::Array(data)) = &diagnostic.data { - data.iter() - .filter_map(|item| match item { - Value::String(suggestion) => Some(create_code_action( - "Fix Error", - suggestion.to_string(), - url, - diagnostic.range, - )), - _ => None, - }) - .collect::<_>() - } else { - vec![] - }; - - if !code_actions.is_empty() { - Some(code_actions) - } else { - None - } -} - -struct FragmentAndOperationNames { - operation_names: HashSet, - _fragment_names: HashSet, -} - -fn get_definition_names(definitions: &[ExecutableDefinition]) -> FragmentAndOperationNames { - let mut operation_names = HashSet::new(); - let mut fragment_names = HashSet::new(); - for definition in definitions.iter() { - match definition { - ExecutableDefinition::Operation(operation) => { - if let Some(name) = &operation.name { - operation_names.insert(name.value.lookup().to_string()); - } - } - ExecutableDefinition::Fragment(fragment) => { - fragment_names.insert(fragment.name.value.lookup().to_string()); - } - } - } - - FragmentAndOperationNames { - operation_names, - _fragment_names: fragment_names, - } -} - -fn get_code_actions( - path: ResolutionPath<'_>, - used_definition_names: FragmentAndOperationNames, - url: Url, - range: Range, -) -> Option> { - match path { - ResolutionPath::Ident(IdentPath { - inner: _, - parent: - IdentParent::OperationDefinitionName(OperationDefinitionPath { - inner: operation_definition, - parent: _, - }), - }) => { - 
let suffix = if let Some((_, operation_kind)) = operation_definition.operation { - DefinitionNameSuffix::from(&operation_kind) - } else { - return None; - }; - - let operation_name = if let Some(operation_name) = &operation_definition.name { - operation_name - } else { - return None; - }; - - let code_action_range = get_code_action_range(range, operation_name.span); - Some(create_code_actions( - "Rename Operation", - operation_name.value.lookup(), - used_definition_names.operation_names, - suffix, - &url, - code_action_range, - )) - } - _ => None, - } -} - -fn create_code_actions( - title: &str, - original_name: &str, - used_names: HashSet, - suffix: DefinitionNameSuffix, - url: &Url, - range: Range, -) -> Vec { - let mut suggested_names = Vec::with_capacity(4); - suggested_names.push(create_default_name(url.path(), suffix)); - suggested_names.push(create_default_name_with_index( - url.path(), - suffix, - &used_names, - )); - suggested_names.push(create_name_wrapper(original_name, url.path(), suffix)); - suggested_names.push(create_impactful_name(url.path(), suffix)); - suggested_names - .iter() - .filter_map(|suggested_name| { - if let Some(name) = suggested_name { - if used_names.contains(name) { - return None; - } - - Some(create_code_action(title, name.clone(), url, range)) - } else { - None - } - }) - .collect::>() -} - -fn get_code_action_range(range: Range, span: Span) -> Range { - Range { - start: Position { - line: range.start.line, - character: (span.start - 1), - }, - end: Position { - line: range.start.line, - character: (span.end - 1), - }, - } -} - -fn create_code_action( - title: &str, - new_name: String, - url: &Url, - range: Range, -) -> CodeActionOrCommand { - let mut changes = HashMap::new(); - let title = format!("{}: '{}'", title, &new_name); - let text_edit = TextEdit { - range, - new_text: new_name, - }; - changes.insert(url.clone(), vec![text_edit]); - - CodeActionOrCommand::CodeAction(CodeAction { - title, - kind: 
Some(lsp_types::CodeActionKind::QUICKFIX), - diagnostics: None, - edit: Some(WorkspaceEdit { - changes: Some(changes), - document_changes: None, - ..Default::default() - }), - command: None, - is_preferred: Some(false), - ..Default::default() - }) -} - -#[cfg(test)] -mod tests { - use lsp_types::CodeActionOrCommand; - use lsp_types::Diagnostic; - use lsp_types::Position; - use lsp_types::Range; - use lsp_types::Url; - use serde_json::json; - - use crate::code_action::get_code_actions_from_diagnostics; - - #[test] - fn test_get_code_actions_from_diagnostics() { - let diagnostic = Diagnostic { - range: Range { - start: Position { - line: 0, - character: 0, - }, - end: Position { - line: 0, - character: 0, - }, - }, - message: "Error Message".to_string(), - data: Some(json!(vec!["item1", "item2"])), - ..Default::default() - }; - let url = Url::parse("file://relay.js").unwrap(); - let code_actions = get_code_actions_from_diagnostics(&url, diagnostic); - - assert_eq!( - code_actions - .unwrap() - .iter() - .map(|item| { - match item { - CodeActionOrCommand::CodeAction(action) => action.title.clone(), - _ => panic!("unexpected case"), - } - }) - .collect::>(), - vec![ - "Fix Error: 'item1'".to_string(), - "Fix Error: 'item2'".to_string(), - ] - ); - } -} diff --git a/compiler/crates/relay-lsp/src/completion.rs b/compiler/crates/relay-lsp/src/completion.rs new file mode 100644 index 0000000000000..3f09db9a8c1b1 --- /dev/null +++ b/compiler/crates/relay-lsp/src/completion.rs @@ -0,0 +1,1427 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//! 
Utilities for providing the completion language feature + +use common::ArgumentName; +use common::DirectiveName; +use common::Named; +use common::NamedItem; +use common::Span; +use fnv::FnvHashSet; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::VariableDefinition; +use graphql_ir::VariableName; +use graphql_ir::DIRECTIVE_ARGUMENTS; +use graphql_syntax::Argument; +use graphql_syntax::ConstantValue; +use graphql_syntax::Directive; +use graphql_syntax::DirectiveLocation; +use graphql_syntax::ExecutableDefinition; +use graphql_syntax::ExecutableDocument; +use graphql_syntax::FragmentSpread; +use graphql_syntax::InlineFragment; +use graphql_syntax::LinkedField; +use graphql_syntax::List; +use graphql_syntax::OperationDefinition; +use graphql_syntax::ScalarField; +use graphql_syntax::Selection; +use graphql_syntax::TokenKind; +use graphql_syntax::Value; +use intern::string_key::StringKey; +use intern::Lookup; +use log::debug; +use lsp_types::request::Completion; +use lsp_types::request::Request; +use lsp_types::request::ResolveCompletionItem; +use lsp_types::CompletionItem; +use lsp_types::CompletionItemKind; +use lsp_types::CompletionResponse; +use lsp_types::Documentation; +use lsp_types::InsertTextFormat; +use lsp_types::MarkupContent; +use lsp_types::MarkupKind; +use schema::Argument as SchemaArgument; +use schema::Directive as SchemaDirective; +use schema::InputObject; +use schema::InterfaceID; +use schema::ObjectID; +use schema::SDLSchema; +use schema::Schema; +use schema::Type; +use schema::TypeReference; +use schema::TypeWithFields; + +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::node_resolution_info::TypePath; +use crate::node_resolution_info::TypePathItem; +use crate::server::GlobalState; +use crate::LSPRuntimeError; +use crate::SchemaDocumentation; + +#[derive(Debug, Clone)] +pub enum CompletionKind { + FieldName { + existing_linked_field: bool, + }, + FragmentSpread, 
+ DirectiveName { + location: DirectiveLocation, + }, + ArgumentName { + has_colon: bool, + existing_names: FnvHashSet, + kind: ArgumentKind, + }, + ArgumentValue { + executable_name: ExecutableName, + argument_name: StringKey, + kind: ArgumentKind, + }, + InlineFragmentType { + existing_inline_fragment: bool, + }, + InputObjectFieldName { + name: StringKey, + existing_names: FnvHashSet, + input_field_path: Vec, + }, +} + +#[derive(Debug, Clone)] +pub enum ArgumentKind { + Field, + Directive(DirectiveName), + ArgumentsDirective(StringKey), +} + +#[derive(Debug)] +pub struct CompletionRequest { + /// The type of the completion request we're responding to + kind: CompletionKind, + /// A list of type metadata that we can use to resolve the leaf + /// type the request is being made against + type_path: TypePath, + /// The project the request belongs to + pub project_name: StringKey, +} + +impl CompletionRequest { + fn new(project_name: StringKey, kind: CompletionKind, type_path: TypePath) -> Self { + Self { + kind, + type_path, + project_name, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub enum ExecutableName { + Operation(StringKey), + Fragment(FragmentDefinitionName), +} + +trait ArgumentLike { + fn name(&self) -> StringKey; + fn type_(&self) -> &TypeReference; +} + +impl ArgumentLike for &SchemaArgument { + fn name(&self) -> StringKey { + self.name.item.0 + } + fn type_(&self) -> &TypeReference { + &self.type_ + } +} + +impl ArgumentLike for &VariableDefinition { + fn name(&self) -> StringKey { + self.name.item.0 + } + fn type_(&self) -> &TypeReference { + &self.type_ + } +} + +struct CompletionRequestBuilder { + project_name: StringKey, + current_executable_name: Option, +} + +impl CompletionRequestBuilder { + fn new(project_name: StringKey) -> Self { + Self { + project_name, + current_executable_name: None, + } + } + + fn new_request(&self, kind: CompletionKind, type_path: Vec) -> CompletionRequest { + CompletionRequest::new(self.project_name, kind, 
type_path.into()) + } + + fn create_completion_request( + &mut self, + document: ExecutableDocument, + position_span: Span, + ) -> Option { + for definition in document.definitions { + match &definition { + ExecutableDefinition::Operation(operation) => { + if operation.location.contains(position_span) { + self.current_executable_name = operation + .name + .as_ref() + .map(|name| ExecutableName::Operation(name.value)); + let (_, kind) = operation.operation?; + let type_path = vec![TypePathItem::Operation(kind)]; + + debug!( + "Completion request is within operation: {:?}", + operation.name + ); + let OperationDefinition { + selections, + directives, + .. + } = operation; + + let directive_location = kind.into(); + + if let Some(req) = self.build_request_from_selection_or_directives( + selections, + directives, + directive_location, + position_span, + type_path, + ) { + return Some(req); + } + } + // Check if the position span is within this operation's span + } + ExecutableDefinition::Fragment(fragment) => { + if fragment.location.contains(position_span) { + self.current_executable_name = Some(ExecutableName::Fragment( + FragmentDefinitionName(fragment.name.value), + )); + let type_name = fragment.type_condition.type_.value; + let type_path = vec![TypePathItem::FragmentDefinition { type_name }]; + if let Some(req) = self.build_request_from_selection_or_directives( + &fragment.selections, + &fragment.directives, + DirectiveLocation::FragmentDefinition, + position_span, + type_path, + ) { + return Some(req); + } + } + } + } + } + None + } + + fn build_request_from_selections( + &self, + selections: &List, + position_span: Span, + mut type_path: Vec, + ) -> Option { + for item in &selections.items { + if item.span().contains(position_span) { + return match item { + Selection::LinkedField(node) => { + if node.name.span.contains(position_span) { + return Some(self.new_request( + CompletionKind::FieldName { + existing_linked_field: true, + }, + type_path, + )); + } + let 
LinkedField { + name, + selections, + directives, + arguments, + .. + } = node; + type_path.push(TypePathItem::LinkedField { name: name.value }); + if let Some(arguments) = arguments { + if arguments.span.contains(position_span) { + return self.build_request_from_arguments( + arguments, + position_span, + type_path, + ArgumentKind::Field, + ); + } + } + self.build_request_from_selection_or_directives( + selections, + directives, + DirectiveLocation::Field, + position_span, + type_path, + ) + } + Selection::FragmentSpread(spread) => { + let FragmentSpread { + name, directives, .. + } = spread; + if name.span.contains(position_span) { + Some(self.new_request(CompletionKind::FragmentSpread, type_path)) + } else { + self.build_request_from_directives( + directives, + DirectiveLocation::FragmentSpread, + position_span, + type_path, + Some(name.value), + ) + } + } + Selection::InlineFragment(node) => { + let InlineFragment { + selections, + directives, + type_condition, + .. + } = node; + if let Some(type_condition) = type_condition { + let type_name = type_condition.type_.value; + if type_condition.span.contains(position_span) { + return Some(self.new_request( + CompletionKind::InlineFragmentType { + existing_inline_fragment: selections.start.kind + != TokenKind::Empty, + }, + type_path, + )); + } + type_path.push(TypePathItem::InlineFragment { type_name }); + } + self.build_request_from_selection_or_directives( + selections, + directives, + DirectiveLocation::InlineFragment, + position_span, + type_path, + ) + } + Selection::ScalarField(node) => { + if node.name.span.contains(position_span) { + return Some(self.new_request( + CompletionKind::FieldName { + existing_linked_field: false, + }, + type_path, + )); + } + let ScalarField { + directives, + name, + arguments, + .. 
+ } = node; + type_path.push(TypePathItem::ScalarField { name: name.value }); + if let Some(arguments) = arguments { + if arguments.span.contains(position_span) { + return self.build_request_from_arguments( + arguments, + position_span, + type_path, + ArgumentKind::Field, + ); + } + } + self.build_request_from_directives( + directives, + DirectiveLocation::Field, + position_span, + type_path, + None, + ) + } + }; + } + } + // The selection list is empty or the current cursor is out of any of the selection + Some(self.new_request( + CompletionKind::FieldName { + existing_linked_field: false, + }, + type_path, + )) + } + + fn build_request_from_constant_input_value( + &self, + position_span: Span, + type_path: Vec, + mut input_field_path: Vec, + constant_value: &ConstantValue, + name: StringKey, + ) -> Option { + match constant_value { + ConstantValue::List(list) => list + .items + .iter() + .find(|arg| arg.span().contains(position_span)) + .and_then(|constant_value| { + self.build_request_from_constant_input_value( + position_span, + type_path, + input_field_path, + constant_value, + name, + ) + }), + ConstantValue::Object(arguments) => { + if let Some(constant_argument) = arguments + .items + .iter() + .find(|arg| arg.span.contains(position_span)) + { + input_field_path.push(constant_argument.name()); + self.build_request_from_constant_input_value( + position_span, + type_path, + input_field_path, + &constant_argument.value, + name, + ) + } else { + Some(self.new_request( + CompletionKind::InputObjectFieldName { + name, + existing_names: + arguments.items.iter().map(|item| item.name()).collect(), + input_field_path, + }, + type_path, + )) + } + } + _ => None, + } + } + + fn build_request_from_input_value( + &self, + position_span: Span, + type_path: Vec, + mut input_field_path: Vec, + value: &Value, + name: StringKey, + ) -> Option { + match value { + Value::List(list) => list + .items + .iter() + .find(|arg| arg.span().contains(position_span)) + .and_then(|value| 
{ + self.build_request_from_input_value( + position_span, + type_path, + input_field_path, + value, + name, + ) + }), + Value::Object(arguments) => { + if let Some(position_argument) = arguments + .items + .iter() + .find(|arg| arg.span.contains(position_span)) + { + input_field_path.push(position_argument.name()); + self.build_request_from_input_value( + position_span, + type_path, + input_field_path, + &position_argument.value, + name, + ) + } else { + Some(self.new_request( + CompletionKind::InputObjectFieldName { + name, + existing_names: + arguments.items.iter().map(|item| item.name()).collect(), + input_field_path, + }, + type_path, + )) + } + } + Value::Constant(constant_value) => self.build_request_from_constant_input_value( + position_span, + type_path, + input_field_path, + constant_value, + name, + ), + _ => None, + } + } + + fn build_request_from_arguments( + &self, + arguments: &List, + position_span: Span, + type_path: Vec, + kind: ArgumentKind, + ) -> Option { + for ( + i, + Argument { + name, + value, + colon, + span, + .. 
+ }, + ) in arguments.items.iter().enumerate() + { + if span.contains(position_span) { + return if name.span.contains(position_span) { + Some(self.new_request( + CompletionKind::ArgumentName { + has_colon: colon.kind != TokenKind::Empty, + existing_names: + arguments.items.iter().map(|arg| arg.name.value).collect(), + kind, + }, + type_path, + )) + } else if let Some(executable_name) = self.current_executable_name { + match value { + Value::Constant(ConstantValue::Null(token)) + if token.kind == TokenKind::Empty => + { + Some(self.new_request( + CompletionKind::ArgumentValue { + argument_name: name.value, + executable_name, + kind, + }, + type_path, + )) + } + Value::Constant(constant_value) => self + .build_request_from_constant_input_value( + position_span, + type_path, + Default::default(), + constant_value, + name.value, + ), + Value::Variable(_) => Some(self.new_request( + CompletionKind::ArgumentValue { + argument_name: name.value, + executable_name, + kind, + }, + type_path, + )), + value => self.build_request_from_input_value( + position_span, + type_path, + Default::default(), + value, + name.value, + ), + } + } else { + None + }; + } else if span.end <= position_span.start { + let is_cursor_in_next_white_space = { + if let Some(next_argument) = arguments.items.get(i + 1) { + position_span.start < next_argument.span.start + } else { + position_span.start < arguments.span.end + } + }; + if is_cursor_in_next_white_space { + // Handles the following special case + // (args1: | args2:$var) + // ^ cursor here + // The cursor is on the white space between args1 and args2. + // We want to autocomplete the value if it's empty. 
+ return if let Some(executable_name) = self.current_executable_name { + match value { + Value::Constant(ConstantValue::Null(token)) + if token.kind == TokenKind::Empty => + { + Some(self.new_request( + CompletionKind::ArgumentValue { + argument_name: name.value, + executable_name, + kind, + }, + type_path, + )) + } + _ => Some(self.new_request( + CompletionKind::ArgumentName { + has_colon: false, + existing_names: + arguments.items.iter().map(|arg| arg.name.value).collect(), + kind, + }, + type_path, + )), + } + } else { + None + }; + } + } + } + // The argument list is empty or the cursor is not on any of the argument + Some(self.new_request( + CompletionKind::ArgumentName { + has_colon: false, + existing_names: arguments.items.iter().map(|arg| arg.name.value).collect(), + kind, + }, + type_path, + )) + } + + fn build_request_from_directives( + &self, + directives: &[Directive], + location: DirectiveLocation, + position_span: Span, + type_path: Vec, + fragment_spread_name: Option, + ) -> Option { + for directive in directives { + if !directive.span.contains(position_span) { + continue; + }; + return if directive.name.span.contains(position_span) { + Some(self.new_request(CompletionKind::DirectiveName { location }, type_path)) + } else if let Some(arguments) = &directive.arguments { + if arguments.span.contains(position_span) { + self.build_request_from_arguments( + arguments, + position_span, + type_path, + if let Some(fragment_spread_name) = fragment_spread_name { + if directive.name.value == *DIRECTIVE_ARGUMENTS { + ArgumentKind::ArgumentsDirective(fragment_spread_name) + } else { + ArgumentKind::Directive(DirectiveName(directive.name.value)) + } + } else { + ArgumentKind::Directive(DirectiveName(directive.name.value)) + }, + ) + } else { + None + } + } else { + // The directive doesn't have a name `@|` + Some(self.new_request(CompletionKind::DirectiveName { location }, type_path)) + }; + } + None + } + + fn build_request_from_selection_or_directives( + &self, 
+ selections: &List, + directives: &[Directive], + directive_location: DirectiveLocation, + position_span: Span, + type_path: Vec, + ) -> Option { + if selections.span.contains(position_span) { + // TODO(brandondail) handle when the completion occurs at/within the start token + self.build_request_from_selections(selections, position_span, type_path) + } else { + self.build_request_from_directives( + directives, + directive_location, + position_span, + type_path, + None, + ) + } + } +} + +fn completion_items_for_request( + request: CompletionRequest, + schema: &SDLSchema, + schema_documentation: impl SchemaDocumentation, + program: &Program, +) -> Option> { + let kind = request.kind; + match kind { + CompletionKind::FragmentSpread => { + let leaf_type = request.type_path.resolve_leaf_type(schema)?; + Some(resolve_completion_items_for_fragment_spread( + leaf_type, program, schema, true, + )) + } + CompletionKind::FieldName { + existing_linked_field, + } => match request.type_path.resolve_leaf_type(schema)? 
{ + Type::Interface(interface_id) => { + let interface = schema.interface(interface_id); + Some(merge_completion_items_ordered([ + resolve_completion_items_for_fields( + interface, + schema, + schema_documentation, + existing_linked_field, + ), + resolve_completion_items_typename(Type::Interface(interface_id), schema), + resolve_completion_items_for_inline_fragment( + Type::Interface(interface_id), + schema, + false, + ), + resolve_completion_items_for_fragment_spread( + Type::Interface(interface_id), + program, + schema, + false, + ), + ])) + } + Type::Object(object_id) => Some(merge_completion_items_ordered([ + resolve_completion_items_for_fields( + schema.object(object_id), + schema, + schema_documentation, + existing_linked_field, + ), + resolve_completion_items_typename(Type::Object(object_id), schema), + resolve_completion_items_for_fragment_spread( + Type::Object(object_id), + program, + schema, + false, + ), + ])), + Type::Union(union_id) => Some(merge_completion_items_ordered([ + resolve_completion_items_typename(Type::Union(union_id), schema), + resolve_completion_items_for_inline_fragment(Type::Union(union_id), schema, false), + resolve_completion_items_for_fragment_spread( + Type::Union(union_id), + program, + schema, + false, + ), + ])), + Type::Enum(_) | Type::InputObject(_) | Type::Scalar(_) => None, + }, + CompletionKind::DirectiveName { location } => { + let directives = schema.directives_for_location(location); + let items = directives + .iter() + .map(|directive| completion_item_from_directive(directive, schema)) + .collect(); + Some(items) + } + CompletionKind::ArgumentName { + has_colon, + existing_names, + kind, + } => match kind { + ArgumentKind::Field => { + let (_, field) = request.type_path.resolve_current_field(schema)?; + Some(resolve_completion_items_for_argument_name( + field.arguments.iter(), + schema, + existing_names, + has_colon, + )) + } + ArgumentKind::ArgumentsDirective(fragment_spread_name) => { + let fragment = 
program.fragment(FragmentDefinitionName(fragment_spread_name))?; + Some(resolve_completion_items_for_argument_name( + fragment.variable_definitions.iter(), + schema, + existing_names, + has_colon, + )) + } + ArgumentKind::Directive(directive_name) => { + Some(resolve_completion_items_for_argument_name( + schema.get_directive(directive_name)?.arguments.iter(), + schema, + existing_names, + has_colon, + )) + } + }, + CompletionKind::ArgumentValue { + executable_name, + argument_name, + kind, + } => { + let argument_type = match kind { + ArgumentKind::Field => { + let (_, field) = request.type_path.resolve_current_field(schema)?; + &field.arguments.named(ArgumentName(argument_name))?.type_ + } + ArgumentKind::ArgumentsDirective(fragment_spread_name) => { + let fragment = + program.fragment(FragmentDefinitionName(fragment_spread_name))?; + &fragment + .variable_definitions + .named(VariableName(argument_name))? + .type_ + } + ArgumentKind::Directive(directive_name) => { + &schema + .get_directive(directive_name)? + .arguments + .named(ArgumentName(argument_name))? 
+ .type_ + } + }; + Some(resolve_completion_items_for_argument_value( + schema, + argument_type, + program, + executable_name, + )) + } + CompletionKind::InlineFragmentType { + existing_inline_fragment, + } => { + let type_ = request.type_path.resolve_leaf_type(schema)?; + Some(resolve_completion_items_for_inline_fragment( + type_, + schema, + existing_inline_fragment, + )) + } + CompletionKind::InputObjectFieldName { + name, + existing_names, + input_field_path, + } => { + let (_, field) = request.type_path.resolve_current_field(schema)?; + + fn resolve_root_input_field<'a>( + schema: &'a SDLSchema, + input_object: &'a TypeReference, + ) -> Option<&'a InputObject> { + match input_object { + TypeReference::Named(Type::InputObject(input_object_id)) => { + Some(schema.input_object(*input_object_id)) + } + TypeReference::Named(_) => None, + TypeReference::NonNull(inner) => resolve_root_input_field(schema, inner), + TypeReference::List(inner) => resolve_root_input_field(schema, inner), + } + } + + fn resolve_input_field<'a>( + schema: &'a SDLSchema, + input_object: &'a InputObject, + field_name: &StringKey, + ) -> Option<&'a InputObject> { + input_object + .fields + .iter() + .find(|field| field.name.item.0 == *field_name) + .and_then(|field| resolve_root_input_field(schema, &field.type_)) + } + + let field_argument = field + .arguments + .iter() + .find(|argument| argument.name() == name)?; + + let mut input_object = resolve_root_input_field(schema, &field_argument.type_)?; + + for input_field_name in input_field_path.iter() { + input_object = resolve_input_field(schema, input_object, input_field_name)?; + } + + Some(resolve_completion_items_for_input_object( + input_object, + schema, + existing_names, + )) + } + } +} + +fn resolve_completion_items_typename(type_: Type, schema: &SDLSchema) -> Vec { + if type_.is_root_type(schema) { + vec![] + } else { + let mut item = CompletionItem::new_simple("__typename".to_owned(), "String!".to_owned()); + item.kind = 
Some(CompletionItemKind::FIELD); + vec![item] + } +} + +fn resolve_completion_items_for_input_object( + input_object: &InputObject, + schema: &SDLSchema, + existing_names: FnvHashSet, +) -> Vec { + input_object + .fields + .iter() + .filter(|arg| !existing_names.contains(&arg.name())) + .map(|arg| { + let label = arg.name().lookup().to_string(); + let detail = schema.get_type_string(arg.type_()); + let kind = match arg.type_().inner() { + Type::InputObject(_) => Some(CompletionItemKind::STRUCT), + Type::Scalar(_) => Some(CompletionItemKind::FIELD), + _ => None, + }; + + CompletionItem { + label: label.clone(), + kind, + detail: Some(detail), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: Some(format!("{}: $1", label)), + insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), + text_edit: None, + additional_text_edits: None, + command: Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + data: None, + tags: None, + ..Default::default() + } + }) + .collect() +} + +fn resolve_completion_items_for_argument_name( + arguments: impl Iterator, + schema: &SDLSchema, + existing_names: FnvHashSet, + has_colon: bool, +) -> Vec { + arguments + .filter(|arg| !existing_names.contains(&arg.name())) + .map(|arg| { + let label = arg.name().lookup().into(); + let detail = schema.get_type_string(arg.type_()); + if has_colon { + CompletionItem::new_simple(label, detail) + } else { + CompletionItem { + label: label.clone(), + kind: None, + detail: Some(detail), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: Some(format!("{}: $1", label)), + insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), + text_edit: None, + additional_text_edits: None, + command: Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + data: None, + tags: 
None, + ..Default::default() + } + } + }) + .collect() +} + +fn resolve_completion_items_for_inline_fragment( + type_: Type, + schema: &SDLSchema, + existing_inline_fragment: bool, +) -> Vec { + match type_ { + Type::Interface(id) => { + let interface = schema.interface(id); + + get_abstract_type_suggestions(schema, &interface.implementing_objects, Some(&id)) + } + Type::Union(id) => { + let union = schema.union(id); + + get_abstract_type_suggestions(schema, &union.members, None) + } + Type::Enum(_) | Type::Object(_) | Type::InputObject(_) | Type::Scalar(_) => vec![], + } + .into_iter() + .map(|type_| { + let type_name = schema.get_type_name(type_).lookup(); + if existing_inline_fragment { + CompletionItem::new_simple(type_name.to_owned(), "".into()) + } else { + CompletionItem { + label: format!("... on {type_name}"), + kind: None, + detail: None, + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: Some(format!("... on {type_name} {{\n\t$1\n}}")), + insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), + text_edit: None, + additional_text_edits: None, + command: Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + data: None, + tags: None, + ..Default::default() + } + } + }) + .collect() +} + +fn resolve_completion_items_for_argument_value( + schema: &SDLSchema, + type_: &TypeReference, + program: &Program, + executable_name: ExecutableName, +) -> Vec { + let mut completion_items = match executable_name { + ExecutableName::Fragment(name) => { + if let Some(fragment) = program.fragment(name) { + fragment + .used_global_variables + .iter() + .chain(fragment.variable_definitions.iter()) + .filter(|variable| variable.type_.eq(type_)) + .map(|variable| { + CompletionItem::new_simple(format!("${}", variable.name.item,), "".into()) + }) + .collect() + } else { + vec![] + } + } + ExecutableName::Operation(name) => { + if let Some(operation) = 
program.operation(OperationDefinitionName(name)) { + operation + .variable_definitions + .iter() + .filter(|variable| variable.type_.eq(type_)) + .map(|variable| { + CompletionItem::new_simple(format!("${}", variable.name.item,), "".into()) + }) + .collect() + } else { + vec![] + } + } + }; + + if !type_.is_list() { + if let Type::Enum(id) = type_.inner() { + let enum_ = schema.enum_(id); + completion_items.extend( + enum_ + .values + .iter() + .map(|value| CompletionItem::new_simple(value.value.to_string(), "".into())), + ) + } + } + + completion_items +} + +fn resolve_completion_items_for_fields( + type_: &T, + schema: &SDLSchema, + schema_documentation: impl SchemaDocumentation, + existing_linked_field: bool, +) -> Vec { + type_ + .fields() + .iter() + .map(|field_id| { + let field = schema.field(*field_id); + let field_name = field.name.item.to_string(); + let deprecated = field.deprecated(); + let is_deprecated = deprecated.is_some(); + let deprecated_reason = deprecated + .and_then(|deprecated| deprecated.reason) + .map(|reason| format!("Deprecated: {}", reason)); + let args = create_arguments_snippets(field.arguments.iter(), schema); + let insert_text = match ( + existing_linked_field + || matches!(field.type_.inner(), Type::Scalar(_) | Type::Enum(_)), // don't insert { } + args.is_empty(), // don't insert arguments + ) { + (true, true) => None, + (true, false) => Some(format!("{}({})", field_name, args.join(", "))), + (false, true) => Some(format!("{} {{\n\t$1\n}}", field_name)), + (false, false) => Some(format!( + "{}({}) {{\n\t${}\n}}", + field_name, + args.join(", "), + args.len() + 1 + )), + }; + let (insert_text_format, command) = if insert_text.is_some() { + ( + Some(lsp_types::InsertTextFormat::SNIPPET), + Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + ) + } else { + (None, None) + }; + + let type_description = schema_documentation + 
.get_type_description(schema.get_type_name(field.type_.inner()).lookup()); + + let field_description = schema_documentation + .get_field_description(type_.name().lookup(), field.name.item.lookup()); + + let type_name = schema.get_type_string(&field.type_); + let documentation = make_markdown_table_documentation( + field.name.item.lookup(), + &type_name, + field_description.unwrap_or(""), + type_description.unwrap_or(""), + ); + + let kind = match field.type_.inner() { + Type::Enum(_) => Some(CompletionItemKind::ENUM), + // There is no Kind for union, so we'll use interface + Type::Interface(_) | Type::Union(_) => Some(CompletionItemKind::INTERFACE), + Type::Object(_) | Type::InputObject(_) => Some(CompletionItemKind::STRUCT), + Type::Scalar(_) => Some(CompletionItemKind::FIELD), + }; + + CompletionItem { + label: field_name, + kind, + detail: deprecated_reason.or(Some(type_name)), + documentation: Some(documentation), + deprecated: Some(is_deprecated), + preselect: None, + sort_text: None, + filter_text: None, + insert_text, + insert_text_format, + text_edit: None, + additional_text_edits: None, + command, + data: None, + tags: None, + ..Default::default() + } + }) + .collect() +} + +fn resolve_completion_items_for_fragment_spread( + type_: Type, + source_program: &Program, + schema: &SDLSchema, + existing_fragment_spread: bool, +) -> Vec { + source_program + .fragments() + .filter(|fragment| schema.are_overlapping_types(fragment.type_condition, type_)) + .map(|fragment| { + let label = if existing_fragment_spread { + fragment.name.item.to_string() + } else { + format!("...{}", fragment.name.item) + }; + let detail = schema + .get_type_name(fragment.type_condition) + .lookup() + .to_string(); + if fragment.variable_definitions.is_empty() { + return CompletionItem::new_simple(label, detail); + } + // Create a snippet if the fragment has required argumentDefinition with no default values + let args = create_arguments_snippets(fragment.variable_definitions.iter(), 
schema); + if args.is_empty() { + return CompletionItem::new_simple(label, detail); + } + let insert_text = format!("{} @arguments({})", label, args.join(", ")); + CompletionItem { + label, + kind: None, + detail: Some(detail), + documentation: None, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: Some(insert_text), + insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), + text_edit: None, + additional_text_edits: None, + command: Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + data: None, + tags: None, + ..Default::default() + } + }) + .collect() +} + +fn merge_completion_items_ordered>>( + completion_item_groups: I, +) -> Vec { + completion_item_groups + .into_iter() + .enumerate() + .flat_map(|(index, mut items)| { + items.iter_mut().for_each(|item| { + item.sort_text = Some(format!( + "{}{}", + index, + item.sort_text.clone().unwrap_or_else(|| item.label.clone()) + )); + }); + items + }) + .collect() +} + +fn completion_item_from_directive( + directive: &SchemaDirective, + schema: &SDLSchema, +) -> CompletionItem { + let SchemaDirective { + name, arguments, .. 
+ } = directive; + + // Always use the name of the directive as the label + let label = name.item.to_string(); + + // We can return a snippet with the expected arguments of the directive + let (insert_text, insert_text_format) = if arguments.is_empty() { + (label.clone(), InsertTextFormat::PLAIN_TEXT) + } else { + let args = create_arguments_snippets(arguments.iter(), schema); + if args.is_empty() { + (label.clone(), InsertTextFormat::PLAIN_TEXT) + } else { + let insert_text = format!("{}({})", label, args.join(", ")); + (insert_text, InsertTextFormat::SNIPPET) + } + }; + + let documentation = directive.description.map(|desc| { + Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: desc.to_string(), + }) + }); + + CompletionItem { + label, + kind: None, + detail: None, + documentation, + deprecated: None, + preselect: None, + sort_text: None, + filter_text: None, + insert_text: Some(insert_text), + insert_text_format: Some(insert_text_format), + text_edit: None, + additional_text_edits: None, + command: Some(lsp_types::Command::new( + "Suggest".into(), + "editor.action.triggerSuggest".into(), + None, + )), + data: None, + tags: None, + ..Default::default() + } +} + +fn create_arguments_snippets( + arguments: impl Iterator, + schema: &SDLSchema, +) -> Vec { + let mut cursor_location = 1; + let mut args = vec![]; + + for arg in arguments { + if let TypeReference::NonNull(type_) = arg.type_() { + let value_snippet = match type_ { + t if t.is_list() => format!("[${}]", cursor_location), + t if schema.is_string(t.inner()) => format!("\"${}\"", cursor_location), + _ => format!("${}", cursor_location), + }; + let str = format!("{}: {}", arg.name(), value_snippet); + args.push(str); + cursor_location += 1; + } + } + args +} + +pub fn on_completion( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + match state.extract_executable_document_from_text(¶ms.text_document_position, 0) { + Ok((document, 
position_span)) => { + let project_name = state + .extract_project_name_from_url(¶ms.text_document_position.text_document.uri)?; + let schema = &state.get_schema(&project_name)?; + let items = resolve_completion_items( + document, + position_span, + project_name, + schema, + state.get_schema_documentation(project_name.lookup()), + &state.get_program(&project_name)?, + ) + .unwrap_or_else(Vec::new); + Ok(Some(CompletionResponse::Array(items))) + } + Err(graphql_err) => { + if matches!(graphql_err, LSPRuntimeError::ExpectedError) { + Err(LSPRuntimeError::ExpectedError) + } else { + Err(LSPRuntimeError::UnexpectedError(format!( + "Unable to get completion {:?}", + &graphql_err, + ))) + } + } + } +} + +pub(crate) fn on_resolve_completion_item( + _state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + // We currently don't do anything with the selected item + // and we just return an input + + Ok(params) +} + +fn resolve_completion_items( + document: ExecutableDocument, + position_span: Span, + project_name: StringKey, + schema: &SDLSchema, + schema_documentation: impl SchemaDocumentation, + progam: &Program, +) -> Option> { + let completion_request = CompletionRequestBuilder::new(project_name) + .create_completion_request(document, position_span); + completion_request.and_then(|completion_request| { + completion_items_for_request(completion_request, schema, schema_documentation, progam) + }) +} + +fn make_markdown_table_documentation( + field_name: &str, + type_name: &str, + field_description: &str, + type_description: &str, +) -> Documentation { + Documentation::MarkupContent(MarkupContent { + kind: MarkupKind::Markdown, + value: [ + format!("| **Field: {}** |", field_name), + "| :--- |".to_string(), + format!("| {} |", field_description), + format!("| **Type: {}** |", type_name), + format!("| {} |", type_description), + ] + .join("\n"), + }) +} + +fn get_abstract_type_suggestions( + schema: &SDLSchema, + objects: &[ObjectID], + 
base_interface_id: Option<&InterfaceID>, +) -> Vec { + let object_types: Vec<_> = objects.iter().map(|id| schema.object(*id)).collect(); + + let mut interfaces = Vec::new(); + let mut types = Vec::new(); + + for object_type in &object_types { + if let Some(t) = schema.get_type(object_type.name.item.0) { + types.push(t); + } + + for interface_id in &object_type.interfaces { + let interface_type = schema.interface(*interface_id); + + if let Some(base_id) = base_interface_id { + if interface_id == base_id || !interface_type.interfaces.contains(base_id) { + continue; + } + } + + if let Some(t) = schema.get_type(interface_type.name.item.0) { + if interfaces.contains(&t) { + continue; + } + + interfaces.push(t); + } + } + } + + types.extend(interfaces); + + types +} + +#[cfg(test)] +mod test; diff --git a/compiler/crates/relay-lsp/src/completion/mod.rs b/compiler/crates/relay-lsp/src/completion/mod.rs deleted file mode 100644 index 0ced431fc7bf5..0000000000000 --- a/compiler/crates/relay-lsp/src/completion/mod.rs +++ /dev/null @@ -1,1408 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -//! 
Utilities for providing the completion language feature -use std::iter::once; - -use common::ArgumentName; -use common::DirectiveName; -use common::Named; -use common::NamedItem; -use common::Span; -use fnv::FnvHashSet; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::VariableDefinition; -use graphql_ir::VariableName; -use graphql_ir::DIRECTIVE_ARGUMENTS; -use graphql_syntax::Argument; -use graphql_syntax::ConstantValue; -use graphql_syntax::Directive; -use graphql_syntax::DirectiveLocation; -use graphql_syntax::ExecutableDefinition; -use graphql_syntax::ExecutableDocument; -use graphql_syntax::FragmentSpread; -use graphql_syntax::InlineFragment; -use graphql_syntax::LinkedField; -use graphql_syntax::List; -use graphql_syntax::OperationDefinition; -use graphql_syntax::ScalarField; -use graphql_syntax::Selection; -use graphql_syntax::TokenKind; -use graphql_syntax::Value; -use intern::string_key::StringKey; -use intern::Lookup; -use log::debug; -use lsp_types::request::Completion; -use lsp_types::request::Request; -use lsp_types::request::ResolveCompletionItem; -use lsp_types::CompletionItem; -use lsp_types::CompletionItemKind; -use lsp_types::CompletionResponse; -use lsp_types::Documentation; -use lsp_types::InsertTextFormat; -use lsp_types::MarkupContent; -use lsp_types::MarkupKind; -use schema::Argument as SchemaArgument; -use schema::Directive as SchemaDirective; -use schema::InputObject; -use schema::SDLSchema; -use schema::Schema; -use schema::Type; -use schema::TypeReference; -use schema::TypeWithFields; - -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::node_resolution_info::TypePath; -use crate::node_resolution_info::TypePathItem; -use crate::server::GlobalState; -use crate::LSPRuntimeError; -use crate::SchemaDocumentation; - -#[derive(Debug, Clone)] -pub enum CompletionKind { - FieldName { - existing_linked_field: bool, - }, - FragmentSpread, - DirectiveName { - 
location: DirectiveLocation, - }, - ArgumentName { - has_colon: bool, - existing_names: FnvHashSet, - kind: ArgumentKind, - }, - ArgumentValue { - executable_name: ExecutableName, - argument_name: StringKey, - kind: ArgumentKind, - }, - InlineFragmentType { - existing_inline_fragment: bool, - }, - InputObjectFieldName { - name: StringKey, - existing_names: FnvHashSet, - input_field_path: Vec, - }, -} - -#[derive(Debug, Clone)] -pub enum ArgumentKind { - Field, - Directive(DirectiveName), - ArgumentsDirective(StringKey), -} - -#[derive(Debug)] -pub struct CompletionRequest { - /// The type of the completion request we're responding to - kind: CompletionKind, - /// A list of type metadata that we can use to resolve the leaf - /// type the request is being made against - type_path: TypePath, - /// The project the request belongs to - pub project_name: StringKey, -} - -impl CompletionRequest { - fn new(project_name: StringKey, kind: CompletionKind, type_path: TypePath) -> Self { - Self { - kind, - type_path, - project_name, - } - } -} - -#[derive(Debug, Copy, Clone)] -pub enum ExecutableName { - Operation(StringKey), - Fragment(FragmentDefinitionName), -} - -trait ArgumentLike { - fn name(&self) -> StringKey; - fn type_(&self) -> &TypeReference; -} - -impl ArgumentLike for &SchemaArgument { - fn name(&self) -> StringKey { - self.name.0 - } - fn type_(&self) -> &TypeReference { - &self.type_ - } -} - -impl ArgumentLike for &VariableDefinition { - fn name(&self) -> StringKey { - self.name.item.0 - } - fn type_(&self) -> &TypeReference { - &self.type_ - } -} - -struct CompletionRequestBuilder { - project_name: StringKey, - current_executable_name: Option, -} - -impl CompletionRequestBuilder { - fn new(project_name: StringKey) -> Self { - Self { - project_name, - current_executable_name: None, - } - } - - fn new_request(&self, kind: CompletionKind, type_path: Vec) -> CompletionRequest { - CompletionRequest::new(self.project_name, kind, type_path.into()) - } - - fn 
create_completion_request( - &mut self, - document: ExecutableDocument, - position_span: Span, - ) -> Option { - for definition in document.definitions { - match &definition { - ExecutableDefinition::Operation(operation) => { - if operation.location.contains(position_span) { - self.current_executable_name = operation - .name - .as_ref() - .map(|name| ExecutableName::Operation(name.value)); - let (_, kind) = operation.operation.clone()?; - let type_path = vec![TypePathItem::Operation(kind)]; - - debug!( - "Completion request is within operation: {:?}", - operation.name - ); - let OperationDefinition { - selections, - directives, - .. - } = operation; - - let directive_location = kind.into(); - - if let Some(req) = self.build_request_from_selection_or_directives( - selections, - directives, - directive_location, - position_span, - type_path, - ) { - return Some(req); - } - } - // Check if the position span is within this operation's span - } - ExecutableDefinition::Fragment(fragment) => { - if fragment.location.contains(position_span) { - self.current_executable_name = Some(ExecutableName::Fragment( - FragmentDefinitionName(fragment.name.value), - )); - let type_name = fragment.type_condition.type_.value; - let type_path = vec![TypePathItem::FragmentDefinition { type_name }]; - if let Some(req) = self.build_request_from_selection_or_directives( - &fragment.selections, - &fragment.directives, - DirectiveLocation::FragmentDefinition, - position_span, - type_path, - ) { - return Some(req); - } - } - } - } - } - None - } - - fn build_request_from_selections( - &self, - selections: &List, - position_span: Span, - mut type_path: Vec, - ) -> Option { - for item in &selections.items { - if item.span().contains(position_span) { - return match item { - Selection::LinkedField(node) => { - if node.name.span.contains(position_span) { - return Some(self.new_request( - CompletionKind::FieldName { - existing_linked_field: true, - }, - type_path, - )); - } - let LinkedField { - name, 
- selections, - directives, - arguments, - .. - } = node; - type_path.push(TypePathItem::LinkedField { name: name.value }); - if let Some(arguments) = arguments { - if arguments.span.contains(position_span) { - return self.build_request_from_arguments( - arguments, - position_span, - type_path, - ArgumentKind::Field, - ); - } - } - self.build_request_from_selection_or_directives( - selections, - directives, - DirectiveLocation::Field, - position_span, - type_path, - ) - } - Selection::FragmentSpread(spread) => { - let FragmentSpread { - name, directives, .. - } = spread; - if name.span.contains(position_span) { - Some(self.new_request(CompletionKind::FragmentSpread, type_path)) - } else { - self.build_request_from_directives( - directives, - DirectiveLocation::FragmentSpread, - position_span, - type_path, - Some(name.value), - ) - } - } - Selection::InlineFragment(node) => { - let InlineFragment { - selections, - directives, - type_condition, - .. - } = node; - if let Some(type_condition) = type_condition { - let type_name = type_condition.type_.value; - if type_condition.span.contains(position_span) { - return Some(self.new_request( - CompletionKind::InlineFragmentType { - existing_inline_fragment: selections.start.kind - != TokenKind::Empty, - }, - type_path, - )); - } - type_path.push(TypePathItem::InlineFragment { type_name }); - } - self.build_request_from_selection_or_directives( - selections, - directives, - DirectiveLocation::InlineFragment, - position_span, - type_path, - ) - } - Selection::ScalarField(node) => { - if node.name.span.contains(position_span) { - return Some(self.new_request( - CompletionKind::FieldName { - existing_linked_field: false, - }, - type_path, - )); - } - let ScalarField { - directives, - name, - arguments, - .. 
- } = node; - type_path.push(TypePathItem::ScalarField { name: name.value }); - if let Some(arguments) = arguments { - if arguments.span.contains(position_span) { - return self.build_request_from_arguments( - arguments, - position_span, - type_path, - ArgumentKind::Field, - ); - } - } - self.build_request_from_directives( - directives, - DirectiveLocation::Field, - position_span, - type_path, - None, - ) - } - }; - } - } - // The selection list is empty or the current cursor is out of any of the selection - Some(self.new_request( - CompletionKind::FieldName { - existing_linked_field: false, - }, - type_path, - )) - } - - fn build_request_from_constant_input_value( - &self, - position_span: Span, - type_path: Vec, - mut input_field_path: Vec, - constant_value: &ConstantValue, - name: StringKey, - ) -> Option { - match constant_value { - ConstantValue::List(list) => list - .items - .iter() - .find(|arg| arg.span().contains(position_span)) - .and_then(|constant_value| { - self.build_request_from_constant_input_value( - position_span, - type_path, - input_field_path, - constant_value, - name, - ) - }), - ConstantValue::Object(arguments) => { - if let Some(constant_argument) = arguments - .items - .iter() - .find(|arg| arg.span.contains(position_span)) - { - input_field_path.push(constant_argument.name()); - self.build_request_from_constant_input_value( - position_span, - type_path, - input_field_path, - &constant_argument.value, - name, - ) - } else { - Some(self.new_request( - CompletionKind::InputObjectFieldName { - name, - existing_names: - arguments.items.iter().map(|item| item.name()).collect(), - input_field_path, - }, - type_path, - )) - } - } - _ => None, - } - } - - fn build_request_from_input_value( - &self, - position_span: Span, - type_path: Vec, - mut input_field_path: Vec, - value: &Value, - name: StringKey, - ) -> Option { - match value { - Value::List(list) => list - .items - .iter() - .find(|arg| arg.span().contains(position_span)) - .and_then(|value| 
{ - self.build_request_from_input_value( - position_span, - type_path, - input_field_path, - value, - name, - ) - }), - Value::Object(arguments) => { - if let Some(position_argument) = arguments - .items - .iter() - .find(|arg| arg.span.contains(position_span)) - { - input_field_path.push(position_argument.name()); - self.build_request_from_input_value( - position_span, - type_path, - input_field_path, - &position_argument.value, - name, - ) - } else { - Some(self.new_request( - CompletionKind::InputObjectFieldName { - name, - existing_names: - arguments.items.iter().map(|item| item.name()).collect(), - input_field_path, - }, - type_path, - )) - } - } - Value::Constant(constant_value) => self.build_request_from_constant_input_value( - position_span, - type_path, - input_field_path, - constant_value, - name, - ), - _ => None, - } - } - - fn build_request_from_arguments( - &self, - arguments: &List, - position_span: Span, - type_path: Vec, - kind: ArgumentKind, - ) -> Option { - for ( - i, - Argument { - name, - value, - colon, - span, - .. 
- }, - ) in arguments.items.iter().enumerate() - { - if span.contains(position_span) { - return if name.span.contains(position_span) { - Some(self.new_request( - CompletionKind::ArgumentName { - has_colon: colon.kind != TokenKind::Empty, - existing_names: - arguments.items.iter().map(|arg| arg.name.value).collect(), - kind, - }, - type_path, - )) - } else if let Some(executable_name) = self.current_executable_name { - match value { - Value::Constant(ConstantValue::Null(token)) - if token.kind == TokenKind::Empty => - { - Some(self.new_request( - CompletionKind::ArgumentValue { - argument_name: name.value, - executable_name, - kind, - }, - type_path, - )) - } - Value::Constant(constant_value) => self - .build_request_from_constant_input_value( - position_span, - type_path, - Default::default(), - constant_value, - name.value, - ), - Value::Variable(_) => Some(self.new_request( - CompletionKind::ArgumentValue { - argument_name: name.value, - executable_name, - kind, - }, - type_path, - )), - value => self.build_request_from_input_value( - position_span, - type_path, - Default::default(), - value, - name.value, - ), - } - } else { - None - }; - } else if span.end <= position_span.start { - let is_cursor_in_next_white_space = { - if let Some(next_argument) = arguments.items.get(i + 1) { - position_span.start < next_argument.span.start - } else { - position_span.start < arguments.span.end - } - }; - if is_cursor_in_next_white_space { - // Handles the following special case - // (args1: | args2:$var) - // ^ cursor here - // The cursor is on the white space between args1 and args2. - // We want to autocomplete the value if it's empty. 
- return if let Some(executable_name) = self.current_executable_name { - match value { - Value::Constant(ConstantValue::Null(token)) - if token.kind == TokenKind::Empty => - { - Some(self.new_request( - CompletionKind::ArgumentValue { - argument_name: name.value, - executable_name, - kind, - }, - type_path, - )) - } - _ => Some(self.new_request( - CompletionKind::ArgumentName { - has_colon: false, - existing_names: - arguments.items.iter().map(|arg| arg.name.value).collect(), - kind, - }, - type_path, - )), - } - } else { - None - }; - } - } - } - // The argument list is empty or the cursor is not on any of the argument - Some(self.new_request( - CompletionKind::ArgumentName { - has_colon: false, - existing_names: arguments.items.iter().map(|arg| arg.name.value).collect(), - kind, - }, - type_path, - )) - } - - fn build_request_from_directives( - &self, - directives: &[Directive], - location: DirectiveLocation, - position_span: Span, - type_path: Vec, - fragment_spread_name: Option, - ) -> Option { - for directive in directives { - if !directive.span.contains(position_span) { - continue; - }; - return if directive.name.span.contains(position_span) { - Some(self.new_request(CompletionKind::DirectiveName { location }, type_path)) - } else if let Some(arguments) = &directive.arguments { - if arguments.span.contains(position_span) { - self.build_request_from_arguments( - arguments, - position_span, - type_path, - if let Some(fragment_spread_name) = fragment_spread_name { - if directive.name.value == *DIRECTIVE_ARGUMENTS { - ArgumentKind::ArgumentsDirective(fragment_spread_name) - } else { - ArgumentKind::Directive(DirectiveName(directive.name.value)) - } - } else { - ArgumentKind::Directive(DirectiveName(directive.name.value)) - }, - ) - } else { - None - } - } else { - // The directive doesn't have a name `@|` - Some(self.new_request(CompletionKind::DirectiveName { location }, type_path)) - }; - } - None - } - - fn build_request_from_selection_or_directives( - &self, 
- selections: &List, - directives: &[Directive], - directive_location: DirectiveLocation, - position_span: Span, - type_path: Vec, - ) -> Option { - if selections.span.contains(position_span) { - // TODO(brandondail) handle when the completion occurs at/within the start token - self.build_request_from_selections(selections, position_span, type_path) - } else { - self.build_request_from_directives( - directives, - directive_location, - position_span, - type_path, - None, - ) - } - } -} - -fn completion_items_for_request( - request: CompletionRequest, - schema: &SDLSchema, - schema_documentation: impl SchemaDocumentation, - program: &Program, -) -> Option> { - let kind = request.kind; - debug!("completion_items_for_request: {:?}", kind); - match kind { - CompletionKind::FragmentSpread => { - let leaf_type = request.type_path.resolve_leaf_type(schema)?; - Some(resolve_completion_items_for_fragment_spread( - leaf_type, program, schema, true, - )) - } - CompletionKind::FieldName { - existing_linked_field, - } => match request.type_path.resolve_leaf_type(schema)? 
{ - Type::Interface(interface_id) => { - let interface = schema.interface(interface_id); - Some(merge_completion_items_ordered([ - resolve_completion_items_for_fields( - interface, - schema, - schema_documentation, - existing_linked_field, - ), - resolve_completion_items_typename(Type::Interface(interface_id), schema), - resolve_completion_items_for_inline_fragment( - Type::Interface(interface_id), - schema, - false, - ), - resolve_completion_items_for_fragment_spread( - Type::Interface(interface_id), - program, - schema, - false, - ), - ])) - } - Type::Object(object_id) => Some(merge_completion_items_ordered([ - resolve_completion_items_for_fields( - schema.object(object_id), - schema, - schema_documentation, - existing_linked_field, - ), - resolve_completion_items_typename(Type::Object(object_id), schema), - resolve_completion_items_for_fragment_spread( - Type::Object(object_id), - program, - schema, - false, - ), - ])), - Type::Union(union_id) => Some(merge_completion_items_ordered([ - resolve_completion_items_typename(Type::Union(union_id), schema), - resolve_completion_items_for_inline_fragment(Type::Union(union_id), schema, false), - resolve_completion_items_for_fragment_spread( - Type::Union(union_id), - program, - schema, - false, - ), - ])), - Type::Enum(_) | Type::InputObject(_) | Type::Scalar(_) => None, - }, - CompletionKind::DirectiveName { location } => { - let directives = schema.directives_for_location(location); - let items = directives - .iter() - .map(|directive| completion_item_from_directive(directive, schema)) - .collect(); - Some(items) - } - CompletionKind::ArgumentName { - has_colon, - existing_names, - kind, - } => match kind { - ArgumentKind::Field => { - let (_, field) = request.type_path.resolve_current_field(schema)?; - Some(resolve_completion_items_for_argument_name( - field.arguments.iter(), - schema, - existing_names, - has_colon, - )) - } - ArgumentKind::ArgumentsDirective(fragment_spread_name) => { - let fragment = 
program.fragment(FragmentDefinitionName(fragment_spread_name))?; - Some(resolve_completion_items_for_argument_name( - fragment.variable_definitions.iter(), - schema, - existing_names, - has_colon, - )) - } - ArgumentKind::Directive(directive_name) => { - Some(resolve_completion_items_for_argument_name( - schema.get_directive(directive_name)?.arguments.iter(), - schema, - existing_names, - has_colon, - )) - } - }, - CompletionKind::ArgumentValue { - executable_name, - argument_name, - kind, - } => { - let argument_type = match kind { - ArgumentKind::Field => { - let (_, field) = request.type_path.resolve_current_field(schema)?; - &field.arguments.named(ArgumentName(argument_name))?.type_ - } - ArgumentKind::ArgumentsDirective(fragment_spread_name) => { - let fragment = - program.fragment(FragmentDefinitionName(fragment_spread_name))?; - &fragment - .variable_definitions - .named(VariableName(argument_name))? - .type_ - } - ArgumentKind::Directive(directive_name) => { - &schema - .get_directive(directive_name)? - .arguments - .named(ArgumentName(argument_name))? 
- .type_ - } - }; - Some(resolve_completion_items_for_argument_value( - schema, - argument_type, - program, - executable_name, - )) - } - CompletionKind::InlineFragmentType { - existing_inline_fragment, - } => { - let type_ = request.type_path.resolve_leaf_type(schema)?; - Some(resolve_completion_items_for_inline_fragment( - type_, - schema, - existing_inline_fragment, - )) - } - CompletionKind::InputObjectFieldName { - name, - existing_names, - input_field_path, - } => { - let (_, field) = request.type_path.resolve_current_field(schema)?; - - fn resolve_root_input_field<'a>( - schema: &'a SDLSchema, - input_object: &'a TypeReference, - ) -> Option<&'a InputObject> { - match input_object { - TypeReference::Named(Type::InputObject(input_object_id)) => { - Some(schema.input_object(*input_object_id)) - } - TypeReference::Named(_) => None, - TypeReference::NonNull(inner) => resolve_root_input_field(schema, inner), - TypeReference::List(inner) => resolve_root_input_field(schema, inner), - } - } - - fn resolve_input_field<'a>( - schema: &'a SDLSchema, - input_object: &'a InputObject, - field_name: &StringKey, - ) -> Option<&'a InputObject> { - input_object - .fields - .iter() - .find(|field| field.name.0 == *field_name) - .and_then(|field| resolve_root_input_field(schema, &field.type_)) - } - - let field_argument = field - .arguments - .iter() - .find(|argument| argument.name() == name)?; - - let mut input_object = resolve_root_input_field(schema, &field_argument.type_)?; - - for input_field_name in input_field_path.iter() { - input_object = resolve_input_field(schema, input_object, input_field_name)?; - } - - Some(resolve_completion_items_for_input_object( - input_object, - schema, - existing_names, - )) - } - } -} - -fn resolve_completion_items_typename(type_: Type, schema: &SDLSchema) -> Vec { - if type_.is_root_type(schema) { - vec![] - } else { - let mut item = CompletionItem::new_simple("__typename".to_owned(), "String!".to_owned()); - item.kind = 
Some(CompletionItemKind::FIELD); - vec![item] - } -} - -fn resolve_completion_items_for_input_object( - input_object: &InputObject, - schema: &SDLSchema, - existing_names: FnvHashSet, -) -> Vec { - input_object - .fields - .iter() - .filter(|arg| !existing_names.contains(&arg.name())) - .map(|arg| { - let label = arg.name().lookup().to_string(); - let detail = schema.get_type_string(arg.type_()); - let kind = match arg.type_().inner() { - Type::InputObject(_) => Some(CompletionItemKind::STRUCT), - Type::Scalar(_) => Some(CompletionItemKind::FIELD), - _ => None, - }; - - CompletionItem { - label: label.clone(), - kind, - detail: Some(detail), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: Some(format!("{}: $1", label)), - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - text_edit: None, - additional_text_edits: None, - command: Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - data: None, - tags: None, - ..Default::default() - } - }) - .collect() -} - -fn resolve_completion_items_for_argument_name( - arguments: impl Iterator, - schema: &SDLSchema, - existing_names: FnvHashSet, - has_colon: bool, -) -> Vec { - arguments - .filter(|arg| !existing_names.contains(&arg.name())) - .map(|arg| { - let label = arg.name().lookup().into(); - let detail = schema.get_type_string(arg.type_()); - if has_colon { - CompletionItem::new_simple(label, detail) - } else { - CompletionItem { - label: label.clone(), - kind: None, - detail: Some(detail), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: Some(format!("{}: $1", label)), - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - text_edit: None, - additional_text_edits: None, - command: Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - data: None, - tags: 
None, - ..Default::default() - } - } - }) - .collect() -} - -fn resolve_completion_items_for_inline_fragment( - type_: Type, - schema: &SDLSchema, - existing_inline_fragment: bool, -) -> Vec { - match type_ { - Type::Interface(id) => { - let interface = schema.interface(id); - once(type_) - .chain( - interface - .implementing_objects - .iter() - .filter_map(|id| schema.get_type(schema.object(*id).name.item.0)), - ) - .collect() - } - Type::Union(id) => { - let union = schema.union(id); - once(type_) - .chain( - union - .members - .iter() - .filter_map(|id| schema.get_type(schema.object(*id).name.item.0)), - ) - .collect() - } - Type::Enum(_) | Type::Object(_) | Type::InputObject(_) | Type::Scalar(_) => vec![], - } - .into_iter() - .map(|type_| { - let type_name = schema.get_type_name(type_).lookup(); - if existing_inline_fragment { - CompletionItem::new_simple(type_name.to_owned(), "".into()) - } else { - CompletionItem { - label: format!("... on {type_name}"), - kind: None, - detail: None, - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: Some(format!("... 
on {type_name} {{\n\t$1\n}}")), - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - text_edit: None, - additional_text_edits: None, - command: Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - data: None, - tags: None, - ..Default::default() - } - } - }) - .collect() -} - -fn resolve_completion_items_for_argument_value( - schema: &SDLSchema, - type_: &TypeReference, - program: &Program, - executable_name: ExecutableName, -) -> Vec { - let mut completion_items = match executable_name { - ExecutableName::Fragment(name) => { - if let Some(fragment) = program.fragment(name) { - fragment - .used_global_variables - .iter() - .chain(fragment.variable_definitions.iter()) - .filter(|variable| variable.type_.eq(type_)) - .map(|variable| { - CompletionItem::new_simple(format!("${}", variable.name.item,), "".into()) - }) - .collect() - } else { - vec![] - } - } - ExecutableName::Operation(name) => { - if let Some(operation) = program.operation(OperationDefinitionName(name)) { - operation - .variable_definitions - .iter() - .filter(|variable| variable.type_.eq(type_)) - .map(|variable| { - CompletionItem::new_simple(format!("${}", variable.name.item,), "".into()) - }) - .collect() - } else { - vec![] - } - } - }; - - if !type_.is_list() { - if let Type::Enum(id) = type_.inner() { - let enum_ = schema.enum_(id); - completion_items.extend( - enum_ - .values - .iter() - .map(|value| CompletionItem::new_simple(value.value.to_string(), "".into())), - ) - } - } - - completion_items -} - -fn resolve_completion_items_for_fields( - type_: &T, - schema: &SDLSchema, - schema_documentation: impl SchemaDocumentation, - existing_linked_field: bool, -) -> Vec { - type_ - .fields() - .iter() - .map(|field_id| { - let field = schema.field(*field_id); - let field_name = field.name.item.to_string(); - let deprecated = field.deprecated(); - let is_deprecated = deprecated.is_some(); - let deprecated_reason = deprecated - 
.and_then(|deprecated| deprecated.reason) - .map(|reason| format!("Deprecated: {}", reason)); - let args = create_arguments_snippets(field.arguments.iter(), schema); - let insert_text = match ( - existing_linked_field - || matches!(field.type_.inner(), Type::Scalar(_) | Type::Enum(_)), // don't insert { } - args.is_empty(), // don't insert arguments - ) { - (true, true) => None, - (true, false) => Some(format!("{}({})", field_name, args.join(", "))), - (false, true) => Some(format!("{} {{\n\t$1\n}}", field_name)), - (false, false) => Some(format!( - "{}({}) {{\n\t${}\n}}", - field_name, - args.join(", "), - args.len() + 1 - )), - }; - let (insert_text_format, command) = if insert_text.is_some() { - ( - Some(lsp_types::InsertTextFormat::SNIPPET), - Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - ) - } else { - (None, None) - }; - - let type_description = schema_documentation - .get_type_description(schema.get_type_name(field.type_.inner()).lookup()); - - let field_description = schema_documentation - .get_field_description(type_.name().lookup(), field.name.item.lookup()); - - let type_name = schema.get_type_string(&field.type_); - let documentation = make_markdown_table_documentation( - field.name.item.lookup(), - &type_name, - field_description.unwrap_or(""), - type_description.unwrap_or(""), - ); - - let kind = match field.type_.inner() { - Type::Enum(_) => Some(CompletionItemKind::ENUM), - // There is no Kind for union, so we'll use interface - Type::Interface(_) | Type::Union(_) => Some(CompletionItemKind::INTERFACE), - Type::Object(_) | Type::InputObject(_) => Some(CompletionItemKind::STRUCT), - Type::Scalar(_) => Some(CompletionItemKind::FIELD), - }; - - CompletionItem { - label: field_name, - kind, - detail: deprecated_reason.or(Some(type_name)), - documentation: Some(documentation), - deprecated: Some(is_deprecated), - preselect: None, - sort_text: None, - filter_text: None, - insert_text, - 
insert_text_format, - text_edit: None, - additional_text_edits: None, - command, - data: None, - tags: None, - ..Default::default() - } - }) - .collect() -} - -fn resolve_completion_items_for_fragment_spread( - type_: Type, - source_program: &Program, - schema: &SDLSchema, - existing_fragment_spread: bool, -) -> Vec { - source_program - .fragments() - .filter(|fragment| schema.are_overlapping_types(fragment.type_condition, type_)) - .map(|fragment| { - let label = if existing_fragment_spread { - fragment.name.item.to_string() - } else { - format!("...{}", fragment.name.item) - }; - let detail = schema - .get_type_name(fragment.type_condition) - .lookup() - .to_string(); - if fragment.variable_definitions.is_empty() { - return CompletionItem::new_simple(label, detail); - } - // Create a snippet if the fragment has required argumentDefinition with no default values - let args = create_arguments_snippets(fragment.variable_definitions.iter(), schema); - if args.is_empty() { - return CompletionItem::new_simple(label, detail); - } - let insert_text = format!("{} @arguments({})", label, args.join(", ")); - CompletionItem { - label, - kind: None, - detail: Some(detail), - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: Some(insert_text), - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - text_edit: None, - additional_text_edits: None, - command: Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - data: None, - tags: None, - ..Default::default() - } - }) - .collect() -} - -fn merge_completion_items_ordered>>( - completion_item_groups: I, -) -> Vec { - completion_item_groups - .into_iter() - .enumerate() - .flat_map(|(index, mut items)| { - items.iter_mut().for_each(|item| { - item.sort_text = Some(format!( - "{}{}", - index, - item.sort_text.clone().unwrap_or_else(|| item.label.clone()) - )); - }); - items - }) - .collect() -} - -fn 
completion_item_from_directive( - directive: &SchemaDirective, - schema: &SDLSchema, -) -> CompletionItem { - let SchemaDirective { - name, arguments, .. - } = directive; - - // Always use the name of the directive as the label - let label = name.to_string(); - - // We can return a snippet with the expected arguments of the directive - let (insert_text, insert_text_format) = if arguments.is_empty() { - (label.clone(), InsertTextFormat::PLAIN_TEXT) - } else { - let args = create_arguments_snippets(arguments.iter(), schema); - if args.is_empty() { - (label.clone(), InsertTextFormat::PLAIN_TEXT) - } else { - let insert_text = format!("{}({})", label, args.join(", ")); - (insert_text, InsertTextFormat::SNIPPET) - } - }; - - let documentation = directive.description.map(|desc| { - Documentation::MarkupContent(MarkupContent { - kind: MarkupKind::Markdown, - value: desc.to_string(), - }) - }); - - CompletionItem { - label, - kind: None, - detail: None, - documentation, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: Some(insert_text), - insert_text_format: Some(insert_text_format), - text_edit: None, - additional_text_edits: None, - command: Some(lsp_types::Command::new( - "Suggest".into(), - "editor.action.triggerSuggest".into(), - None, - )), - data: None, - tags: None, - ..Default::default() - } -} - -fn create_arguments_snippets( - arguments: impl Iterator, - schema: &SDLSchema, -) -> Vec { - let mut cursor_location = 1; - let mut args = vec![]; - - for arg in arguments { - if let TypeReference::NonNull(type_) = arg.type_() { - let value_snippet = match type_ { - t if t.is_list() => format!("[${}]", cursor_location), - t if schema.is_string(t.inner()) => format!("\"${}\"", cursor_location), - _ => format!("${}", cursor_location), - }; - let str = format!("{}: {}", arg.name(), value_snippet); - args.push(str); - cursor_location += 1; - } - } - args -} - -pub fn on_completion( - state: &impl GlobalState, - params: ::Params, 
-) -> LSPRuntimeResult<::Result> { - match state.extract_executable_document_from_text(¶ms.text_document_position, 0) { - Ok((document, position_span)) => { - let project_name = state - .extract_project_name_from_url(¶ms.text_document_position.text_document.uri)?; - let schema = &state.get_schema(&project_name)?; - let items = resolve_completion_items( - document, - position_span, - project_name, - schema, - state.get_schema_documentation(project_name.lookup()), - &state.get_program(&project_name)?, - ) - .unwrap_or_else(Vec::new); - Ok(Some(CompletionResponse::Array(items))) - } - Err(graphql_err) => { - let js_server = state.get_js_language_sever().ok_or_else(|| { - if matches!(graphql_err, LSPRuntimeError::ExpectedError) { - LSPRuntimeError::ExpectedError - } else { - LSPRuntimeError::UnexpectedError(format!( - "Unable to get completion {:?}", - &graphql_err, - )) - } - })?; - - if let Ok(response) = js_server.on_complete(¶ms, state) { - Ok(response) - } else { - Err(graphql_err) - } - } - } -} - -pub(crate) fn on_resolve_completion_item( - _state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - // We currently don't do anything with the selected item - // and we just return an input - - Ok(params) -} - -fn resolve_completion_items( - document: ExecutableDocument, - position_span: Span, - project_name: StringKey, - schema: &SDLSchema, - schema_documentation: impl SchemaDocumentation, - progam: &Program, -) -> Option> { - let completion_request = CompletionRequestBuilder::new(project_name) - .create_completion_request(document, position_span); - completion_request.and_then(|completion_request| { - completion_items_for_request(completion_request, schema, schema_documentation, progam) - }) -} - -fn make_markdown_table_documentation( - field_name: &str, - type_name: &str, - field_description: &str, - type_description: &str, -) -> Documentation { - Documentation::MarkupContent(MarkupContent { - kind: MarkupKind::Markdown, - value: [ - 
format!("| **Field: {}** |", field_name), - "| :--- |".to_string(), - format!("| {} |", field_description), - format!("| **Type: {}** |", type_name), - format!("| {} |", type_description), - ] - .join("\n"), - }) -} - -#[cfg(test)] -mod test; diff --git a/compiler/crates/relay-lsp/src/completion/test.rs b/compiler/crates/relay-lsp/src/completion/test.rs index 74a06c1baaf89..209ce02ed1120 100644 --- a/compiler/crates/relay-lsp/src/completion/test.rs +++ b/compiler/crates/relay-lsp/src/completion/test.rs @@ -28,7 +28,7 @@ fn parse_and_resolve_completion_items( program: Option, ) -> Option> { let pos = source.find('|').unwrap() - 1; - let next_source = source.replace("|", ""); + let next_source = source.replace('|', ""); let document = parse_executable_with_error_recovery( &next_source, SourceLocationKey::standalone("/test/file"), @@ -244,7 +244,6 @@ fn whitespace_in_interface() { "source", "node", "__typename", - "... on CommentsEdgeInterface", "... on CommentsEdge", "...ImplementingFragment", "...InterfaceFragment", @@ -265,11 +264,11 @@ fn whitespace_in_union() { fragment UnionFragment on CommentBody { __typename } - + fragment UnionVariantFragment on PlainCommentBody { __typename } - + fragment UnrelatedFragment on Task { __typename } @@ -280,11 +279,10 @@ fn whitespace_in_union() { items.unwrap(), vec![ "__typename", - "... on CommentBody", - "... on PlainCommentBody", "... on MarkdownCommentBody", - "...UnionFragment", + "... on PlainCommentBody", "...UnionVariantFragment", + "...UnionFragment", ], ) } @@ -312,9 +310,28 @@ fn inline_fragment_on_interface() { "#, None, ); + assert_labels(items.unwrap(), vec!["... on SimpleNamed", "... on User"]); +} + +#[test] +fn inline_fragment_on_interface_objects_implement_interface_implementing_base_interface() { + let items = parse_and_resolve_completion_items( + r#" + fragment Test on UserNameRenderable { + ... on |a + } + "#, + None, + ); + assert_labels( items.unwrap(), - vec!["... on Named", "... on User", "... 
on SimpleNamed"], + vec![ + "... on PlainUserNameRenderer", + "... on ImplementsImplementsUserNameRenderableAndUserNameRenderable", + "... on MarkdownUserNameRenderer", + "... on ImplementsUserNameRenderable", + ], ); } @@ -328,7 +345,7 @@ fn inline_fragment_on_interface_with_existing_inline_fragment() { "#, None, ); - assert_labels(items.unwrap(), vec!["Named", "User", "SimpleNamed"]); + assert_labels(items.unwrap(), vec!["User", "SimpleNamed"]); } #[test] @@ -344,10 +361,12 @@ fn inline_fragment_on_union() { assert_labels( items.unwrap(), vec![ - "... on MaybeNode", - "... on Story", "... on FakeNode", + "... on FeedUnit", + "... on Node", "... on NonNode", + "... on Story", + "... on MaybeNodeInterface", ], ); } @@ -364,7 +383,14 @@ fn inline_fragment_on_union_with_existing_inline_fragment() { ); assert_labels( items.unwrap(), - vec!["MaybeNode", "Story", "FakeNode", "NonNode"], + vec![ + "Node", + "Story", + "FakeNode", + "NonNode", + "MaybeNodeInterface", + "FeedUnit", + ], ); } @@ -383,12 +409,14 @@ fn directive() { assert_labels( items.unwrap(), vec![ + "credentials", "prependEdge", "deleteRecord", "appendNode", "deleteEdge", "__clientField", "appendEdge", + "catch", "required", "stream_connection", "match", @@ -420,12 +448,14 @@ fn directive_on_scalar_field() { assert_labels( items.unwrap(), vec![ + "credentials", "prependEdge", "deleteRecord", "appendNode", "deleteEdge", "__clientField", "appendEdge", + "catch", "required", "stream_connection", "match", @@ -456,7 +486,7 @@ fn empty_argument_list() { ); assert_labels( items.unwrap(), - vec!["label", "initial_count", "if", "use_customized_batch"], + vec!["label", "initialCount", "if", "useCustomizedBatch"], ); } @@ -474,7 +504,7 @@ fn argument_name_without_value() { ); assert_labels( items.unwrap(), - vec!["label", "initial_count", "if", "use_customized_batch"], + vec!["label", "initialCount", "if", "useCustomizedBatch"], ); } @@ -495,7 +525,7 @@ fn argument_name_with_existing_name() { ); assert_labels( 
items.unwrap(), - vec!["label", "initial_count", "use_customized_batch"], + vec!["label", "initialCount", "useCustomizedBatch"], ); } @@ -706,12 +736,14 @@ fn empty_directive() { assert_labels( items.unwrap(), vec![ + "credentials", "prependEdge", "deleteRecord", "appendNode", "deleteEdge", "__clientField", "appendEdge", + "catch", "required", "stream_connection", "match", diff --git a/compiler/crates/relay-lsp/src/diagnostic_reporter.rs b/compiler/crates/relay-lsp/src/diagnostic_reporter.rs index cb40621b1fb56..9cb87c3b041ed 100644 --- a/compiler/crates/relay-lsp/src/diagnostic_reporter.rs +++ b/compiler/crates/relay-lsp/src/diagnostic_reporter.rs @@ -6,6 +6,7 @@ */ //! Utilities for reporting errors to an LSP client +use std::path::Path; use std::path::PathBuf; use common::get_diagnostics_data; @@ -41,7 +42,7 @@ use crate::lsp_process_error::LSPProcessResult; /// Converts a Location to a Url pointing to the canonical path based on the root_dir provided. /// Returns None if we are unable to do the conversion -fn url_from_location(location: Location, root_dir: &PathBuf) -> Option { +fn url_from_location(location: Location, root_dir: &Path) -> Option { let file_path = location.source_location().path(); let canonical_path = canonicalize(root_dir.join(file_path)).ok()?; Url::from_file_path(canonical_path).ok() @@ -323,17 +324,14 @@ pub fn publish_diagnostic( sender: &Sender, ) -> LSPProcessResult<()> { let notif = ServerNotification::new(PublishDiagnostics::METHOD.into(), diagnostic_params); - sender - .send(Message::Notification(notif)) - .unwrap_or_else(|_| { - // TODO(brandondail) log here - }); + sender.send(Message::Notification(notif)).unwrap_or(()); Ok(()) } #[cfg(test)] mod tests { use std::env; + use std::path::Path; use std::path::PathBuf; use common::Diagnostic; @@ -351,7 +349,7 @@ mod tests { struct MockSourceReader(String); impl SourceReader for MockSourceReader { - fn read_file_to_string(&self, _path: &PathBuf) -> std::io::Result { + fn 
read_file_to_string(&self, _path: &Path) -> std::io::Result { Ok(self.0.to_string()) } } diff --git a/compiler/crates/relay-lsp/src/docblock_resolution_info.rs b/compiler/crates/relay-lsp/src/docblock_resolution_info.rs index 0d36fdf0903dc..13c2bc87a7502 100644 --- a/compiler/crates/relay-lsp/src/docblock_resolution_info.rs +++ b/compiler/crates/relay-lsp/src/docblock_resolution_info.rs @@ -8,13 +8,20 @@ use common::Span; use graphql_ir::reexport::StringKey; use graphql_ir::FragmentDefinitionName; +use graphql_syntax::Identifier; use relay_docblock::DocblockIr; use relay_docblock::On; +use relay_docblock::ResolverFieldDocblockIr; +use relay_docblock::ResolverTypeDocblockIr; pub enum DocblockResolutionInfo { Type(StringKey), RootFragment(FragmentDefinitionName), FieldName(StringKey), + FieldArgumentName { + field_name: Identifier, + argument_name: Identifier, + }, Deprecated, } @@ -23,7 +30,7 @@ pub fn create_docblock_resolution_info( position_span: Span, ) -> Option { match docblock_ir { - DocblockIr::RelayResolver(resolver_ir) => { + DocblockIr::Field(ResolverFieldDocblockIr::LegacyVerboseResolver(resolver_ir)) => { match resolver_ir.on { On::Type(on_type) => { if on_type.value.location.contains(position_span) { @@ -37,18 +44,33 @@ pub fn create_docblock_resolution_info( } }; + // Root fragment if let Some(root_fragment) = resolver_ir.root_fragment { if root_fragment.location.contains(position_span) { return Some(DocblockResolutionInfo::RootFragment(root_fragment.item)); } } + // Field name if resolver_ir.field.name.span.contains(position_span) { return Some(DocblockResolutionInfo::FieldName( resolver_ir.field.name.value, )); } + // Field arguments + if let Some(field_arguments) = &resolver_ir.field.arguments { + for field_argument in &field_arguments.items { + if field_argument.name.span.contains(position_span) { + return Some(DocblockResolutionInfo::FieldArgumentName { + field_name: resolver_ir.field.name, + argument_name: field_argument.name, + }); + } + } + } 
+ + // Return type if let Some(output_type) = &resolver_ir.output_type { if output_type.inner().location.contains(position_span) { return Some(DocblockResolutionInfo::Type( @@ -57,6 +79,7 @@ pub fn create_docblock_resolution_info( } } + // @deprecated key if let Some(deprecated) = resolver_ir.deprecated { if deprecated.key_location().contains(position_span) { return Some(DocblockResolutionInfo::Deprecated); @@ -65,7 +88,7 @@ pub fn create_docblock_resolution_info( None } - DocblockIr::TerseRelayResolver(resolver_ir) => { + DocblockIr::Field(ResolverFieldDocblockIr::TerseRelayResolver(resolver_ir)) => { // Parent type if resolver_ir.type_.location.contains(position_span) { return Some(DocblockResolutionInfo::Type(resolver_ir.type_.item)); @@ -89,6 +112,25 @@ pub fn create_docblock_resolution_info( } } + // Field name + if resolver_ir.field.name.span.contains(position_span) { + return Some(DocblockResolutionInfo::FieldName( + resolver_ir.field.name.value, + )); + } + + // Field arguments + if let Some(field_arguments) = &resolver_ir.field.arguments { + for field_argument in &field_arguments.items { + if field_argument.name.span.contains(position_span) { + return Some(DocblockResolutionInfo::FieldArgumentName { + field_name: resolver_ir.field.name, + argument_name: field_argument.name, + }); + } + } + } + // @deprecated key if let Some(deprecated) = resolver_ir.deprecated { if deprecated.key_location().contains(position_span) { @@ -98,7 +140,7 @@ pub fn create_docblock_resolution_info( None } - DocblockIr::StrongObjectResolver(strong_object) => { + DocblockIr::Type(ResolverTypeDocblockIr::StrongObjectResolver(strong_object)) => { if strong_object.rhs_location.contains(position_span) { return Some(DocblockResolutionInfo::Type(strong_object.type_name.value)); } @@ -110,7 +152,7 @@ pub fn create_docblock_resolution_info( } None } - DocblockIr::WeakObjectType(weak_type_ir) => { + DocblockIr::Type(ResolverTypeDocblockIr::WeakObjectType(weak_type_ir)) => { if 
weak_type_ir.rhs_location.contains(position_span) { return Some(DocblockResolutionInfo::Type(weak_type_ir.type_name.value)); } diff --git a/compiler/crates/relay-lsp/src/explore_schema_for_type/mod.rs b/compiler/crates/relay-lsp/src/explore_schema_for_type.rs similarity index 100% rename from compiler/crates/relay-lsp/src/explore_schema_for_type/mod.rs rename to compiler/crates/relay-lsp/src/explore_schema_for_type.rs diff --git a/compiler/crates/relay-lsp/src/explore_schema_for_type/types.rs b/compiler/crates/relay-lsp/src/explore_schema_for_type/types.rs index e37a7c714936e..37f995bc85a42 100644 --- a/compiler/crates/relay-lsp/src/explore_schema_for_type/types.rs +++ b/compiler/crates/relay-lsp/src/explore_schema_for_type/types.rs @@ -271,7 +271,7 @@ fn get_schema_explorer_input_object( get_empty_schema_explorer_type_reference(arg.type_.inner(), schema, documentation); SchemaExplorerFieldArgument { - argument_name: arg.name.to_string(), + argument_name: arg.name.item.to_string(), argument_description: None, default_value: arg.default_value.as_ref().map(|value| value.to_string()), rendered_type_name: schema.get_type_string(&arg.type_), @@ -399,12 +399,12 @@ fn get_schema_explorer_field( .get_field_argument_description( parent_type_name.lookup(), &field_name, - arg.name.0.lookup(), + arg.name.item.0.lookup(), ) .map(|field_argument_description| field_argument_description.to_string()); SchemaExplorerFieldArgument { - argument_name: arg.name.to_string(), + argument_name: arg.name.item.to_string(), argument_description, rendered_type_name: schema.get_type_string(&arg.type_), type_reference, diff --git a/compiler/crates/relay-lsp/src/find_field_usages.rs b/compiler/crates/relay-lsp/src/find_field_usages.rs new file mode 100644 index 0000000000000..dbd92fab78258 --- /dev/null +++ b/compiler/crates/relay-lsp/src/find_field_usages.rs @@ -0,0 +1,282 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +mod find_field_locations; + +use std::collections::HashMap; +use std::sync::Arc; + +use common::Location as IRLocation; +use common::WithLocation; +pub(crate) use find_field_locations::find_field_locations; +use graphql_ir::FragmentDefinition; +use graphql_ir::InlineFragment; +use graphql_ir::LinkedField; +use graphql_ir::OperationDefinition; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::Visitor; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use itertools::Itertools; +use lsp_types::request::Request; +use schema::FieldID; +use schema::SDLSchema; +use schema::Schema; +use schema::Type; +use serde::Deserialize; +use serde::Serialize; + +use crate::location::transform_relay_location_on_disk_to_lsp_location; +use crate::server::GlobalState; +use crate::LSPRuntimeError; +use crate::LSPRuntimeResult; + +// This implementation of FindFieldUsages find matching fields in: +// - exact type matches +// - subtypes (of the input type) +// - (not supertypes, since this could introduce many false positives) +// It currently does a shallow traversal of Operations and Fragments, +// not following fragment spreads to named fragments +// - this could result in false negatives, but saves on memory / time +pub struct FindFieldUsages {} + +#[derive(Deserialize, Serialize)] +pub struct FindFieldUsagesParams { + pub schema_name: String, + pub type_name: String, + pub field_name: String, +} + +#[derive(Deserialize, Serialize)] +struct FindFieldUsageResultItem { + location_uri: String, + location_range: lsp_types::Range, + label: String, +} + +#[derive(Deserialize, Serialize)] +pub struct FindFieldUsagesResult { + usages: Vec, +} + +impl Request for FindFieldUsages { + type Params = FindFieldUsagesParams; + type Result = FindFieldUsagesResult; + const METHOD: &'static str = "relay/findFieldUsages"; +} + 
+pub fn on_find_field_usages( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let schema_name = params.schema_name.intern(); + let type_name = params.type_name.intern(); + let field_name = params.field_name.intern(); + + let schema = state.get_schema(&schema_name)?; + let program = state.get_program(&schema_name)?; + let root_dir = &state.root_dir(); + + let ir_locations = get_usages(&program, &schema, type_name, field_name)?; + let lsp_locations = ir_locations + .into_iter() + .map(|(label, ir_location)| { + let lsp_location = + transform_relay_location_on_disk_to_lsp_location(root_dir, ir_location)?; + Ok(FindFieldUsageResultItem { + location_uri: lsp_location.uri.to_string(), + location_range: lsp_location.range, + label, + }) + }) + .collect::, LSPRuntimeError>>()?; + Ok(FindFieldUsagesResult { + usages: lsp_locations, + }) +} + +pub fn get_usages( + program: &Program, + schema: &Arc, + type_name: StringKey, + field_name: StringKey, +) -> LSPRuntimeResult> { + let type_ = schema.get_type(type_name).ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!("Type {} not found!", type_name)) + })?; + let mut usage_finder = FieldUsageFinder::new(schema, type_, field_name); + usage_finder.visit_program(program); + + let mut result = Vec::with_capacity(usage_finder.usages.len()); + for (label, locations) in usage_finder.usages.into_iter().sorted() { + if let [location] = locations.as_slice() { + // exactly 1 location, so no need to enumerate + result.push((label.to_string(), *location)); + } else { + for (idx, location) in locations.into_iter().enumerate() { + result.push((format!("{} - {}", label, idx), location)); + } + } + } + Ok(result) +} + +#[derive(Default)] +struct FieldUsageFinderScope { + // Types that a visited field acts on + // - reset when we see a linked field, a fragment, or an operation + // - pushed when we see type refinement (inline fragments) + types: Vec, + // name of the enclosing Fragment or Operation + 
label: Option, +} + +pub(crate) struct FieldUsageFinder<'schema> { + usages: HashMap>, + schema: &'schema Arc, + type_: Type, + field_name: StringKey, + current_scope: FieldUsageFinderScope, +} + +impl<'schema> FieldUsageFinder<'schema> { + pub(crate) fn new( + schema: &'schema Arc, + type_: Type, + field_name: StringKey, + ) -> FieldUsageFinder<'schema> { + FieldUsageFinder { + usages: Default::default(), + schema, + type_, + field_name, + current_scope: Default::default(), + } + } + + fn match_field(&mut self, field: &WithLocation) -> bool { + // check field name match + if self.schema.field(field.item).name.item == self.field_name { + // check for + // - exact type match + // - inferred types (spread on concrete should match all abstract) + for curr_typename in &self.current_scope.types { + if self.schema.is_named_type_subtype_of( + self.schema.get_type(*curr_typename).unwrap(), + self.type_, + ) { + return true; + } + } + } + false + } + + fn add_field(&mut self, field: &WithLocation) { + let current_label = self + .current_scope + .label + .expect("Expected label in find_field_usages"); + self.usages + .entry(current_label) + .or_default() + .push(field.location); + } + + pub(crate) fn get_locations(&self) -> Vec { + self.usages.values().flatten().copied().collect_vec() + } +} + +impl Visitor for FieldUsageFinder<'_> { + const NAME: &'static str = "FieldUsageFinder"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn visit_operation(&mut self, operation: &OperationDefinition) { + // Check that scope is empty + populate the name + type (Query/Mutation/Subscription) + // before recursively visiting the operation's selections + assert!(self.current_scope.label.is_none()); + assert!(self.current_scope.types.is_empty()); + self.current_scope.label = Some(operation.name.item.0); + self.current_scope + .types + .push(self.schema.get_type_name(operation.type_)); + + self.default_visit_operation(operation); + + 
self.current_scope.types.pop(); + self.current_scope.label = None; + assert!(self.current_scope.types.is_empty()); + } + + fn visit_fragment(&mut self, fragment: &FragmentDefinition) { + assert!(self.current_scope.label.is_none()); + assert!(self.current_scope.types.is_empty()); + + self.current_scope.label = Some(fragment.name.item.0); + self.current_scope + .types + .push(self.schema.get_type_name(fragment.type_condition)); + + self.default_visit_fragment(fragment); + + self.current_scope.types.pop(); + self.current_scope.label = None; + assert!(self.current_scope.types.is_empty()); + } + + fn visit_inline_fragment(&mut self, fragment: &InlineFragment) { + // Inline fragments might not have a type condition + // fragment Foo on User { + // ... { + // name + // } + // } + let should_pop = if let Some(type_) = fragment.type_condition { + self.current_scope + .types + .push(self.schema.get_type_name(type_)); + true + } else { + false + }; + self.default_visit_inline_fragment(fragment); + + if should_pop { + self.current_scope.types.pop(); + } + } + + fn visit_linked_field(&mut self, field: &LinkedField) { + if self.match_field(&field.definition) { + self.add_field(&field.definition); + } + + // save all enclosing types + let prev_types = std::mem::take(&mut self.current_scope.types); + + // remember linked type + let linked_type = self.schema.field(field.definition.item).type_.inner(); + self.current_scope + .types + .push(self.schema.get_type_name(linked_type)); + + self.default_visit_linked_field(field); + + self.current_scope.types.pop(); + assert!(self.current_scope.types.is_empty()); + self.current_scope.types = prev_types; + } + + fn visit_scalar_field(&mut self, field: &ScalarField) { + if self.match_field(&field.definition) { + self.add_field(&field.definition); + } + } +} diff --git a/compiler/crates/relay-lsp/src/find_field_usages/mod.rs b/compiler/crates/relay-lsp/src/find_field_usages/mod.rs deleted file mode 100644 index b7267979dbe97..0000000000000 --- 
a/compiler/crates/relay-lsp/src/find_field_usages/mod.rs +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod find_field_locations; - -use std::collections::HashMap; -use std::sync::Arc; - -use common::Location as IRLocation; -use common::WithLocation; -pub(crate) use find_field_locations::find_field_locations; -use graphql_ir::FragmentDefinition; -use graphql_ir::InlineFragment; -use graphql_ir::LinkedField; -use graphql_ir::OperationDefinition; -use graphql_ir::Program; -use graphql_ir::ScalarField; -use graphql_ir::Visitor; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use itertools::Itertools; -use lsp_types::request::Request; -use schema::FieldID; -use schema::SDLSchema; -use schema::Schema; -use schema::Type; -use serde::Deserialize; -use serde::Serialize; - -use crate::location::transform_relay_location_to_lsp_location; -use crate::server::GlobalState; -use crate::LSPRuntimeError; -use crate::LSPRuntimeResult; - -// This implementation of FindFieldUsages find matching fields in: -// - exact type matches -// - subtypes (of the input type) -// - (not supertypes, since this could introduce many false positives) -// It currently does a shallow traversal of Operations and Fragments, -// not following fragment spreads to named fragments -// - this could result in false negatives, but saves on memory / time -pub struct FindFieldUsages {} - -#[derive(Deserialize, Serialize)] -pub struct FindFieldUsagesParams { - pub schema_name: String, - pub type_name: String, - pub field_name: String, -} - -#[derive(Deserialize, Serialize)] -struct FindFieldUsageResultItem { - location_uri: String, - location_range: lsp_types::Range, - label: String, -} - -#[derive(Deserialize, Serialize)] -pub struct FindFieldUsagesResult { - usages: Vec, -} - -impl Request for FindFieldUsages 
{ - type Params = FindFieldUsagesParams; - type Result = FindFieldUsagesResult; - const METHOD: &'static str = "relay/findFieldUsages"; -} - -pub fn on_find_field_usages( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let schema_name = params.schema_name.intern(); - let type_name = params.type_name.intern(); - let field_name = params.field_name.intern(); - - let schema = state.get_schema(&schema_name)?; - let program = state.get_program(&schema_name)?; - let root_dir = &state.root_dir(); - - let ir_locations = get_usages(&program, &schema, type_name, field_name)?; - let lsp_locations = ir_locations - .into_iter() - .map(|(label, ir_location)| { - let lsp_location = transform_relay_location_to_lsp_location(root_dir, ir_location)?; - Ok(FindFieldUsageResultItem { - location_uri: lsp_location.uri.to_string(), - location_range: lsp_location.range, - label, - }) - }) - .collect::, LSPRuntimeError>>()?; - Ok(FindFieldUsagesResult { - usages: lsp_locations, - }) -} - -pub fn get_usages( - program: &Program, - schema: &Arc, - type_name: StringKey, - field_name: StringKey, -) -> LSPRuntimeResult> { - let type_ = schema.get_type(type_name).ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!("Type {} not found!", type_name)) - })?; - let mut usage_finder = FieldUsageFinder::new(schema, type_, field_name); - usage_finder.visit_program(program); - - let mut result = Vec::with_capacity(usage_finder.usages.len()); - for (label, locations) in usage_finder.usages.into_iter().sorted() { - if let [location] = locations.as_slice() { - // exactly 1 location, so no need to enumerate - result.push((label.to_string(), *location)); - } else { - for (idx, location) in locations.into_iter().enumerate() { - result.push((format!("{} - {}", label, idx), location)); - } - } - } - Ok(result) -} - -#[derive(Default)] -struct FieldUsageFinderScope { - // Types that a visited field acts on - // - reset when we see a linked field, a fragment, or an 
operation - // - pushed when we see type refinement (inline fragments) - types: Vec, - // name of the enclosing Fragment or Operation - label: Option, -} - -pub(crate) struct FieldUsageFinder<'schema> { - usages: HashMap>, - schema: &'schema Arc, - type_: Type, - field_name: StringKey, - current_scope: FieldUsageFinderScope, -} - -impl<'schema> FieldUsageFinder<'schema> { - pub(crate) fn new( - schema: &'schema Arc, - type_: Type, - field_name: StringKey, - ) -> FieldUsageFinder<'schema> { - FieldUsageFinder { - usages: Default::default(), - schema, - type_, - field_name, - current_scope: Default::default(), - } - } - - fn match_field(&mut self, field: &WithLocation) -> bool { - // check field name match - if self.schema.field(field.item).name.item == self.field_name { - // check for - // - exact type match - // - inferred types (spread on concrete should match all abstract) - for curr_typename in &self.current_scope.types { - if self.schema.is_named_type_subtype_of( - self.schema.get_type(*curr_typename).unwrap(), - self.type_, - ) { - return true; - } - } - } - false - } - - fn add_field(&mut self, field: &WithLocation) { - let current_label = self - .current_scope - .label - .expect("Expected label in find_field_usages"); - self.usages - .entry(current_label) - .or_default() - .push(field.location); - } - - pub(crate) fn get_locations(&self) -> Vec { - self.usages - .values() - .into_iter() - .flatten() - .copied() - .collect_vec() - } -} - -impl Visitor for FieldUsageFinder<'_> { - const NAME: &'static str = "FieldUsageFinder"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn visit_operation(&mut self, operation: &OperationDefinition) { - // Check that scope is empty + populate the name + type (Query/Mutation/Subscription) - // before recursively visiting the operation's selections - assert!(self.current_scope.label.is_none()); - assert!(self.current_scope.types.is_empty()); - self.current_scope.label = 
Some(operation.name.item.0); - self.current_scope - .types - .push(self.schema.get_type_name(operation.type_)); - - self.default_visit_operation(operation); - - self.current_scope.types.pop(); - self.current_scope.label = None; - assert!(self.current_scope.types.is_empty()); - } - - fn visit_fragment(&mut self, fragment: &FragmentDefinition) { - assert!(self.current_scope.label.is_none()); - assert!(self.current_scope.types.is_empty()); - - self.current_scope.label = Some(fragment.name.item.0); - self.current_scope - .types - .push(self.schema.get_type_name(fragment.type_condition)); - - self.default_visit_fragment(fragment); - - self.current_scope.types.pop(); - self.current_scope.label = None; - assert!(self.current_scope.types.is_empty()); - } - - fn visit_inline_fragment(&mut self, fragment: &InlineFragment) { - // Inline fragments might not have a type condition - // fragment Foo on User { - // ... { - // name - // } - // } - let should_pop = if let Some(type_) = fragment.type_condition { - self.current_scope - .types - .push(self.schema.get_type_name(type_)); - true - } else { - false - }; - self.default_visit_inline_fragment(fragment); - - if should_pop { - self.current_scope.types.pop(); - } - } - - fn visit_linked_field(&mut self, field: &LinkedField) { - if self.match_field(&field.definition) { - self.add_field(&field.definition); - } - - // save all enclosing types - let prev_types = std::mem::take(&mut self.current_scope.types); - - // remember linked type - let linked_type = self.schema.field(field.definition.item).type_.inner(); - self.current_scope - .types - .push(self.schema.get_type_name(linked_type)); - - self.default_visit_linked_field(field); - - self.current_scope.types.pop(); - assert!(self.current_scope.types.is_empty()); - self.current_scope.types = prev_types; - } - - fn visit_scalar_field(&mut self, field: &ScalarField) { - if self.match_field(&field.definition) { - self.add_field(&field.definition); - } - } -} diff --git 
a/compiler/crates/relay-lsp/src/goto_definition.rs b/compiler/crates/relay-lsp/src/goto_definition.rs new file mode 100644 index 0000000000000..91df1882926a8 --- /dev/null +++ b/compiler/crates/relay-lsp/src/goto_definition.rs @@ -0,0 +1,442 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//! Utilities for providing the goto definition feature + +mod goto_docblock_definition; +mod goto_graphql_definition; +use std::str; +use std::sync::Arc; + +use common::ArgumentName; +use common::DirectiveName; +use graphql_ir::FragmentDefinitionName; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use log::error; +use log::info; +use lsp_types::request::GotoDefinition; +use lsp_types::request::Request; +use lsp_types::GotoDefinitionResponse; +use lsp_types::Url; +use schema::SDLSchema; +use schema::Schema; +use schema::Type; +use serde::Deserialize; +use serde::Serialize; + +use self::goto_docblock_definition::get_docblock_definition_description; +use self::goto_graphql_definition::get_graphql_definition_description; +use self::goto_graphql_definition::get_graphql_schema_definition_description; +use crate::location::transform_relay_location_on_disk_to_lsp_location; +use crate::lsp_runtime_error::LSPRuntimeError; +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::GlobalState; +use crate::FieldDefinitionSourceInfo; +use crate::FieldSchemaInfo; +use crate::LSPExtraDataProvider; + +/// A concrete description of a GraphQL definition that a user would like to goto. 
+pub enum DefinitionDescription { + Field { + parent_type: Type, + field_name: StringKey, + }, + FieldArgument { + parent_type: Type, + field_name: StringKey, + argument_name: ArgumentName, + }, + DirectiveArgument { + directive_name: DirectiveName, + argument_name: ArgumentName, + }, + Fragment { + fragment_name: FragmentDefinitionName, + }, + Type { + type_name: StringKey, + }, + Directive { + directive_name: DirectiveName, + }, +} + +/// Resolve a GotoDefinitionRequest to a GotoDefinitionResponse +pub fn on_goto_definition( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let (feature, position_span) = + state.extract_feature_from_text(¶ms.text_document_position_params, 1)?; + + let project_name = state + .extract_project_name_from_url(¶ms.text_document_position_params.text_document.uri)?; + let schema = state.get_schema(&project_name)?; + let program = state.get_program(&project_name)?; + + let definition_description = match feature { + crate::Feature::ExecutableDocument(document) => { + get_graphql_definition_description(document, position_span, &schema)? + } + crate::Feature::DocblockIr(docblock_ir) => { + get_docblock_definition_description(&docblock_ir, position_span)? + } + crate::Feature::SchemaDocument(document) => { + get_graphql_schema_definition_description(document, position_span)? + } + }; + + let extra_data_provider = state.get_extra_data_provider(); + let root_dir = state.root_dir(); + + let goto_definition_response: GotoDefinitionResponse = match definition_description { + DefinitionDescription::FieldArgument { + parent_type, + field_name, + argument_name, + } => locate_field_argument_definition( + &schema, + parent_type, + field_name, + argument_name, + &root_dir, + )?, + DefinitionDescription::DirectiveArgument { + directive_name, + argument_name, + } => { + locate_directive_argument_definition(&schema, directive_name, argument_name, &root_dir)? 
+ } + DefinitionDescription::Field { + parent_type, + field_name, + } => locate_field_definition( + &schema, + parent_type, + field_name, + extra_data_provider, + project_name, + &root_dir, + )?, + DefinitionDescription::Fragment { fragment_name } => { + locate_fragment_definition(program, fragment_name, &root_dir)? + } + DefinitionDescription::Type { type_name } => locate_type_definition( + extra_data_provider, + project_name, + type_name, + &schema, + &root_dir, + )?, + DefinitionDescription::Directive { directive_name } => { + locate_directive_definition(directive_name, &schema, &root_dir)? + } + }; + + // For some lsp-clients, such as clients relying on org.eclipse.lsp4j, + // (see https://javadoc.io/static/org.eclipse.lsp4j/org.eclipse.lsp4j/0.8.1/org/eclipse/lsp4j/services/TextDocumentService.html) + // the definition response should be vector of location or locationlink. + // Therefore, let's convert the GotoDefinitionResponse::Scalar into Vector + if let GotoDefinitionResponse::Scalar(l) = goto_definition_response { + return Ok(Some(GotoDefinitionResponse::Array(vec![l]))); + } + + Ok(Some(goto_definition_response)) +} + +fn locate_fragment_definition( + program: graphql_ir::Program, + fragment_name: FragmentDefinitionName, + root_dir: &std::path::Path, +) -> Result { + let fragment = program.fragment(fragment_name).ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Could not find fragment with name {}", + fragment_name + )) + })?; + Ok(GotoDefinitionResponse::Scalar( + transform_relay_location_on_disk_to_lsp_location(root_dir, fragment.name.location)?, + )) +} + +fn locate_directive_definition( + directive_name: DirectiveName, + schema: &Arc, + root_dir: &std::path::Path, +) -> Result { + let directive = schema.get_directive(directive_name); + + directive + .map(|directive| directive.name.location) + .map(|schema_location| { + transform_relay_location_on_disk_to_lsp_location(root_dir, schema_location) + .map(GotoDefinitionResponse::Scalar) + }) 
+ .ok_or(LSPRuntimeError::ExpectedError)? +} + +fn locate_type_definition( + extra_data_provider: &dyn LSPExtraDataProvider, + project_name: StringKey, + type_name: StringKey, + schema: &Arc, + root_dir: &std::path::Path, +) -> Result { + let provider_response = extra_data_provider.resolve_field_definition( + project_name.to_string(), + type_name.to_string(), + None, + ); + + let field_definition_source_info = get_field_definition_source_info_result(provider_response); + + match field_definition_source_info { + Ok(source_info) => Ok(if source_info.is_local { + GotoDefinitionResponse::Scalar(get_location( + &source_info.file_path, + source_info.line_number, + )?) + } else { + return Err(LSPRuntimeError::ExpectedError); + }), + // If we couldn't resolve through the extra data provider, we'll fallback to + // try to find a location in the server sdl. + Err(err) => { + error!( + "Failed to resolve type definition through extra data provider. Falling back to schema file. Got error: {:?}", + err + ); + let type_ = schema.get_type(type_name); + + type_ + .map(|type_| match type_ { + Type::InputObject(input_object_id) => { + schema.input_object(input_object_id).name.location + } + Type::Enum(enum_id) => schema.enum_(enum_id).name.location, + Type::Interface(interface_id) => schema.interface(interface_id).name.location, + Type::Scalar(scalar_id) => schema.scalar(scalar_id).name.location, + Type::Union(union_id) => schema.union(union_id).name.location, + Type::Object(object_id) => schema.object(object_id).name.location, + }) + .map(|schema_location| { + transform_relay_location_on_disk_to_lsp_location(root_dir, schema_location) + .map(GotoDefinitionResponse::Scalar) + }) + .ok_or(LSPRuntimeError::ExpectedError)? 
+ } + } +} + +fn locate_field_argument_definition( + schema: &Arc, + parent_type: Type, + field_name: StringKey, + argument_name: ArgumentName, + root_dir: &std::path::Path, +) -> Result { + let field = schema.field(schema.named_field(parent_type, field_name).ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!("Could not find field with name {}", field_name)) + })?); + + let argument = field + .arguments + .iter() + .find(|argument| argument.name.item == argument_name) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Could not find argument with name {} on field with name {}", + argument_name, field_name, + )) + })?; + + transform_relay_location_on_disk_to_lsp_location(root_dir, argument.name.location) + .map(|location| Ok(GotoDefinitionResponse::Scalar(location)))? +} + +fn locate_directive_argument_definition( + schema: &SDLSchema, + directive_name: DirectiveName, + argument_name: ArgumentName, + root_dir: &std::path::Path, +) -> LSPRuntimeResult { + let directive = + schema + .get_directive(directive_name) + .ok_or(LSPRuntimeError::UnexpectedError(format!( + "Could not find directive with name {}", + directive_name + )))?; + + let argument = directive + .arguments + .iter() + .find(|argument| argument.name.item == argument_name) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Could not find argument with name {} on directive with name {}", + argument_name, directive_name, + )) + })?; + + transform_relay_location_on_disk_to_lsp_location(root_dir, argument.name.location) + .map(|location| Ok(GotoDefinitionResponse::Scalar(location)))? 
+} + +fn locate_field_definition( + schema: &Arc, + parent_type: Type, + field_name: StringKey, + extra_data_provider: &dyn LSPExtraDataProvider, + project_name: StringKey, + root_dir: &std::path::Path, +) -> Result { + let field = schema.field(schema.named_field(parent_type, field_name).ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!("Could not find field with name {}", field_name,)) + })?); + let parent_type = schema.get_type_name(parent_type); + let provider_response = extra_data_provider.resolve_field_definition( + project_name.to_string(), + parent_type.to_string(), + Some(FieldSchemaInfo { + name: field_name.to_string(), + is_extension: field.is_extension, + }), + ); + + match provider_response { + Ok(Some(source_info)) => { + // Step 1: does extra_data_provider know anything about this field? + if source_info.is_local { + return Ok(GotoDefinitionResponse::Scalar(get_location( + &source_info.file_path, + source_info.line_number, + )?)); + } else { + error!( + "Expected local source info from extra data provider, but got non-local. Falling back to schema file.", + ); + } + } + Ok(None) => { + info!( + "Extra data provider did not have any information about this field. Falling back to schema file." + ); + } + // Step 2: is field a standalone graphql file? + Err(err) => { + error!( + "Failed to resolve field definition through extra data provider. Falling back to schema file. 
Got error: {:?}", + err + ); + } + } + + transform_relay_location_on_disk_to_lsp_location(root_dir, field.name.location) + .map(GotoDefinitionResponse::Scalar) + // If the field does not exist in the schema, that's fine + .map_err(|_| LSPRuntimeError::ExpectedError) +} + +fn get_location(path: &str, line: u64) -> Result { + let start = lsp_types::Position { + line: line as u32, + character: 0, + }; + let range = lsp_types::Range { start, end: start }; + + let uri = Url::parse(&format!("file://{}", path)).map_err(|e| { + LSPRuntimeError::UnexpectedError(format!("Could not parse path as URL: {}", e)) + })?; + + Ok(lsp_types::Location { uri, range }) +} + +pub(crate) enum GetSourceLocationOfTypeDefinition {} + +#[derive(Deserialize, Serialize)] +pub(crate) struct GetSourceLocationOfTypeDefinitionParams { + type_name: String, + field_name: Option, + schema_name: String, +} + +#[derive(Deserialize, Serialize)] +pub(crate) struct GetSourceLocationOfTypeDefinitionResult { + field_definition_source_info: FieldDefinitionSourceInfo, +} + +impl Request for GetSourceLocationOfTypeDefinition { + type Params = GetSourceLocationOfTypeDefinitionParams; + type Result = GetSourceLocationOfTypeDefinitionResult; + const METHOD: &'static str = "relay/getSourceLocationOfTypeDefinition"; +} + +// Specific to schema explorer. 
+pub(crate) fn on_get_source_location_of_type_definition( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let schema = state.get_schema(&(¶ms.schema_name as &str).intern())?; + + let type_ = schema + .get_type((¶ms.type_name as &str).intern()) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Could not find type with name {}", + ¶ms.type_name + )) + })?; + + let field_info = params + .field_name + .map(|field_name| { + schema + .named_field(type_, (&field_name as &str).intern()) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Could not find field with name {}", + field_name + )) + }) + }) + .transpose()? + .map(|field_id| { + let field = schema.field(field_id); + FieldSchemaInfo { + name: field.name.item.to_string(), + is_extension: field.is_extension, + } + }); + + // TODO add go-to-definition for client fields + let field_definition_source_info = get_field_definition_source_info_result( + state.get_extra_data_provider().resolve_field_definition( + params.schema_name, + params.type_name, + field_info, + ), + )?; + + Ok(GetSourceLocationOfTypeDefinitionResult { + field_definition_source_info, + }) +} + +fn get_field_definition_source_info_result( + result: Result, String>, +) -> LSPRuntimeResult { + result + .map_err(LSPRuntimeError::UnexpectedError)? 
+ .ok_or_else(|| { + LSPRuntimeError::UnexpectedError( + "Expected result when resolving field definition location".to_string(), + ) + }) +} diff --git a/compiler/crates/relay-lsp/src/goto_definition/goto_docblock_definition.rs b/compiler/crates/relay-lsp/src/goto_definition/goto_docblock_definition.rs index b00f78de01fec..06732cbb36d94 100644 --- a/compiler/crates/relay-lsp/src/goto_definition/goto_docblock_definition.rs +++ b/compiler/crates/relay-lsp/src/goto_definition/goto_docblock_definition.rs @@ -26,7 +26,14 @@ pub fn get_docblock_definition_description( Ok(DefinitionDescription::Fragment { fragment_name }) } DocblockResolutionInfo::FieldName(_) => { - // The field name _id_ the definition of the field. + // The field name _is_ the definition of the field. + Err(LSPRuntimeError::ExpectedError) + } + DocblockResolutionInfo::FieldArgumentName { + field_name: _, + argument_name: _, + } => { + // The argument name _is_ the definition of the argument. Err(LSPRuntimeError::ExpectedError) } DocblockResolutionInfo::Deprecated => { diff --git a/compiler/crates/relay-lsp/src/goto_definition/goto_graphql_definition.rs b/compiler/crates/relay-lsp/src/goto_definition/goto_graphql_definition.rs index ffa142bd1f50b..d8d3f70d98d7e 100644 --- a/compiler/crates/relay-lsp/src/goto_definition/goto_graphql_definition.rs +++ b/compiler/crates/relay-lsp/src/goto_definition/goto_graphql_definition.rs @@ -7,10 +7,19 @@ use std::sync::Arc; +use common::ArgumentName; +use common::DirectiveName; use common::Span; use graphql_ir::FragmentDefinitionName; use graphql_syntax::ExecutableDocument; +use graphql_syntax::SchemaDocument; use intern::string_key::StringKey; +use resolution_path::ArgumentParent; +use resolution_path::ArgumentPath; +use resolution_path::ConstantArgumentParent; +use resolution_path::ConstantArgumentPath; +use resolution_path::ConstantDirectivePath; +use resolution_path::DirectivePath; use resolution_path::IdentParent; use resolution_path::IdentPath; use 
resolution_path::LinkedFieldPath; @@ -25,12 +34,61 @@ use super::DefinitionDescription; use crate::lsp_runtime_error::LSPRuntimeError; use crate::lsp_runtime_error::LSPRuntimeResult; +pub fn get_graphql_schema_definition_description( + document: SchemaDocument, + position_span: Span, +) -> LSPRuntimeResult { + let node_path = document.resolve((), position_span); + + match node_path { + ResolutionPath::Ident(IdentPath { + inner: type_name, + parent: + IdentParent::NamedTypeAnnotation(_) + | IdentParent::UnionTypeMemberType(_) + | IdentParent::ImplementedInterfaceName(_) + | IdentParent::OperationTypeDefinitionType(_) + | IdentParent::InputObjectTypeExtensionName(_) + | IdentParent::ObjectTypeExtensionName(_) + | IdentParent::InterfaceTypeExtensionName(_) + | IdentParent::UnionTypeExtensionName(_) + | IdentParent::EnumTypeExtensionName(_) + | IdentParent::ScalarTypeExtensionName(_), + }) => Ok(DefinitionDescription::Type { + type_name: type_name.value, + }), + ResolutionPath::Ident(IdentPath { + inner: directive_name, + parent: IdentParent::ConstantDirectiveName(_), + }) => Ok(DefinitionDescription::Directive { + directive_name: DirectiveName(directive_name.value), + }), + ResolutionPath::Ident(IdentPath { + inner: argument_name, + parent: + IdentParent::ConstantArgumentKey(ConstantArgumentPath { + inner: _, + parent: + ConstantArgumentParent::ConstantDirective(ConstantDirectivePath { + inner: directive, + .. 
+ }), + }), + }) => Ok(DefinitionDescription::DirectiveArgument { + directive_name: DirectiveName(directive.name.value), + argument_name: ArgumentName(argument_name.value), + }), + _ => Err(LSPRuntimeError::ExpectedError), + } +} + pub fn get_graphql_definition_description( document: ExecutableDocument, position_span: Span, schema: &Arc, ) -> LSPRuntimeResult { let node_path = document.resolve((), position_span); + match node_path { ResolutionPath::Ident(IdentPath { inner: fragment_name, @@ -38,6 +96,54 @@ pub fn get_graphql_definition_description( }) => Ok(DefinitionDescription::Fragment { fragment_name: FragmentDefinitionName(fragment_name.value), }), + ResolutionPath::Ident(IdentPath { + inner: argument_name, + parent: + IdentParent::ArgumentName(ArgumentPath { + inner: _, + parent: + ArgumentParent::Directive(DirectivePath { + inner: directive, .. + }), + }), + }) => Ok(DefinitionDescription::DirectiveArgument { + directive_name: DirectiveName(directive.name.value), + argument_name: ArgumentName(argument_name.value), + }), + ResolutionPath::Ident(IdentPath { + inner: argument_name, + parent: + IdentParent::ArgumentName(ArgumentPath { + inner: _, + parent: + ArgumentParent::ScalarField(ScalarFieldPath { + inner: field, + parent: selection_path, + }), + }), + }) => resolve_field_argument( + field.name.value, + ArgumentName(argument_name.value), + selection_path.parent, + schema, + ), + ResolutionPath::Ident(IdentPath { + inner: argument_name, + parent: + IdentParent::ArgumentName(ArgumentPath { + inner: _, + parent: + ArgumentParent::LinkedField(LinkedFieldPath { + inner: field, + parent: selection_path, + }), + }), + }) => resolve_field_argument( + field.name.value, + ArgumentName(argument_name.value), + selection_path.parent, + schema, + ), ResolutionPath::Ident(IdentPath { inner: field_name, parent: @@ -64,6 +170,18 @@ pub fn get_graphql_definition_description( }) => Ok(DefinitionDescription::Type { type_name: type_condition.type_.value, }), + 
ResolutionPath::Ident(IdentPath { + inner: directive_name, + parent: IdentParent::DirectiveName(_), + }) => Ok(DefinitionDescription::Directive { + directive_name: DirectiveName(directive_name.value), + }), + ResolutionPath::Ident(IdentPath { + inner: type_name, + parent: IdentParent::NamedTypeAnnotation(_), + }) => Ok(DefinitionDescription::Type { + type_name: type_name.value, + }), _ => Err(LSPRuntimeError::ExpectedError), } } @@ -82,3 +200,20 @@ fn resolve_field( field_name, }) } + +fn resolve_field_argument( + field_name: StringKey, + argument_name: ArgumentName, + selection_parent: SelectionParent<'_>, + schema: &Arc, +) -> LSPRuntimeResult { + let parent_type = selection_parent + .find_parent_type(schema) + .ok_or(LSPRuntimeError::ExpectedError)?; + + Ok(DefinitionDescription::FieldArgument { + parent_type, + field_name, + argument_name, + }) +} diff --git a/compiler/crates/relay-lsp/src/goto_definition/mod.rs b/compiler/crates/relay-lsp/src/goto_definition/mod.rs deleted file mode 100644 index 12b08395d6ff9..0000000000000 --- a/compiler/crates/relay-lsp/src/goto_definition/mod.rs +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -//! 
Utilities for providing the goto definition feature - -mod goto_docblock_definition; -mod goto_graphql_definition; -use std::str; -use std::sync::Arc; - -use graphql_ir::FragmentDefinitionName; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use lsp_types::request::GotoDefinition; -use lsp_types::request::Request; -use lsp_types::GotoDefinitionResponse; -use lsp_types::Url; -use schema::SDLSchema; -use schema::Schema; -use schema::Type; -use serde::Deserialize; -use serde::Serialize; - -use self::goto_docblock_definition::get_docblock_definition_description; -use self::goto_graphql_definition::get_graphql_definition_description; -use crate::location::transform_relay_location_to_lsp_location; -use crate::lsp_runtime_error::LSPRuntimeError; -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::server::GlobalState; -use crate::FieldDefinitionSourceInfo; -use crate::FieldSchemaInfo; -use crate::LSPExtraDataProvider; - -/// A concrete description of a GraphQL definition that a user would like to goto. -pub enum DefinitionDescription { - Field { - parent_type: Type, - field_name: StringKey, - }, - Fragment { - fragment_name: FragmentDefinitionName, - }, - Type { - type_name: StringKey, - }, -} - -/// Resolve a GotoDefinitionRequest to a GotoDefinitionResponse -pub fn on_goto_definition( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let (feature, position_span) = - state.extract_feature_from_text(¶ms.text_document_position_params, 1)?; - - let project_name = state - .extract_project_name_from_url(¶ms.text_document_position_params.text_document.uri)?; - let schema = state.get_schema(&project_name)?; - let program = state.get_program(&project_name)?; - - let definition_description = match feature { - crate::Feature::GraphQLDocument(document) => { - get_graphql_definition_description(document, position_span, &schema)? 
- } - crate::Feature::DocblockIr(docblock_ir) => { - get_docblock_definition_description(&docblock_ir, position_span)? - } - }; - - let extra_data_provider = &*state.get_extra_data_provider(); - let root_dir = state.root_dir(); - - let goto_definition_response: GotoDefinitionResponse = match definition_description { - DefinitionDescription::Field { - parent_type, - field_name, - } => locate_field_definition( - &schema, - parent_type, - field_name, - extra_data_provider, - project_name, - &root_dir, - )?, - DefinitionDescription::Fragment { fragment_name } => { - locate_fragment_definition(program, fragment_name, &root_dir)? - } - DefinitionDescription::Type { type_name } => locate_type_definition( - extra_data_provider, - project_name, - type_name, - &schema, - &root_dir, - )?, - }; - - // For some lsp-clients, such as clients relying on org.eclipse.lsp4j, - // (see https://javadoc.io/static/org.eclipse.lsp4j/org.eclipse.lsp4j/0.8.1/org/eclipse/lsp4j/services/TextDocumentService.html) - // the definition response should be vector of location or locationlink. 
- // Therefore, let's convert the GotoDefinitionResponse::Scalar into Vector - if let GotoDefinitionResponse::Scalar(l) = goto_definition_response { - return Ok(Some(GotoDefinitionResponse::Array(vec![l]))); - } - - Ok(Some(goto_definition_response)) -} - -fn locate_fragment_definition( - program: graphql_ir::Program, - fragment_name: FragmentDefinitionName, - root_dir: &std::path::Path, -) -> Result { - let fragment = program.fragment(fragment_name).ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "Could not find fragment with name {}", - fragment_name - )) - })?; - Ok(GotoDefinitionResponse::Scalar( - transform_relay_location_to_lsp_location(root_dir, fragment.name.location)?, - )) -} - -fn locate_type_definition( - extra_data_provider: &dyn LSPExtraDataProvider, - project_name: StringKey, - type_name: StringKey, - schema: &Arc, - root_dir: &std::path::Path, -) -> Result { - let provider_response = extra_data_provider.resolve_field_definition( - project_name.to_string(), - type_name.to_string(), - None, - ); - - let field_definition_source_info = get_field_definition_source_info_result(provider_response); - - match field_definition_source_info { - Ok(source_info) => Ok(if source_info.is_local { - GotoDefinitionResponse::Scalar(get_location( - &source_info.file_path, - source_info.line_number, - )?) - } else { - return Err(LSPRuntimeError::ExpectedError); - }), - // If we couldn't resolve through the extra data provider, we'll fallback to - // try to find a location in the server sdl. 
- Err(_) => { - let type_ = schema.get_type(type_name); - - type_ - .map(|type_| match type_ { - Type::InputObject(input_object_id) => { - schema.input_object(input_object_id).name.location - } - Type::Enum(enum_id) => schema.enum_(enum_id).name.location, - Type::Interface(interface_id) => schema.interface(interface_id).name.location, - Type::Scalar(scalar_id) => schema.scalar(scalar_id).name.location, - Type::Union(union_id) => schema.union(union_id).name.location, - Type::Object(object_id) => schema.object(object_id).name.location, - }) - .map(|schema_location| { - transform_relay_location_to_lsp_location(root_dir, schema_location) - .map(GotoDefinitionResponse::Scalar) - }) - .ok_or(LSPRuntimeError::ExpectedError)? - } - } -} - -fn locate_field_definition( - schema: &Arc, - parent_type: Type, - field_name: StringKey, - extra_data_provider: &dyn LSPExtraDataProvider, - project_name: StringKey, - root_dir: &std::path::Path, -) -> Result { - let field = schema.field(schema.named_field(parent_type, field_name).ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!("Could not find field with name {}", field_name,)) - })?); - let parent_type = schema.get_type_name(parent_type); - let provider_response = extra_data_provider.resolve_field_definition( - project_name.to_string(), - parent_type.to_string(), - Some(FieldSchemaInfo { - name: field_name.to_string(), - is_extension: field.is_extension, - }), - ); - Ok(if let Ok(Some(source_info)) = provider_response { - // Step 1: does extra_data_provider know anything about this field? - if source_info.is_local { - GotoDefinitionResponse::Scalar(get_location( - &source_info.file_path, - source_info.line_number, - )?) - } else { - return Err(LSPRuntimeError::ExpectedError); - } - } else if let Ok(location) = - transform_relay_location_to_lsp_location(root_dir, field.name.location) - { - // Step 2: is field a standalone graphql file? 
- GotoDefinitionResponse::Scalar(location) - } else { - // Give up - return Err(LSPRuntimeError::ExpectedError); - }) -} - -fn get_location(path: &str, line: u64) -> Result { - let start = lsp_types::Position { - line: line as u32, - character: 0, - }; - let range = lsp_types::Range { start, end: start }; - - let uri = Url::parse(&format!("file://{}", path)).map_err(|e| { - LSPRuntimeError::UnexpectedError(format!("Could not parse path as URL: {}", e)) - })?; - - Ok(lsp_types::Location { uri, range }) -} - -pub(crate) enum GetSourceLocationOfTypeDefinition {} - -#[derive(Deserialize, Serialize)] -pub(crate) struct GetSourceLocationOfTypeDefinitionParams { - type_name: String, - field_name: Option, - schema_name: String, -} - -#[derive(Deserialize, Serialize)] -pub(crate) struct GetSourceLocationOfTypeDefinitionResult { - field_definition_source_info: FieldDefinitionSourceInfo, -} - -impl Request for GetSourceLocationOfTypeDefinition { - type Params = GetSourceLocationOfTypeDefinitionParams; - type Result = GetSourceLocationOfTypeDefinitionResult; - const METHOD: &'static str = "relay/getSourceLocationOfTypeDefinition"; -} - -// Specific to schema explorer. -pub(crate) fn on_get_source_location_of_type_definition( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let schema = state.get_schema(&(¶ms.schema_name as &str).intern())?; - - let type_ = schema - .get_type((¶ms.type_name as &str).intern()) - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "Could not find type with name {}", - ¶ms.type_name - )) - })?; - - let field_info = params - .field_name - .map(|field_name| { - schema - .named_field(type_, (&field_name as &str).intern()) - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "Could not find field with name {}", - field_name - )) - }) - }) - .transpose()? 
- .map(|field_id| { - let field = schema.field(field_id); - FieldSchemaInfo { - name: field.name.item.to_string(), - is_extension: field.is_extension, - } - }); - - // TODO add go-to-definition for client fields - let field_definition_source_info = get_field_definition_source_info_result( - state.get_extra_data_provider().resolve_field_definition( - params.schema_name, - params.type_name, - field_info, - ), - )?; - - Ok(GetSourceLocationOfTypeDefinitionResult { - field_definition_source_info, - }) -} - -fn get_field_definition_source_info_result( - result: Result, String>, -) -> LSPRuntimeResult { - result - .map_err(LSPRuntimeError::UnexpectedError)? - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError( - "Expected result when resolving field definition location".to_string(), - ) - }) -} diff --git a/compiler/crates/relay-lsp/src/graphql_tools.rs b/compiler/crates/relay-lsp/src/graphql_tools.rs new file mode 100644 index 0000000000000..e74fcb8a13c57 --- /dev/null +++ b/compiler/crates/relay-lsp/src/graphql_tools.rs @@ -0,0 +1,287 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::collections::HashSet; +use std::sync::Arc; + +use common::PerfLogger; +use common::SourceLocationKey; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::ExecutableDefinition; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentVariablesSemantic; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::Selection; +use graphql_syntax::parse_executable_with_error_recovery_and_parser_features; +use graphql_text_printer::print_full_operation; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use lsp_types::request::Request; +use lsp_types::Url; +use relay_compiler::config::ProjectConfig; +use relay_compiler::get_parser_features; +use relay_compiler::ProjectName; +use relay_transforms::apply_transforms; +use relay_transforms::CustomTransformsConfig; +use relay_transforms::Programs; +use schema::SDLSchema; +use schema_documentation::SchemaDocumentation; +use serde::Deserialize; +use serde::Serialize; + +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::GlobalState; +use crate::server::LSPState; +use crate::LSPRuntimeError; + +pub(crate) enum GraphQLExecuteQuery {} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct GraphQLExecuteQueryParams { + text: String, + document_path: Option, + schema_name: Option, +} + +impl GraphQLExecuteQueryParams { + fn get_url(&self) -> Option { + if let Some(path) = &self.document_path { + Url::parse(&format!("file://{}", path)).ok() + } else { + None + } + } + + fn get_schema_name(&self) -> StringKey { + if let Some(schema_name) = &self.schema_name { + schema_name.clone().intern() + } else { + "facebook".intern() + } + } +} + +impl Request for GraphQLExecuteQuery { + type Params = GraphQLExecuteQueryParams; + type Result = String; + const METHOD: &'static str = "graphql/executeQuery"; +} 
+ +/// This function will return the program that contains only operation +/// and all referenced fragments. +/// We can use it to print the full query text +fn get_operation_only_program( + operation: Arc, + fragments: Vec>, + program: &Program, +) -> Option { + let mut selections_to_visit: Vec<_> = vec![&operation.selections]; + let mut next_program = Program::new(program.schema.clone()); + next_program.insert_operation(Arc::clone(&operation)); + for fragment in fragments.iter() { + selections_to_visit.push(&fragment.selections); + next_program.insert_fragment(Arc::clone(fragment)); + } + + let mut visited_fragments: HashSet = HashSet::default(); + + while !selections_to_visit.is_empty() { + let current_selections = selections_to_visit.pop()?; + for selection in current_selections { + match selection { + Selection::FragmentSpread(spread) => { + // Skip, if we already visited this fragment + if visited_fragments.contains(&spread.fragment.item) { + continue; + } + visited_fragments.insert(spread.fragment.item); + // Second, if this fragment is already in the `next_program`, + // it selection already added to the visiting stack + if next_program.fragment(spread.fragment.item).is_some() { + continue; + } + + // Finally, add all missing fragment spreads from the full program + let fragment = program.fragment(spread.fragment.item)?; + + selections_to_visit.push(&fragment.selections); + next_program.insert_fragment(Arc::clone(fragment)); + } + Selection::Condition(condition) => { + selections_to_visit.push(&condition.selections); + } + Selection::LinkedField(linked_field) => { + selections_to_visit.push(&linked_field.selections); + } + Selection::InlineFragment(inline_fragment) => { + selections_to_visit.push(&inline_fragment.selections); + } + Selection::ScalarField(_) => {} + } + } + } + + Some(next_program) +} + +/// Given the `Program` that contain operation+all its fragments this +/// function will `apply_transforms` and create a full set of programs, including the 
one +/// that may generate full operation text +fn transform_program( + project_config: &ProjectConfig, + program: Arc, + perf_logger: Arc, + custom_transforms_config: Option<&CustomTransformsConfig>, +) -> Result { + apply_transforms( + project_config, + program, + Default::default(), + perf_logger, + None, + custom_transforms_config, + ) + .map_err(|errors| format!("{:?}", errors)) +} + +fn print_full_operation_text(programs: Programs, operation_name: StringKey) -> Option { + let print_operation_node = programs + .operation_text + .operation(OperationDefinitionName(operation_name))?; + + Some(print_full_operation( + &programs.operation_text, + print_operation_node, + Default::default(), + )) +} + +/// From the list of AST nodes we're trying to extract the operation and possible +/// list of fragment, to construct the initial `Program` that we could later transform +/// and print +fn build_operation_ir_with_fragments( + definitions: &[graphql_syntax::ExecutableDefinition], + schema: Arc, +) -> Result<(Arc, Vec>), String> { + let ir = build_ir_with_extra_features( + &schema, + definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: true, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(graphql_ir::RelayMode), + default_anonymous_operation_name: Some("anonymous".intern()), + allow_custom_scalar_literals: true, // for compatibility + }, + ) + .map_err(|errors| format!("{:?}", errors))?; + + if let Some(operation) = ir.iter().find_map(|item| { + if let ExecutableDefinition::Operation(operation) = item { + Some(Arc::new(operation.clone())) + } else { + None + } + }) { + let fragments = ir + .iter() + .filter_map(|item| { + if let ExecutableDefinition::Fragment(fragment) = item { + Some(Arc::new(fragment.clone())) + } else { + None + } + }) + .collect::>(); + + Ok((operation, fragments)) + } else { + Err("Unable to find an operation.".to_string()) + } +} + +pub(crate) fn get_query_text< + TPerfLogger: PerfLogger + 
'static, + TSchemaDocumentation: SchemaDocumentation, +>( + state: &LSPState, + original_text: String, + project_name: ProjectName, +) -> LSPRuntimeResult { + let schema = state.get_schema(&project_name.into())?; + + let project_config = state + .config + .enabled_projects() + .find(|project_config| project_config.name == project_name) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "Unable to get project config for project {}.", + project_name + )) + })?; + + let result = parse_executable_with_error_recovery_and_parser_features( + &original_text, + SourceLocationKey::Generated, + get_parser_features(project_config), + ); + + if !&result.diagnostics.is_empty() { + let err_string = "".to_string(); + return Err(LSPRuntimeError::UnexpectedError( + result + .diagnostics + .iter() + .fold(err_string, |acc, err| format!("{acc} - {err}\n")), + )); + } + + let (operation, fragments) = + build_operation_ir_with_fragments(&result.item.definitions, schema) + .map_err(LSPRuntimeError::UnexpectedError)?; + + let operation_name = operation.name.item.0; + let program = state.get_program(&project_name.into())?; + + let query_text = + if let Some(program) = get_operation_only_program(operation, fragments, &program) { + let programs = transform_program( + project_config, + Arc::new(program), + Arc::clone(&state.perf_logger), + state.config.custom_transforms.as_ref(), + ) + .map_err(LSPRuntimeError::UnexpectedError)?; + + print_full_operation_text(programs, operation_name).unwrap_or(original_text) + } else { + original_text + }; + + Ok(query_text) +} + +pub(crate) fn on_graphql_execute_query( + state: &impl GlobalState, + params: GraphQLExecuteQueryParams, +) -> LSPRuntimeResult<::Result> { + let project_name = if let Some(url) = ¶ms.get_url() { + state + .extract_project_name_from_url(url) + .unwrap_or_else(|_| params.get_schema_name()) + } else { + params.get_schema_name() + }; + + state.get_full_query_text(params.text, &project_name) +} diff --git 
a/compiler/crates/relay-lsp/src/graphql_tools/mod.rs b/compiler/crates/relay-lsp/src/graphql_tools/mod.rs deleted file mode 100644 index 978c5517754c9..0000000000000 --- a/compiler/crates/relay-lsp/src/graphql_tools/mod.rs +++ /dev/null @@ -1,280 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::collections::HashSet; -use std::sync::Arc; - -use common::PerfLogger; -use common::SourceLocationKey; -use graphql_ir::build_ir_with_extra_features; -use graphql_ir::BuilderOptions; -use graphql_ir::ExecutableDefinition; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentVariablesSemantic; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::Selection; -use graphql_syntax::parse_executable_with_error_recovery; -use graphql_text_printer::print_full_operation; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use lsp_types::request::Request; -use lsp_types::Url; -use relay_compiler::config::ProjectConfig; -use relay_transforms::apply_transforms; -use relay_transforms::CustomTransformsConfig; -use relay_transforms::Programs; -use schema::SDLSchema; -use schema_documentation::SchemaDocumentation; -use serde::Deserialize; -use serde::Serialize; - -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::server::GlobalState; -use crate::server::LSPState; -use crate::LSPRuntimeError; - -pub(crate) enum GraphQLExecuteQuery {} - -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct GraphQLExecuteQueryParams { - text: String, - document_path: Option, - schema_name: Option, -} - -impl GraphQLExecuteQueryParams { - fn get_url(&self) -> Option { - if let Some(path) = &self.document_path { - Url::parse(&format!("file://{}", path)).ok() - } else { 
- None - } - } - - fn get_schema_name(&self) -> StringKey { - if let Some(schema_name) = &self.schema_name { - schema_name.clone().intern() - } else { - "facebook".intern() - } - } -} - -impl Request for GraphQLExecuteQuery { - type Params = GraphQLExecuteQueryParams; - type Result = String; - const METHOD: &'static str = "graphql/executeQuery"; -} - -/// This function will return the program that contains only operation -/// and all referenced fragments. -/// We can use it to print the full query text -fn get_operation_only_program( - operation: Arc, - fragments: Vec>, - program: &Program, -) -> Option { - let mut selections_to_visit: Vec<_> = vec![&operation.selections]; - let mut next_program = Program::new(program.schema.clone()); - next_program.insert_operation(Arc::clone(&operation)); - for fragment in fragments.iter() { - selections_to_visit.push(&fragment.selections); - next_program.insert_fragment(Arc::clone(fragment)); - } - - let mut visited_fragments: HashSet = HashSet::default(); - - while !selections_to_visit.is_empty() { - let current_selections = selections_to_visit.pop()?; - for selection in current_selections { - match selection { - Selection::FragmentSpread(spread) => { - // Skip, if we already visited this fragment - if visited_fragments.contains(&spread.fragment.item) { - continue; - } - visited_fragments.insert(spread.fragment.item); - // Second, if this fragment is already in the `next_program`, - // it selection already added to the visiting stack - if next_program.fragment(spread.fragment.item).is_some() { - continue; - } - - // Finally, add all missing fragment spreads from the full program - let fragment = program.fragment(spread.fragment.item)?; - - selections_to_visit.push(&fragment.selections); - next_program.insert_fragment(Arc::clone(fragment)); - } - Selection::Condition(condition) => { - selections_to_visit.push(&condition.selections); - } - Selection::LinkedField(linked_field) => { - 
selections_to_visit.push(&linked_field.selections); - } - Selection::InlineFragment(inline_fragment) => { - selections_to_visit.push(&inline_fragment.selections); - } - Selection::ScalarField(_) => {} - } - } - } - - Some(next_program) -} - -/// Given the `Program` that contain operation+all its fragments this -/// function will `apply_transforms` and create a full set of programs, including the one -/// that may generate full operation text -fn transform_program( - project_config: &ProjectConfig, - program: Arc, - perf_logger: Arc, - custom_transforms_config: Option<&CustomTransformsConfig>, -) -> Result { - apply_transforms( - project_config, - program, - Default::default(), - perf_logger, - None, - custom_transforms_config, - ) - .map_err(|errors| format!("{:?}", errors)) -} - -fn print_full_operation_text(programs: Programs, operation_name: StringKey) -> Option { - let print_operation_node = programs - .operation_text - .operation(OperationDefinitionName(operation_name))?; - - Some(print_full_operation( - &programs.operation_text, - print_operation_node, - Default::default(), - )) -} - -/// From the list of AST nodes we're trying to extract the operation and possible -/// list of fragment, to construct the initial `Program` that we could later transform -/// and print -fn build_operation_ir_with_fragments( - definitions: &[graphql_syntax::ExecutableDefinition], - schema: Arc, -) -> Result<(Arc, Vec>), String> { - let ir = build_ir_with_extra_features( - &schema, - definitions, - &BuilderOptions { - allow_undefined_fragment_spreads: true, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: Some(graphql_ir::RelayMode), - default_anonymous_operation_name: Some("anonymous".intern()), - }, - ) - .map_err(|errors| format!("{:?}", errors))?; - - if let Some(operation) = ir.iter().find_map(|item| { - if let ExecutableDefinition::Operation(operation) = item { - Some(Arc::new(operation.clone())) - } else { - None - } - }) { - let 
fragments = ir - .iter() - .filter_map(|item| { - if let ExecutableDefinition::Fragment(fragment) = item { - Some(Arc::new(fragment.clone())) - } else { - None - } - }) - .collect::>(); - - Ok((operation, fragments)) - } else { - Err("Unable to find an operation.".to_string()) - } -} - -pub(crate) fn get_query_text< - TPerfLogger: PerfLogger + 'static, - TSchemaDocumentation: SchemaDocumentation, ->( - state: &LSPState, - original_text: String, - project_name: &StringKey, -) -> LSPRuntimeResult { - let schema = state.get_schema(project_name)?; - - let project_config = state - .config - .enabled_projects() - .find(|project_config| &project_config.name == project_name) - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "Unable to get project config for project {}.", - project_name - )) - })?; - - let result = parse_executable_with_error_recovery(&original_text, SourceLocationKey::Generated); - - if !&result.diagnostics.is_empty() { - return Err(LSPRuntimeError::UnexpectedError( - result - .diagnostics - .iter() - .map(|err| format!("- {}\n", err)) - .collect::(), - )); - } - - let (operation, fragments) = - build_operation_ir_with_fragments(&result.item.definitions, schema) - .map_err(LSPRuntimeError::UnexpectedError)?; - - let operation_name = operation.name.item.0; - let program = state.get_program(project_name)?; - - let query_text = - if let Some(program) = get_operation_only_program(operation, fragments, &program) { - let programs = transform_program( - project_config, - Arc::new(program), - Arc::clone(&state.perf_logger), - state.config.custom_transforms.as_ref(), - ) - .map_err(LSPRuntimeError::UnexpectedError)?; - - print_full_operation_text(programs, operation_name).unwrap_or(original_text) - } else { - original_text - }; - - Ok(query_text) -} - -pub(crate) fn on_graphql_execute_query( - state: &impl GlobalState, - params: GraphQLExecuteQueryParams, -) -> LSPRuntimeResult<::Result> { - let project_name = if let Some(url) = ¶ms.get_url() { - 
state - .extract_project_name_from_url(url) - .unwrap_or_else(|_| params.get_schema_name()) - } else { - params.get_schema_name() - }; - - state.get_full_query_text(params.text, &project_name) -} diff --git a/compiler/crates/relay-lsp/src/hover.rs b/compiler/crates/relay-lsp/src/hover.rs new file mode 100644 index 0000000000000..c0ea6d36ed953 --- /dev/null +++ b/compiler/crates/relay-lsp/src/hover.rs @@ -0,0 +1,127 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//! Utilities for providing the hover feature + +use intern::Lookup; +use lsp_types::request::HoverRequest; +use lsp_types::request::Request; +use lsp_types::LanguageString; +use lsp_types::MarkedString; +use resolution_path::ResolvePosition; +use serde::Serialize; + +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::GlobalState; + +mod with_resolution_path; +pub use with_resolution_path::get_hover; + +pub use self::with_resolution_path::ContentConsumerType; + +fn graphql_marked_string(value: String) -> MarkedString { + MarkedString::LanguageString(LanguageString { + language: "graphql".to_string(), + value, + }) +} + +/// This will provide a more accurate information about some of the specific Relay directives +/// that cannot be expressed via SDL +fn argument_definition_hover_info(directive_name: &str) -> Option { + match directive_name { + "argumentDefinitions" => Some( + r#" +`@argumentDefinitions` is a directive used to specify arguments taken by a fragment. + +--- +@see: https://relay.dev/docs/en/graphql-in-relay.html#argumentdefinitions +"#, + ), + "arguments" => Some( + r#" +`@arguments` is a directive used to pass arguments to a fragment that was defined using `@argumentDefinitions`. 
+ +--- +@see: https://relay.dev/docs/en/graphql-in-relay.html#arguments +"#, + ), + "uncheckedArguments_DEPRECATED" => Some( + r#" +DEPRECATED version of `@arguments` directive. +`@arguments` is a directive used to pass arguments to a fragment that was defined using `@argumentDefinitions`. + +--- +@see: https://relay.dev/docs/en/graphql-in-relay.html#arguments +"#, + ), + _ => None, + }.map(|s| MarkedString::String(s.to_string())) +} + +pub fn on_hover( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let (document, position_span) = + state.extract_executable_document_from_text(¶ms.text_document_position_params, 1)?; + + let resolution_path = document.resolve((), position_span); + + let project_name = state + .extract_project_name_from_url(¶ms.text_document_position_params.text_document.uri)?; + + let schema = state.get_schema(&project_name)?; + + let schema_documentation = state.get_schema_documentation(project_name.lookup()); + + Ok(get_hover( + &resolution_path, + &schema, + project_name, + state.get_extra_data_provider(), + &schema_documentation, + &state.get_program(&project_name)?, + state.get_content_consumer_type(), + )) +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct GraphQLSchemaExplorerParams<'a> { + path: Vec<&'a str>, + + schema_name: &'a str, + + #[serde(skip_serializing_if = "Option::is_none")] + filter: Option<&'a str>, +} + +fn get_open_schema_explorer_command_link( + text: &str, + params: &GraphQLSchemaExplorerParams<'_>, +) -> String { + format!( + "[{}](command:{})", + text, + get_open_schema_explorer_command(params) + ) +} + +fn get_open_schema_explorer_command(params: &GraphQLSchemaExplorerParams<'_>) -> String { + // see https://docs.rs/percent-encoding/2.1.0/percent_encoding/ + use percent_encoding::utf8_percent_encode; + use percent_encoding::AsciiSet; + use percent_encoding::CONTROLS; + + const FRAGMENT: AsciiSet = CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`'); + + 
format!( + "nuclide.relay-lsp.openSchemaExplorer?{}", + utf8_percent_encode(&serde_json::to_string(params).unwrap(), &FRAGMENT) + ) +} diff --git a/compiler/crates/relay-lsp/src/hover/mod.rs b/compiler/crates/relay-lsp/src/hover/mod.rs deleted file mode 100644 index d0904ae31e773..0000000000000 --- a/compiler/crates/relay-lsp/src/hover/mod.rs +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -//! Utilities for providing the hover feature - -use intern::Lookup; -use lsp_types::request::HoverRequest; -use lsp_types::request::Request; -use lsp_types::LanguageString; -use lsp_types::MarkedString; -use resolution_path::ResolvePosition; -use serde::Serialize; - -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::server::GlobalState; - -mod with_resolution_path; -pub use with_resolution_path::get_hover; - -pub use self::with_resolution_path::ContentConsumerType; - -fn graphql_marked_string(value: String) -> MarkedString { - MarkedString::LanguageString(LanguageString { - language: "graphql".to_string(), - value, - }) -} - -/// This will provide a more accurate information about some of the specific Relay directives -/// that cannot be expressed via SDL -fn argument_definition_hover_info(directive_name: &str) -> Option { - match directive_name { - "argumentDefinitions" => Some( - r#" -`@argumentDefinitions` is a directive used to specify arguments taken by a fragment. - ---- -@see: https://relay.dev/docs/en/graphql-in-relay.html#argumentdefinitions -"#, - ), - "arguments" => Some( - r#" -`@arguments` is a directive used to pass arguments to a fragment that was defined using `@argumentDefinitions`. - ---- -@see: https://relay.dev/docs/en/graphql-in-relay.html#arguments -"#, - ), - "uncheckedArguments_DEPRECATED" => Some( - r#" -DEPRECATED version of `@arguments` directive. 
-`@arguments` is a directive used to pass arguments to a fragment that was defined using `@argumentDefinitions`. - ---- -@see: https://relay.dev/docs/en/graphql-in-relay.html#arguments -"#, - ), - _ => None, - }.map(|s| MarkedString::String(s.to_string())) -} - -pub fn on_hover( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let (document, position_span) = - state.extract_executable_document_from_text(¶ms.text_document_position_params, 1)?; - - let resolution_path = document.resolve((), position_span); - - let project_name = state - .extract_project_name_from_url(¶ms.text_document_position_params.text_document.uri)?; - - let schema = state.get_schema(&project_name)?; - - let schema_documentation = state.get_schema_documentation(project_name.lookup()); - - Ok(get_hover( - &resolution_path, - &schema, - project_name, - &*state.get_extra_data_provider(), - &schema_documentation, - &state.get_program(&project_name)?, - state.get_content_consumer_type(), - )) -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -struct GraphQLSchemaExplorerParams<'a> { - path: Vec<&'a str>, - - schema_name: &'a str, - - #[serde(skip_serializing_if = "Option::is_none")] - filter: Option<&'a str>, -} - -fn get_open_schema_explorer_command_link( - text: &str, - params: &GraphQLSchemaExplorerParams<'_>, -) -> String { - format!( - "[{}](command:{})", - text, - get_open_schema_explorer_command(params) - ) -} - -fn get_open_schema_explorer_command(params: &GraphQLSchemaExplorerParams<'_>) -> String { - // see https://docs.rs/percent-encoding/2.1.0/percent_encoding/ - use percent_encoding::utf8_percent_encode; - use percent_encoding::AsciiSet; - use percent_encoding::CONTROLS; - - const FRAGMENT: AsciiSet = CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`'); - - return format!( - "nuclide.relay-lsp.openSchemaExplorer?{}", - utf8_percent_encode(&serde_json::to_string(params).unwrap(), &FRAGMENT) - ); -} diff --git 
a/compiler/crates/relay-lsp/src/hover/with_resolution_path.rs b/compiler/crates/relay-lsp/src/hover/with_resolution_path.rs index 9d96f68cce176..045960b0f9fa5 100644 --- a/compiler/crates/relay-lsp/src/hover/with_resolution_path.rs +++ b/compiler/crates/relay-lsp/src/hover/with_resolution_path.rs @@ -24,7 +24,8 @@ use lsp_types::HoverContents; use lsp_types::MarkedString; use resolution_path::ArgumentPath; use resolution_path::ArgumentRoot; -use resolution_path::ConstantArgPath; +use resolution_path::ConstantArgumentParent; +use resolution_path::ConstantArgumentPath; use resolution_path::ConstantBooleanPath; use resolution_path::ConstantEnumPath; use resolution_path::ConstantFloatPath; @@ -37,6 +38,7 @@ use resolution_path::ConstantStringPath; use resolution_path::ConstantValueParent; use resolution_path::ConstantValuePath; use resolution_path::ConstantValueRoot; +use resolution_path::DefaultValueParent; use resolution_path::DefaultValuePath; use resolution_path::DirectivePath; use resolution_path::FragmentDefinitionPath; @@ -130,7 +132,7 @@ enum HoverBehavior<'a> { FragmentSpread(&'a FragmentSpreadPath<'a>), Directive(&'a DirectivePath<'a>), FragmentDefinition(&'a FragmentDefinition), - ExecutableDocument, + None, } fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> HoverBehavior<'a> { @@ -172,10 +174,10 @@ fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> ResolutionPath::DefaultValue(DefaultValuePath { inner: _, parent: - VariableDefinitionPath { + DefaultValueParent::VariableDefinition(VariableDefinitionPath { inner: variable_definition, parent: _, - }, + }), }) => HoverBehavior::VariableDefinition(variable_definition), ResolutionPath::VariableDefinition(VariableDefinitionPath { inner: variable_definition, @@ -184,21 +186,21 @@ fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> ResolutionPath::NonNullTypeAnnotation(NonNullTypeAnnotationPath { inner: _, parent: 
non_null_annotation_parent, - }) => HoverBehavior::VariableDefinition( - non_null_annotation_parent - .parent - .find_variable_definition_path() - .inner, - ), + }) => non_null_annotation_parent + .parent + .find_variable_definition_path() + .map_or(HoverBehavior::None, |path| { + HoverBehavior::VariableDefinition(path.inner) + }), ResolutionPath::ListTypeAnnotation(ListTypeAnnotationPath { inner: _, parent: list_type_annotation_parent, - }) => HoverBehavior::VariableDefinition( - list_type_annotation_parent - .parent - .find_variable_definition_path() - .inner, - ), + }) => list_type_annotation_parent + .parent + .find_variable_definition_path() + .map_or(HoverBehavior::None, |path| { + HoverBehavior::VariableDefinition(path.inner) + }), ResolutionPath::Ident(IdentPath { inner: _, parent: @@ -210,9 +212,11 @@ fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> parent: type_annotation_parent, }, }), - }) => HoverBehavior::VariableDefinition( - type_annotation_parent.find_variable_definition_path().inner, - ), + }) => type_annotation_parent + .find_variable_definition_path() + .map_or(HoverBehavior::None, |path| { + HoverBehavior::VariableDefinition(path.inner) + }), // Explicitly don't show hovers for VariableDefinitionList ResolutionPath::VariableDefinitionList(_) => HoverBehavior::VariableDefinitionList, @@ -274,23 +278,35 @@ fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> ResolutionPath::Ident(IdentPath { inner: _, parent: - IdentParent::ConstantArgKey(ConstantArgPath { + IdentParent::ConstantArgumentKey(ConstantArgumentPath { inner: _, parent: - ConstantObjPath { + ConstantArgumentParent::ConstantObj(ConstantObjPath { inner: _, parent: constant_value_path, - }, + }), }), }) => HoverBehavior::ConstantValue(&constant_value_path.parent), + ResolutionPath::Ident(IdentPath { + inner: _, + parent: + IdentParent::ConstantArgumentKey(ConstantArgumentPath { + inner: _, + parent: 
ConstantArgumentParent::ConstantDirective(_), + }), + }) => HoverBehavior::None, ResolutionPath::ConstantObj(ConstantObjPath { inner: _, parent: constant_value_path, }) => HoverBehavior::ConstantValue(&constant_value_path.parent), - ResolutionPath::ConstantArg(ConstantArgPath { + ResolutionPath::ConstantArgument(ConstantArgumentPath { inner: _, - parent: constant_obj_path, + parent: ConstantArgumentParent::ConstantObj(constant_obj_path), }) => HoverBehavior::ConstantValue(&constant_obj_path.parent.parent), + ResolutionPath::ConstantArgument(ConstantArgumentPath { + inner: _, + parent: ConstantArgumentParent::ConstantDirective(_), + }) => HoverBehavior::None, // Scalar and linked fields ResolutionPath::ScalarField(ScalarFieldPath { @@ -427,12 +443,117 @@ fn get_hover_behavior_from_resolution_path<'a>(path: &'a ResolutionPath<'a>) -> }) => HoverBehavior::FragmentDefinition(fragment_definition), // Explicitly show no hover content of operation/fragment definitions - ResolutionPath::ExecutableDocument(_) => HoverBehavior::ExecutableDocument, + ResolutionPath::ExecutableDocument(_) => HoverBehavior::None, + ResolutionPath::SchemaDocument(_) => HoverBehavior::None, + ResolutionPath::SchemaDefinition(_) => HoverBehavior::None, + ResolutionPath::SchemaExtension(_) => HoverBehavior::None, + ResolutionPath::OperationTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::OperationTypeDefinitionType(_), + }) => HoverBehavior::None, + ResolutionPath::DirectiveDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::DirectiveDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::InputValueDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputValueDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::DefaultValue(DefaultValuePath { + inner: _, + parent: 
DefaultValueParent::InputValueDefinition(_), + }) => HoverBehavior::None, + ResolutionPath::UnionTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::UnionTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeMemberType(_), + }) => HoverBehavior::None, + ResolutionPath::InterfaceTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InterfaceTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::InterfaceTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InterfaceTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ImplementedInterfaceName(_), + }) => HoverBehavior::None, + ResolutionPath::ObjectTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ObjectTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::ObjectTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ObjectTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::InputObjectTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputObjectTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::InputObjectTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputObjectTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::EnumTypeDefinition(_) => HoverBehavior::None, + 
ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::EnumTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::EnumValueDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumValueDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::ScalarTypeDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ScalarTypeDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::ScalarTypeExtension(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ScalarTypeExtensionName(_), + }) => HoverBehavior::None, + ResolutionPath::FieldDefinition(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::FieldDefinitionName(_), + }) => HoverBehavior::None, + ResolutionPath::ConstantDirective(_) => HoverBehavior::None, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(_), + }) => HoverBehavior::None, } } -fn get_hover_contents<'a>( - hover_behavior: HoverBehavior<'a>, +fn get_hover_contents( + hover_behavior: HoverBehavior<'_>, schema: &SDLSchema, schema_name: StringKey, extra_data_provider: &dyn LSPExtraDataProvider, @@ -454,6 +575,7 @@ fn get_hover_contents<'a>( schema, schema_name, schema_documentation, + program, content_consumer_type, ), HoverBehavior::ScalarOrLinkedField(field_name, selection_path) => { @@ -471,6 +593,7 @@ fn get_hover_contents<'a>( schema, schema_name, schema_documentation, + program, content_consumer_type, ), HoverBehavior::InlineFragment(inline_fragment_path) => on_hover_inline_fragment( @@ -484,6 +607,7 @@ fn get_hover_contents<'a>( fragment_spread_path, schema, schema_name, + 
schema_documentation, program, content_consumer_type, ), @@ -492,10 +616,11 @@ fn get_hover_contents<'a>( fragment_definition, schema, schema_name, + schema_documentation, content_consumer_type, ), - HoverBehavior::ExecutableDocument => None, + HoverBehavior::None => None, } } @@ -525,11 +650,10 @@ fn on_hover_variable_definition( let variable_identifier = &variable_definition.name; let variable_inner_type = variable_definition.type_.inner().name.value; let variable_type = &variable_definition.type_; - let variable_default_value = variable_definition - .default_value - .as_ref() - .map(|default_value| format!(" with default value `{}`", default_value.value)) - .unwrap_or_else(|| "".to_string()); + let variable_default_value = variable_definition.default_value.as_ref().map_or_else( + || "".to_string(), + |default_value| format!(" with default value `{}`", default_value.value), + ); HoverContents::Scalar(MarkedString::String(format!( "`{}`: **{}**{}", @@ -551,6 +675,7 @@ fn on_hover_constant_value<'a>( schema: &SDLSchema, schema_name: StringKey, schema_documentation: &impl SchemaDocumentation, + program: &Program, content_consumer_type: ContentConsumerType, ) -> Option { match constant_value_parent.find_constant_value_root() { @@ -566,16 +691,20 @@ fn on_hover_constant_value<'a>( schema, schema_name, schema_documentation, + program, content_consumer_type, ), + ConstantValueRoot::InputValueDefinition(_) => None, + ConstantValueRoot::ConstantArgument(_) => None, } } -fn on_hover_argument_path<'a>( - argument_path: &ArgumentPath<'a>, +fn on_hover_argument_path( + argument_path: &ArgumentPath<'_>, schema: &SDLSchema, schema_name: StringKey, schema_documentation: &impl SchemaDocumentation, + program: &Program, content_consumer_type: ContentConsumerType, ) -> Option { let ArgumentPath { @@ -608,10 +737,18 @@ fn on_hover_argument_path<'a>( schema_documentation, content_consumer_type, ), + ArgumentRoot::FragmentSpread(fragment_spread_path) => 
get_fragment_spread_hover_content( + fragment_spread_path, + schema, + schema_name, + schema_documentation, + program, + content_consumer_type, + ), }?; let mut contents = vec![argument_info]; - contents.extend(field_hover_info.into_iter()); + contents.extend(field_hover_info); Some(HoverContents::Array(contents)) } @@ -678,17 +815,25 @@ fn get_scalar_or_linked_field_hover_content( type_path.push(field_type_name); - hover_contents.push(MarkedString::String(format!( - "Type: **{}**", - content_consumer_type.render_text_with_params( - &schema.get_type_string(&field.type_), - &GraphQLSchemaExplorerParams { - path: type_path, - schema_name: schema_name.lookup(), - filter: None, - } - ) - ))); + let type_name = content_consumer_type.render_text_with_params( + &schema.get_type_string(&field.type_), + &GraphQLSchemaExplorerParams { + path: type_path, + schema_name: schema_name.lookup(), + filter: None, + }, + ); + + if let Some(field_type_hack_source) = schema_documentation.get_hack_source(field_type_name) { + hover_contents.push(MarkedString::String(format!( + "Type: [**{}**]({})", + type_name, + codex_url_for_symbol(field_type_hack_source), + ))); + } else { + hover_contents.push(MarkedString::String(format!("Type: **{}**", type_name,))); + } + if let Some(type_description) = schema_documentation.get_type_description(field_type_name) { hover_contents.push(MarkedString::String(type_description.to_string())); } @@ -702,7 +847,7 @@ fn get_scalar_or_linked_field_hover_content( let arg_type_name = schema.get_type_name(arg.type_.inner()).lookup(); hover_contents.push(MarkedString::from_markdown(format!( "{}: **{}**{}\n\n{}", - arg.name, + arg.name.item, content_consumer_type.render_text_with_params( &schema.get_type_string(&arg.type_), &GraphQLSchemaExplorerParams { @@ -719,7 +864,7 @@ fn get_scalar_or_linked_field_hover_content( if let Some(description) = schema_documentation.get_field_argument_description( parent_type_name, field.name.item.lookup(), - arg.name.0.lookup(), + 
arg.name.item.0.lookup(), ) { description.to_string() } else { @@ -743,6 +888,17 @@ fn get_scalar_or_linked_field_hover_content( }; hover_contents.push(MarkedString::String(msg.to_string())) } + + if let Some(field_hack_source) = + schema_documentation.get_field_hack_source(parent_type_name, field.name.item.lookup()) + { + hover_contents.push(MarkedString::String(format!( + "View [**{}**]({}) in Codex", + field_hack_source, + codex_url_for_symbol(field_hack_source), + ))); + } + Some(hover_contents) } @@ -797,23 +953,52 @@ fn on_hover_inline_fragment( ) )); + let mut hover_contents: Vec = vec![first_line]; + if let Some(description) = description { - Some(HoverContents::Array(vec![ - first_line, - MarkedString::String(description.to_string()), - ])) - } else { - Some(HoverContents::Scalar(first_line)) + hover_contents.push(MarkedString::String(description.to_string())); } + + if let Some(hack_source) = schema_documentation.get_hack_source(inline_fragment_condition) { + let codex_link = MarkedString::String(format!( + "View [**{}**]({}) in Codex", + hack_source, + codex_url_for_symbol(hack_source), + )); + hover_contents.push(codex_link); + } + + Some(HoverContents::Array(hover_contents)) } fn on_hover_fragment_spread<'a>( fragment_spread_path: &'a FragmentSpreadPath<'a>, schema: &SDLSchema, schema_name: StringKey, + schema_documentation: &impl SchemaDocumentation, program: &Program, content_consumer_type: ContentConsumerType, ) -> Option { + let hover_contents = get_fragment_spread_hover_content( + fragment_spread_path, + schema, + schema_name, + schema_documentation, + program, + content_consumer_type, + )?; + + Some(HoverContents::Array(hover_contents)) +} + +fn get_fragment_spread_hover_content<'a>( + fragment_spread_path: &'a FragmentSpreadPath<'a>, + schema: &SDLSchema, + schema_name: StringKey, + schema_documentation: &impl SchemaDocumentation, + program: &Program, + content_consumer_type: ContentConsumerType, +) -> Option> { // TODO eventually show 
information about whether the fragment spread is // infallible, fallible, interface-on-interface, etc. @@ -831,17 +1016,19 @@ fn on_hover_fragment_spread<'a>( .get_type_name(fragment_definition.type_condition) .lookup(); + let rendered_fragment_type_name = content_consumer_type.render_text_with_params( + fragment_type_name, + &GraphQLSchemaExplorerParams { + path: vec![fragment_type_name], + schema_name: schema_name.lookup(), + filter: None, + }, + ); + hover_contents.push(MarkedString::String(format!( "fragment {} on {}", fragment_spread.name.value.lookup(), - content_consumer_type.render_text_with_params( - fragment_type_name, - &GraphQLSchemaExplorerParams { - path: vec![fragment_type_name], - schema_name: schema_name.lookup(), - filter: None - } - ) + rendered_fragment_type_name, ))); if !fragment_definition.variable_definitions.is_empty() { @@ -899,19 +1086,36 @@ For example: )); } - Some(HoverContents::Array(hover_contents)) + if let Some(type_description) = schema_documentation.get_type_description(fragment_type_name) { + if let Some(hack_source) = schema_documentation.get_hack_source(fragment_type_name) { + hover_contents.push(MarkedString::String(format!( + "Type Condition: on [**{}**]({})", + rendered_fragment_type_name, + codex_url_for_symbol(hack_source), + ))); + } else { + hover_contents.push(MarkedString::String(format!( + "Type Condition: on **{}**", + rendered_fragment_type_name, + ))); + } + + hover_contents.push(MarkedString::String(type_description.to_string())); + } + + Some(hover_contents) } -fn on_hover_directive<'a>( - directive_path: &DirectivePath<'a>, +fn on_hover_directive( + directive_path: &DirectivePath<'_>, schema: &SDLSchema, ) -> Option { let content = get_directive_hover_content(directive_path, schema)?; Some(HoverContents::Array(content)) } -fn get_directive_hover_content<'a>( - directive_path: &DirectivePath<'a>, +fn get_directive_hover_content( + directive_path: &DirectivePath<'_>, schema: &SDLSchema, ) -> Option> { let 
DirectivePath { @@ -943,6 +1147,7 @@ fn on_hover_fragment_definition( fragment_definition: &FragmentDefinition, schema: &SDLSchema, schema_name: StringKey, + schema_documentation: &impl SchemaDocumentation, content_consumer_type: ContentConsumerType, ) -> Option { let fragment_name = fragment_definition.name.value; @@ -951,35 +1156,56 @@ fn on_hover_fragment_definition( let type_name = schema.get_type_name(fragment_type); + let rendered_parent_type_name = content_consumer_type.render_text_with_params( + type_name.lookup(), + &GraphQLSchemaExplorerParams { + path: vec![type_name.lookup()], + schema_name: schema_name.lookup(), + filter: None, + }, + ); + let title = MarkedString::from_markdown(format!( "fragment {} on {}", - fragment_name, - content_consumer_type.render_text_with_params( - type_name.lookup(), - &GraphQLSchemaExplorerParams { - path: vec![type_name.lookup()], - schema_name: schema_name.lookup(), - filter: None - } - ) + fragment_name, rendered_parent_type_name )); - let hover_contents = if matches!(content_consumer_type, ContentConsumerType::Relay) { - HoverContents::Array(vec![ - title, - MarkedString::String( - r#"Fragments let you select fields, + let mut hover_contents: Vec = vec![title]; + + if matches!(content_consumer_type, ContentConsumerType::Relay) { + hover_contents.push(MarkedString::String( + r#"Fragments let you select fields, and then include them in queries where you need to. 
--- @see: https://graphql.org/learn/queries/#fragments "# - .to_string(), - ), - ]) - } else { - HoverContents::Scalar(title) + .to_string(), + )) }; - Some(hover_contents) + if let Some(type_description) = schema_documentation.get_type_description(type_name.lookup()) { + if let Some(hack_source) = schema_documentation.get_hack_source(type_name.lookup()) { + hover_contents.push(MarkedString::String(format!( + "Type Condition: on [**{}**]({})", + rendered_parent_type_name, + codex_url_for_symbol(hack_source), + ))); + } else { + hover_contents.push(MarkedString::String(format!( + "Type Condition: on **{}**", + rendered_parent_type_name + ))); + } + + hover_contents.push(MarkedString::String(type_description.to_string())); + } + + Some(HoverContents::Array(hover_contents)) +} + +fn codex_url_for_symbol(symbol: &str) -> String { + // sanitize the symbol first by replacing instances of "::" with "/" to avoid breaking codex links + let sanitized_symbol = str::replace(symbol, "::", "/"); + format!("https://www.internalfb.com/code/symbol/www/php/{sanitized_symbol}") } diff --git a/compiler/crates/relay-lsp/src/inlay_hints.rs b/compiler/crates/relay-lsp/src/inlay_hints.rs new file mode 100644 index 0000000000000..18cb653953519 --- /dev/null +++ b/compiler/crates/relay-lsp/src/inlay_hints.rs @@ -0,0 +1,169 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Location; +use common::Span; +use graphql_ir::Argument; +use graphql_ir::FragmentSpread; +use graphql_ir::InlineFragment; +use graphql_ir::Visitor; +use intern::string_key::StringKey; +use lsp_types::request::InlayHintRequest; +use lsp_types::request::Request; +use lsp_types::InlayHint; +use lsp_types::InlayHintLabel; +use lsp_types::InlayHintTooltip; +use lsp_types::MarkupContent; +use schema::SDLSchema; +use schema::Schema; + +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::build_ir_for_lsp; +use crate::server::GlobalState; +use crate::utils::is_file_uri_in_dir; +use crate::LSPRuntimeError; + +pub fn on_inlay_hint_request( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let uri = params.text_document.uri.clone(); + let root_dir = state.root_dir(); + + if !is_file_uri_in_dir(root_dir, &uri) { + return Err(LSPRuntimeError::ExpectedError); + } + + let project_name = state.extract_project_name_from_url(&uri)?; + let schema = state.get_schema(&project_name)?; + let asts = state.resolve_executable_definitions(&uri)?; + let irs = build_ir_for_lsp(&schema, &asts).map_err(|_| LSPRuntimeError::ExpectedError)?; + let mut visitor = InlayHintVisitor::new(&schema); + for executable_definition in irs { + visitor.visit_executable_definition(&executable_definition); + } + + if visitor.inlay_hints.is_empty() { + return Ok(None); + } + + let inlay_hints = visitor + .inlay_hints + .into_iter() + .filter_map(|hint| hint.into_inlay_hint(state).ok()) + .collect(); + + Ok(Some(inlay_hints)) +} + +// Simplified version of the InlayHint struct that uses Relay location. Assumes +// the following: +// 1. The hint will be placed at the start of the location +// 2. Padding right should be added +// 3. 
Tooltips are rendered as markdown +struct Hint { + location: Location, + label: String, + tooltip: Option, +} + +impl Hint { + // Resolve Relay location to LSP location and create an InlayHint + fn into_inlay_hint(self, state: &impl GlobalState) -> LSPRuntimeResult { + let lsp_location = + state.transform_relay_location_in_editor_to_lsp_location(self.location)?; + Ok(InlayHint { + position: lsp_location.range.start, + label: InlayHintLabel::String(self.label), + kind: None, + text_edits: None, + tooltip: self.tooltip.map(|tooltip| { + InlayHintTooltip::MarkupContent(MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: tooltip, + }) + }), + padding_left: None, + padding_right: Some(true), + data: None, + }) + } +} + +struct InlayHintVisitor<'a> { + schema: &'a SDLSchema, + inlay_hints: Vec, +} + +impl<'a> InlayHintVisitor<'a> { + fn new(schema: &'a SDLSchema) -> Self { + Self { + schema, + inlay_hints: vec![], + } + } + + fn add_alias_hint(&mut self, alias: StringKey, location: Location) { + self.inlay_hints.push(Hint { + location, + label: format!("{}:", alias), + tooltip: Some("Fragment alias from the attached `@alias` directive. 
[Read More](https://relay.dev/docs/next/guides/alias-directive/).".to_string()), + }); + } + + fn add_field_argument_hints(&mut self, field_def: &schema::Field, arguments: &[Argument]) { + for arg in arguments { + if let Some(arg_def) = field_def.arguments.named(arg.name.item) { + let arg_type = self.schema.get_type_string(&arg_def.type_); + self.inlay_hints.push(Hint { + location: arg.value.location, + label: arg_type, + tooltip: None, + }); + } + } + } +} + +impl Visitor for InlayHintVisitor<'_> { + const NAME: &'static str = "InlayHintVisitor"; + + const VISIT_ARGUMENTS: bool = false; + + const VISIT_DIRECTIVES: bool = false; + + fn visit_scalar_field(&mut self, field: &graphql_ir::ScalarField) { + let field_def = self.schema.field(field.definition.item); + self.add_field_argument_hints(field_def, &field.arguments) + } + + fn visit_linked_field(&mut self, field: &graphql_ir::LinkedField) { + let field_def = self.schema.field(field.definition.item); + self.add_field_argument_hints(field_def, &field.arguments) + } + + fn visit_fragment_spread(&mut self, spread: &FragmentSpread) { + if let Ok(Some(alias)) = spread.alias() { + let initial_span = spread.fragment.location.span(); + + // We don't actually have location information for the `...` in the + // spread, so we adjust the span assuming it's been formatted and the `...` + // immediately precedes the fragment name. 
+ let adjusted_location = Location::with_span( + &spread.fragment.location, + Span::new(initial_span.start - 3, initial_span.end), + ); + self.add_alias_hint(alias.item, adjusted_location) + } + } + + fn visit_inline_fragment(&mut self, fragment: &InlineFragment) { + if let Ok(Some(alias)) = fragment.alias(self.schema) { + self.add_alias_hint(alias.item, fragment.spread_location) + } + } +} diff --git a/compiler/crates/relay-lsp/src/js_language_server.rs b/compiler/crates/relay-lsp/src/js_language_server.rs deleted file mode 100644 index 8d7c5239559e4..0000000000000 --- a/compiler/crates/relay-lsp/src/js_language_server.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use lsp_types::request::CodeActionRequest; -use lsp_types::request::Completion; -use lsp_types::request::Request; -use lsp_types::Url; - -use crate::lsp_runtime_error::LSPRuntimeResult; - -/// Interface for the LSP server to handle JavaScript text -pub trait JSLanguageServer: Send + Sync { - type TState; - - fn process_js_source(&self, url: &Url, text: &str); - fn remove_js_source(&self, url: &Url); - fn on_complete( - &self, - params: &::Params, - state: &Self::TState, - ) -> LSPRuntimeResult<::Result>; - fn on_code_action( - &self, - params: &::Params, - state: &Self::TState, - ) -> LSPRuntimeResult<::Result>; -} diff --git a/compiler/crates/relay-lsp/src/lib.rs b/compiler/crates/relay-lsp/src/lib.rs index 5ab5e9592df47..a9ab71a2661a2 100644 --- a/compiler/crates/relay-lsp/src/lib.rs +++ b/compiler/crates/relay-lsp/src/lib.rs @@ -17,7 +17,7 @@ pub mod find_field_usages; pub mod goto_definition; mod graphql_tools; pub mod hover; -pub mod js_language_server; +mod inlay_hints; pub mod location; mod lsp_extra_data_provider; pub mod lsp_process_error; @@ -38,8 +38,8 @@ use common::PerfLogger; use 
docblock_resolution_info::DocblockResolutionInfo; pub use extract_graphql::JavaScriptSourceFeature; use graphql_syntax::ExecutableDocument; +use graphql_syntax::SchemaDocument; pub use hover::ContentConsumerType; -pub use js_language_server::JSLanguageServer; use log::debug; pub use lsp_extra_data_provider::DummyExtraDataProvider; pub use lsp_extra_data_provider::FieldDefinitionSourceInfo; @@ -62,8 +62,9 @@ pub use server::Schemas; pub use utils::position_to_offset; pub enum Feature { - GraphQLDocument(ExecutableDocument), + ExecutableDocument(ExecutableDocument), DocblockIr(DocblockIr), + SchemaDocument(SchemaDocument), } #[allow(clippy::large_enum_variant)] @@ -85,9 +86,6 @@ pub async fn start_language_server< perf_logger: Arc, extra_data_provider: Box, schema_documentation_loader: Option>>, - js_language_server: Option< - Box>>, - >, ) -> LSPProcessResult<()> where TPerfLogger: PerfLogger + 'static, @@ -103,7 +101,6 @@ where perf_logger, extra_data_provider, schema_documentation_loader, - js_language_server, ) .await?; io_handles.join()?; diff --git a/compiler/crates/relay-lsp/src/location.rs b/compiler/crates/relay-lsp/src/location.rs index fdf71d2c3ca6c..3c9be55fd4b31 100644 --- a/compiler/crates/relay-lsp/src/location.rs +++ b/compiler/crates/relay-lsp/src/location.rs @@ -11,7 +11,11 @@ use std::path::PathBuf; use common::Location; use common::SourceLocationKey; use common::TextSource; +use dashmap::DashMap; +use extract_graphql::JavaScriptSourceFeature; +use graphql_syntax::GraphQLSource; use intern::Lookup; +use lsp_types::Range; use lsp_types::Url; use crate::lsp_runtime_error::LSPRuntimeError; @@ -19,42 +23,56 @@ use crate::lsp_runtime_error::LSPRuntimeResult; /// Given a root dir and a common::Location, return a Result containing an /// LSPLocation (i.e. lsp_types::Location). 
-pub fn transform_relay_location_to_lsp_location( +pub fn transform_relay_location_on_disk_to_lsp_location( root_dir: &Path, location: Location, +) -> LSPRuntimeResult { + transform_relay_location_to_lsp_location_with_cache(root_dir, location, None, None) +} + +pub fn transform_relay_location_to_lsp_location_with_cache( + root_dir: &Path, + location: Location, + source_feature_cache: Option<&DashMap>>, + synced_schema_sources: Option<&DashMap>, ) -> LSPRuntimeResult { match location.source_location() { SourceLocationKey::Standalone { path } => { - let abspath = root_dir.join(PathBuf::from(path.lookup())); + let absolute_path = root_dir.join(PathBuf::from(path.lookup())); + let uri = get_uri(&absolute_path)?; - let file_contents = get_file_contents(&abspath)?; + // Standalone locations might be `.graphql` files, so we'll look in the synced + // schema sources cache first. + let range = match synced_schema_sources.and_then(|cache| { + cache + .get(&uri) + .map(|source| source.text_source().to_span_range(location.span())) + }) { + Some(range) => range, + None => { + let file_contents = get_file_contents(&absolute_path)?; + TextSource::from_whole_document(file_contents).to_span_range(location.span()) + } + }; - let uri = get_uri(&abspath)?; - - let range = - TextSource::from_whole_document(file_contents).to_span_range(location.span()); Ok(lsp_types::Location { uri, range }) } SourceLocationKey::Embedded { path, index } => { let path_to_fragment = root_dir.join(PathBuf::from(path.lookup())); let uri = get_uri(&path_to_fragment)?; - let file_contents = get_file_contents(&path_to_fragment)?; - - let response = extract_graphql::extract(&file_contents); - let response_length = response.len(); - let embedded_source = response - .into_iter() - .nth(index.try_into().unwrap()) - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "File {:?} does not contain enough graphql literals: {} needed; {} found", - path_to_fragment, index, response_length - )) - })?; + // 
Embedded locations are always `.js` files, so we'll look in the + // source feature cache first. + let range = match source_feature_cache.and_then(|cache| cache.get(&uri)) { + Some(response) => feature_location_to_range(&response, index, location), + None => { + // If the file is not in the cache, read it from disk. + let content = get_file_contents(&path_to_fragment)?; + let response = extract_graphql::extract(&content); + feature_location_to_range(&response, index, location) + } + }?; - let text_source = embedded_source.text_source(); - let range = text_source.to_span_range(location.span()); Ok(lsp_types::Location { uri, range }) } _ => Err(LSPRuntimeError::UnexpectedError( @@ -63,8 +81,27 @@ pub fn transform_relay_location_to_lsp_location( } } +fn feature_location_to_range( + source_features: &[JavaScriptSourceFeature], + index: u16, + location: Location, +) -> Result { + let response_length = source_features.len(); + let embedded_source: &JavaScriptSourceFeature = + source_features.get::(index.into()).ok_or_else(|| { + LSPRuntimeError::UnexpectedError(format!( + "File {:?} does not contain enough graphql literals: {} needed; {} found", + location.source_location().path(), + index, + response_length + )) + })?; + let text_source = embedded_source.text_source(); + Ok(text_source.to_span_range(location.span())) +} + fn get_file_contents(path: &Path) -> LSPRuntimeResult { - let file = std::fs::read(&path).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?; + let file = std::fs::read(path).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string()))?; String::from_utf8(file).map_err(|e| LSPRuntimeError::UnexpectedError(e.to_string())) } diff --git a/compiler/crates/relay-lsp/src/node_resolution_info.rs b/compiler/crates/relay-lsp/src/node_resolution_info.rs new file mode 100644 index 0000000000000..69d8e654da3fd --- /dev/null +++ b/compiler/crates/relay-lsp/src/node_resolution_info.rs @@ -0,0 +1,421 @@ +/* + * Copyright (c) Meta Platforms, Inc. 
and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Span; +use graphql_syntax::Argument; +use graphql_syntax::Directive; +use graphql_syntax::ExecutableDefinition; +use graphql_syntax::ExecutableDocument; +use graphql_syntax::FragmentDefinition; +use graphql_syntax::FragmentSpread; +use graphql_syntax::InlineFragment; +use graphql_syntax::LinkedField; +use graphql_syntax::List; +use graphql_syntax::OperationDefinition; +use graphql_syntax::ScalarField; +use graphql_syntax::Selection; +use graphql_syntax::TypeCondition; +use intern::string_key::StringKey; + +use crate::lsp_runtime_error::LSPRuntimeError; +use crate::lsp_runtime_error::LSPRuntimeResult; + +mod type_path; +pub use type_path::TypePath; +pub use type_path::TypePathItem; + +#[derive(Debug, Clone, PartialEq)] +pub enum NodeKind { + OperationDefinition(OperationDefinition), + FragmentDefinition(FragmentDefinition), + FieldName, + FieldArgument(StringKey, StringKey), + FragmentSpread(StringKey), + InlineFragment, + Variable(String), + Directive(StringKey, Option), + TypeCondition(StringKey), +} + +#[derive(Debug)] +pub struct NodeResolutionInfo { + /// The type of the leaf node on which the information request was made + pub kind: NodeKind, + /// A list of type metadata that we can use to resolve the leaf + /// type the request is being made against + pub type_path: TypePath, +} + +impl NodeResolutionInfo { + fn new(kind: NodeKind) -> Self { + Self { + kind, + type_path: Default::default(), + } + } +} + +fn build_node_resolution_for_directive( + directives: &[Directive], + position_span: Span, +) -> Option { + let directive = directives + .iter() + .find(|directive| directive.span.contains(position_span))?; + + let arg_name_opt = if let Some(args) = &directive.arguments { + args.items + .iter() + .find(|arg| arg.span.contains(position_span)) + .map(|arg| arg.name.value) + } else { + None + }; 
+ + Some(NodeResolutionInfo { + kind: NodeKind::Directive(directive.name.value, arg_name_opt), + type_path: Default::default(), + }) +} + +fn type_condition_at_position( + type_condition: &TypeCondition, + position_span: Span, +) -> Option { + if !type_condition.span.contains(position_span) { + return None; + } + + Some(NodeKind::TypeCondition(type_condition.type_.value)) +} + +pub fn create_node_resolution_info( + document: ExecutableDocument, + position_span: Span, +) -> LSPRuntimeResult { + let definition = document + .definitions + .iter() + .find(|definition| definition.location().contains(position_span)) + .ok_or(LSPRuntimeError::ExpectedError)?; + + match definition { + ExecutableDefinition::Operation(operation) => { + if operation.location.contains(position_span) { + let mut node_resolution_info = + NodeResolutionInfo::new(NodeKind::OperationDefinition(operation.clone())); + let OperationDefinition { + selections, + variable_definitions, + .. + } = operation; + + if let Some(variable_definitions) = variable_definitions { + if let Some(variable) = variable_definitions + .items + .iter() + .find(|var| var.span.contains(position_span)) + { + node_resolution_info.kind = NodeKind::Variable(variable.type_.to_string()); + return Ok(node_resolution_info); + } + } + + let (_, kind) = operation.operation.ok_or_else(|| { + LSPRuntimeError::UnexpectedError( + "Expected operation to exist, but it did not".to_string(), + ) + })?; + node_resolution_info + .type_path + .add_type(TypePathItem::Operation(kind)); + + build_node_resolution_info_from_selections( + selections, + position_span, + &mut node_resolution_info, + ); + Ok(node_resolution_info) + } else { + Err(LSPRuntimeError::UnexpectedError(format!( + "Expected operation named {:?} to contain position {:?}, but it did not. 
Operation span {:?}", + operation.name, operation.location, position_span + ))) + } + } + ExecutableDefinition::Fragment(fragment) => { + if fragment.location.contains(position_span) { + let mut node_resolution_info = + NodeResolutionInfo::new(NodeKind::FragmentDefinition(fragment.clone())); + if let Some(node_resolution_info) = + build_node_resolution_for_directive(&fragment.directives, position_span) + { + return Ok(node_resolution_info); + } + + if let Some(node_kind) = + type_condition_at_position(&fragment.type_condition, position_span) + { + node_resolution_info.kind = node_kind; + return Ok(node_resolution_info); + } + + let type_name = fragment.type_condition.type_.value; + node_resolution_info + .type_path + .add_type(TypePathItem::FragmentDefinition { type_name }); + build_node_resolution_info_from_selections( + &fragment.selections, + position_span, + &mut node_resolution_info, + ); + Ok(node_resolution_info) + } else { + Err(LSPRuntimeError::UnexpectedError(format!( + "Expected fragment named {:?} to contain position {:?}, but it did not. 
Operation span {:?}", + fragment.name, fragment.location, position_span + ))) + } + } + } +} + +/// If position_span falls into one of the field arguments, +/// we need to display resolution info for this field +fn build_node_resolution_info_for_argument( + field_name: StringKey, + arguments: &Option>, + position_span: Span, + node_resolution_info: &mut NodeResolutionInfo, +) -> Option<()> { + if let Some(arguments) = &arguments { + let argument = arguments + .items + .iter() + .find(|item| item.span.contains(position_span))?; + + node_resolution_info.kind = NodeKind::FieldArgument(field_name, argument.name.value); + + Some(()) + } else { + None + } +} + +fn build_node_resolution_info_from_selections( + selections: &List, + position_span: Span, + node_resolution_info: &mut NodeResolutionInfo, +) { + if let Some(item) = selections + .items + .iter() + .find(|item| item.span().contains(position_span)) + { + if let Some(directive_resolution_info) = + build_node_resolution_for_directive(item.directives(), position_span) + { + node_resolution_info.kind = directive_resolution_info.kind; + return; + } + + match item { + Selection::LinkedField(node) => { + node_resolution_info.kind = NodeKind::FieldName; + let LinkedField { + name, selections, .. + } = node; + if build_node_resolution_info_for_argument( + name.value, + &node.arguments, + position_span, + node_resolution_info, + ) + .is_none() + { + node_resolution_info + .type_path + .add_type(TypePathItem::LinkedField { name: name.value }); + build_node_resolution_info_from_selections( + selections, + position_span, + node_resolution_info, + ); + } + } + Selection::FragmentSpread(spread) => { + let FragmentSpread { name, .. } = spread; + if name.span.contains(position_span) { + node_resolution_info.kind = NodeKind::FragmentSpread(name.value); + } + } + Selection::InlineFragment(node) => { + let InlineFragment { + selections, + type_condition, + .. 
+ } = node; + + node_resolution_info.kind = NodeKind::InlineFragment; + if let Some(type_condition) = type_condition { + let type_name = type_condition.type_.value; + node_resolution_info + .type_path + .add_type(TypePathItem::InlineFragment { type_name }); + + if let Some(node_kind) = + type_condition_at_position(type_condition, position_span) + { + node_resolution_info.kind = node_kind; + } else { + build_node_resolution_info_from_selections( + selections, + position_span, + node_resolution_info, + ) + } + } + } + Selection::ScalarField(node) => { + let ScalarField { name, .. } = node; + + if build_node_resolution_info_for_argument( + name.value, + &node.arguments, + position_span, + node_resolution_info, + ) + .is_none() + { + node_resolution_info.kind = NodeKind::FieldName; + node_resolution_info + .type_path + .add_type(TypePathItem::ScalarField { name: name.value }); + } + } + } + } +} + +#[cfg(test)] +mod test { + use common::SourceLocationKey; + use common::Span; + use graphql_syntax::parse_executable; + use intern::string_key::Intern; + + use super::create_node_resolution_info; + use super::NodeKind; + use super::NodeResolutionInfo; + + fn parse_and_get_node_info(source: &str, pos: u32) -> NodeResolutionInfo { + let document = + parse_executable(source, SourceLocationKey::standalone("/test/file")).unwrap(); + + // Select the `uri` field + let position_span = Span { + start: pos, + end: pos, + }; + + create_node_resolution_info(document, position_span).unwrap() + } + + #[test] + fn create_node_resolution_info_test() { + let node_resolution_info = parse_and_get_node_info( + r#" + fragment User_data on User { + name + profile_picture { + uri + } + } + "#, + // Select the `uri` field + 117, + ); + + assert_eq!(node_resolution_info.kind, NodeKind::FieldName); + } + + #[test] + fn create_node_resolution_info_test_position_outside() { + let document = parse_executable( + r#" + fragment User_data on User { + name + } + "#, + 
SourceLocationKey::standalone("/test/file"), + ) + .unwrap(); + // Position is outside of the document + let position_span = Span { start: 86, end: 87 }; + let result = create_node_resolution_info(document, position_span); + assert!(result.is_err()); + } + + #[test] + fn create_node_resolution_info_fragment_def_name() { + let node_resolution_info = parse_and_get_node_info( + r#" + fragment User_data on User { + name + } + "#, + // Select the `User_data` fragment name + 26, + ); + + match node_resolution_info.kind { + NodeKind::FragmentDefinition(fragment) => { + assert_eq!(fragment.name.value, "User_data".intern()) + } + node_kind => panic!("Unexpected node node_resolution_info.kind {:?}", node_kind), + } + } + + #[test] + fn create_node_resolution_info_fragment_def_type_condition() { + let node_resolution_info = parse_and_get_node_info( + r#" + fragment User_data on User { + name + } + "#, + // Select the `User` type in fragment declaration + 35, + ); + + assert_eq!( + node_resolution_info.kind, + NodeKind::TypeCondition("User".intern()) + ); + } + + #[test] + fn create_node_resolution_info_inline_fragment_type_condition() { + let node_resolution_info = parse_and_get_node_info( + r#" + fragment User_data on User { + name + ... on User { + id + } + } + "#, + // Select the `User` type in fragment declaration + 84, + ); + + assert_eq!( + node_resolution_info.kind, + NodeKind::TypeCondition("User".intern()) + ); + } +} diff --git a/compiler/crates/relay-lsp/src/node_resolution_info/mod.rs b/compiler/crates/relay-lsp/src/node_resolution_info/mod.rs deleted file mode 100644 index 9d9dc9adc1700..0000000000000 --- a/compiler/crates/relay-lsp/src/node_resolution_info/mod.rs +++ /dev/null @@ -1,421 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::Span; -use graphql_syntax::Argument; -use graphql_syntax::Directive; -use graphql_syntax::ExecutableDefinition; -use graphql_syntax::ExecutableDocument; -use graphql_syntax::FragmentDefinition; -use graphql_syntax::FragmentSpread; -use graphql_syntax::InlineFragment; -use graphql_syntax::LinkedField; -use graphql_syntax::List; -use graphql_syntax::OperationDefinition; -use graphql_syntax::ScalarField; -use graphql_syntax::Selection; -use graphql_syntax::TypeCondition; -use intern::string_key::StringKey; - -use crate::lsp_runtime_error::LSPRuntimeError; -use crate::lsp_runtime_error::LSPRuntimeResult; - -mod type_path; -pub use type_path::TypePath; -pub use type_path::TypePathItem; - -#[derive(Debug, Clone, PartialEq)] -pub enum NodeKind { - OperationDefinition(OperationDefinition), - FragmentDefinition(FragmentDefinition), - FieldName, - FieldArgument(StringKey, StringKey), - FragmentSpread(StringKey), - InlineFragment, - Variable(String), - Directive(StringKey, Option), - TypeCondition(StringKey), -} - -#[derive(Debug)] -pub struct NodeResolutionInfo { - /// The type of the leaf node on which the information request was made - pub kind: NodeKind, - /// A list of type metadata that we can use to resolve the leaf - /// type the request is being made against - pub type_path: TypePath, -} - -impl NodeResolutionInfo { - fn new(kind: NodeKind) -> Self { - Self { - kind, - type_path: Default::default(), - } - } -} - -fn build_node_resolution_for_directive( - directives: &[Directive], - position_span: Span, -) -> Option { - let directive = directives - .iter() - .find(|directive| directive.span.contains(position_span))?; - - let arg_name_opt = if let Some(args) = &directive.arguments { - args.items - .iter() - .find(|arg| arg.span.contains(position_span)) - .map(|arg| arg.name.value) - } else { - None - }; - - Some(NodeResolutionInfo { - kind: NodeKind::Directive(directive.name.value, arg_name_opt), - type_path: Default::default(), - }) -} - -fn 
type_condition_at_position( - type_condition: &TypeCondition, - position_span: Span, -) -> Option { - if !type_condition.span.contains(position_span) { - return None; - } - - Some(NodeKind::TypeCondition(type_condition.type_.value)) -} - -pub fn create_node_resolution_info( - document: ExecutableDocument, - position_span: Span, -) -> LSPRuntimeResult { - let definition = document - .definitions - .iter() - .find(|definition| definition.location().contains(position_span)) - .ok_or(LSPRuntimeError::ExpectedError)?; - - match definition { - ExecutableDefinition::Operation(operation) => { - if operation.location.contains(position_span) { - let mut node_resolution_info = - NodeResolutionInfo::new(NodeKind::OperationDefinition(operation.clone())); - let OperationDefinition { - selections, - variable_definitions, - .. - } = operation; - - if let Some(variable_definitions) = variable_definitions { - if let Some(variable) = variable_definitions - .items - .iter() - .find(|var| var.span.contains(position_span)) - { - node_resolution_info.kind = NodeKind::Variable(variable.type_.to_string()); - return Ok(node_resolution_info); - } - } - - let (_, kind) = operation.operation.clone().ok_or_else(|| { - LSPRuntimeError::UnexpectedError( - "Expected operation to exist, but it did not".to_string(), - ) - })?; - node_resolution_info - .type_path - .add_type(TypePathItem::Operation(kind)); - - build_node_resolution_info_from_selections( - selections, - position_span, - &mut node_resolution_info, - ); - Ok(node_resolution_info) - } else { - Err(LSPRuntimeError::UnexpectedError(format!( - "Expected operation named {:?} to contain position {:?}, but it did not. 
Operation span {:?}", - operation.name, operation.location, position_span - ))) - } - } - ExecutableDefinition::Fragment(fragment) => { - if fragment.location.contains(position_span) { - let mut node_resolution_info = - NodeResolutionInfo::new(NodeKind::FragmentDefinition(fragment.clone())); - if let Some(node_resolution_info) = - build_node_resolution_for_directive(&fragment.directives, position_span) - { - return Ok(node_resolution_info); - } - - if let Some(node_kind) = - type_condition_at_position(&fragment.type_condition, position_span) - { - node_resolution_info.kind = node_kind; - return Ok(node_resolution_info); - } - - let type_name = fragment.type_condition.type_.value; - node_resolution_info - .type_path - .add_type(TypePathItem::FragmentDefinition { type_name }); - build_node_resolution_info_from_selections( - &fragment.selections, - position_span, - &mut node_resolution_info, - ); - Ok(node_resolution_info) - } else { - Err(LSPRuntimeError::UnexpectedError(format!( - "Expected fragment named {:?} to contain position {:?}, but it did not. 
Operation span {:?}", - fragment.name, fragment.location, position_span - ))) - } - } - } -} - -/// If position_span falls into one of the field arguments, -/// we need to display resolution info for this field -fn build_node_resolution_info_for_argument( - field_name: StringKey, - arguments: &Option>, - position_span: Span, - node_resolution_info: &mut NodeResolutionInfo, -) -> Option<()> { - if let Some(arguments) = &arguments { - let argument = arguments - .items - .iter() - .find(|item| item.span.contains(position_span))?; - - node_resolution_info.kind = NodeKind::FieldArgument(field_name, argument.name.value); - - Some(()) - } else { - None - } -} - -fn build_node_resolution_info_from_selections( - selections: &List, - position_span: Span, - node_resolution_info: &mut NodeResolutionInfo, -) { - if let Some(item) = selections - .items - .iter() - .find(|item| item.span().contains(position_span)) - { - if let Some(directive_resolution_info) = - build_node_resolution_for_directive(item.directives(), position_span) - { - node_resolution_info.kind = directive_resolution_info.kind; - return; - } - - match item { - Selection::LinkedField(node) => { - node_resolution_info.kind = NodeKind::FieldName; - let LinkedField { - name, selections, .. - } = node; - if build_node_resolution_info_for_argument( - name.value, - &node.arguments, - position_span, - node_resolution_info, - ) - .is_none() - { - node_resolution_info - .type_path - .add_type(TypePathItem::LinkedField { name: name.value }); - build_node_resolution_info_from_selections( - selections, - position_span, - node_resolution_info, - ); - } - } - Selection::FragmentSpread(spread) => { - let FragmentSpread { name, .. } = spread; - if name.span.contains(position_span) { - node_resolution_info.kind = NodeKind::FragmentSpread(name.value); - } - } - Selection::InlineFragment(node) => { - let InlineFragment { - selections, - type_condition, - .. 
- } = node; - - node_resolution_info.kind = NodeKind::InlineFragment; - if let Some(type_condition) = type_condition { - let type_name = type_condition.type_.value; - node_resolution_info - .type_path - .add_type(TypePathItem::InlineFragment { type_name }); - - if let Some(node_kind) = - type_condition_at_position(type_condition, position_span) - { - node_resolution_info.kind = node_kind; - } else { - build_node_resolution_info_from_selections( - selections, - position_span, - node_resolution_info, - ) - } - } - } - Selection::ScalarField(node) => { - let ScalarField { name, .. } = node; - - if build_node_resolution_info_for_argument( - name.value, - &node.arguments, - position_span, - node_resolution_info, - ) - .is_none() - { - node_resolution_info.kind = NodeKind::FieldName; - node_resolution_info - .type_path - .add_type(TypePathItem::ScalarField { name: name.value }); - } - } - } - } -} - -#[cfg(test)] -mod test { - use common::SourceLocationKey; - use common::Span; - use graphql_syntax::parse_executable; - use intern::string_key::Intern; - - use super::create_node_resolution_info; - use super::NodeKind; - use super::NodeResolutionInfo; - - fn parse_and_get_node_info(source: &str, pos: u32) -> NodeResolutionInfo { - let document = - parse_executable(source, SourceLocationKey::standalone("/test/file")).unwrap(); - - // Select the `uri` field - let position_span = Span { - start: pos, - end: pos, - }; - - create_node_resolution_info(document, position_span).unwrap() - } - - #[test] - fn create_node_resolution_info_test() { - let node_resolution_info = parse_and_get_node_info( - r#" - fragment User_data on User { - name - profile_picture { - uri - } - } - "#, - // Select the `uri` field - 117, - ); - - assert_eq!(node_resolution_info.kind, NodeKind::FieldName); - } - - #[test] - fn create_node_resolution_info_test_position_outside() { - let document = parse_executable( - r#" - fragment User_data on User { - name - } - "#, - 
SourceLocationKey::standalone("/test/file"), - ) - .unwrap(); - // Position is outside of the document - let position_span = Span { start: 86, end: 87 }; - let result = create_node_resolution_info(document, position_span); - assert!(result.is_err()); - } - - #[test] - fn create_node_resolution_info_fragment_def_name() { - let node_resolution_info = parse_and_get_node_info( - r#" - fragment User_data on User { - name - } - "#, - // Select the `User_data` fragment name - 26, - ); - - match node_resolution_info.kind { - NodeKind::FragmentDefinition(fragment) => { - assert_eq!(fragment.name.value, "User_data".intern()) - } - node_kind => panic!("Unexpected node node_resolution_info.kind {:?}", node_kind), - } - } - - #[test] - fn create_node_resolution_info_fragment_def_type_condition() { - let node_resolution_info = parse_and_get_node_info( - r#" - fragment User_data on User { - name - } - "#, - // Select the `User` type in fragment declaration - 35, - ); - - assert_eq!( - node_resolution_info.kind, - NodeKind::TypeCondition("User".intern()) - ); - } - - #[test] - fn create_node_resolution_info_inline_fragment_type_condition() { - let node_resolution_info = parse_and_get_node_info( - r#" - fragment User_data on User { - name - ... on User { - id - } - } - "#, - // Select the `User` type in fragment declaration - 84, - ); - - assert_eq!( - node_resolution_info.kind, - NodeKind::TypeCondition("User".intern()) - ); - } -} diff --git a/compiler/crates/relay-lsp/src/references.rs b/compiler/crates/relay-lsp/src/references.rs new file mode 100644 index 0000000000000..0fa55c73a3caa --- /dev/null +++ b/compiler/crates/relay-lsp/src/references.rs @@ -0,0 +1,158 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//! 
Utilities for providing the goto definition feature + +use std::path::Path; + +use common::Location as IRLocation; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentSpread; +use graphql_ir::Program; +use graphql_ir::Visitor; +use intern::string_key::StringKey; +use lsp_types::request::References; +use lsp_types::request::Request; +use lsp_types::Location as LSPLocation; +use relay_docblock::DocblockIr; +use relay_docblock::On; +use relay_docblock::ResolverFieldDocblockIr; +use schema::Schema; + +use crate::docblock_resolution_info::DocblockResolutionInfo; +use crate::find_field_usages::find_field_locations; +use crate::find_field_usages::get_usages; +use crate::location::transform_relay_location_on_disk_to_lsp_location; +use crate::lsp_runtime_error::LSPRuntimeError; +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::node_resolution_info::NodeKind; +use crate::server::GlobalState; +use crate::FeatureResolutionInfo; + +fn get_references_response( + feature_resolution_info: FeatureResolutionInfo, + program: &Program, + root_dir: &Path, +) -> LSPRuntimeResult> { + match feature_resolution_info { + FeatureResolutionInfo::GraphqlNode(node_resolution_info) => { + match node_resolution_info.kind { + NodeKind::FragmentDefinition(fragment) => { + let references = + ReferenceFinder::get_references_to_fragment(program, fragment.name.value) + .into_iter() + .map(|location| { + transform_relay_location_on_disk_to_lsp_location(root_dir, location) + }) + .collect::, LSPRuntimeError>>()?; + + Ok(references) + } + NodeKind::FieldName => { + let (type_, field) = node_resolution_info + .type_path + .resolve_current_field(&program.schema) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError(" field not found!".to_string()) + })?; + let type_name = program.schema.get_type_name(type_); + let field_name = field.name.item; + + let lsp_locations = + get_usages(program, &program.schema, type_name, field_name)? 
+ .into_iter() + .map(|(_, ir_location)| { + transform_relay_location_on_disk_to_lsp_location( + root_dir, + ir_location, + ) + }) + .collect::, LSPRuntimeError>>()?; + Ok(lsp_locations) + } + _ => Err(LSPRuntimeError::ExpectedError), + } + } + FeatureResolutionInfo::DocblockNode(docblock_node) => { + if let DocblockResolutionInfo::FieldName(field_name) = docblock_node.resolution_info { + let type_name = match docblock_node.ir { + DocblockIr::Field(ResolverFieldDocblockIr::LegacyVerboseResolver( + relay_resolver, + )) => match relay_resolver.on { + On::Type(type_) => type_.value.item, + On::Interface(interface) => interface.value.item, + }, + DocblockIr::Field(ResolverFieldDocblockIr::TerseRelayResolver( + terse_resolver, + )) => terse_resolver.type_.item, + DocblockIr::Type(_) => { + // TODO: Implement support for types. + return Err(LSPRuntimeError::ExpectedError); + } + }; + + let references = find_field_locations(program, field_name, type_name) + .ok_or(LSPRuntimeError::ExpectedError)? + .into_iter() + .map(|location| { + transform_relay_location_on_disk_to_lsp_location(root_dir, location) + }) + .collect::, LSPRuntimeError>>()?; + + Ok(references) + } else { + // Go to reference not implemented for other parts of the docblocks yet. 
+ Err(LSPRuntimeError::ExpectedError) + } + } + } +} + +#[derive(Debug, Clone)] +struct ReferenceFinder { + references: Vec, + name: StringKey, +} + +impl ReferenceFinder { + fn get_references_to_fragment(program: &Program, name: StringKey) -> Vec { + let mut reference_finder = ReferenceFinder { + references: vec![], + name, + }; + reference_finder.visit_program(program); + reference_finder.references + } +} + +impl Visitor for ReferenceFinder { + const NAME: &'static str = "ReferenceFinder"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn visit_fragment_spread(&mut self, spread: &FragmentSpread) { + if spread.fragment.item == FragmentDefinitionName(self.name) { + self.references.push(spread.fragment.location); + } + } +} + +pub fn on_references( + state: &impl GlobalState, + params: ::Params, +) -> LSPRuntimeResult<::Result> { + let node_resolution_info = state.resolve_node(¶ms.text_document_position)?; + let references_response = get_references_response( + node_resolution_info, + &state + .get_program(&state.extract_project_name_from_url( + ¶ms.text_document_position.text_document.uri, + )?)?, + &state.root_dir(), + )?; + Ok(Some(references_response)) +} diff --git a/compiler/crates/relay-lsp/src/references/mod.rs b/compiler/crates/relay-lsp/src/references/mod.rs deleted file mode 100644 index 246adfb674999..0000000000000 --- a/compiler/crates/relay-lsp/src/references/mod.rs +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -//! 
Utilities for providing the goto definition feature - -use std::path::Path; - -use common::Location as IRLocation; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentSpread; -use graphql_ir::Program; -use graphql_ir::Visitor; -use intern::string_key::StringKey; -use lsp_types::request::References; -use lsp_types::request::Request; -use lsp_types::Location as LSPLocation; -use relay_docblock::DocblockIr; -use relay_docblock::On; -use schema::Schema; - -use crate::docblock_resolution_info::DocblockResolutionInfo; -use crate::find_field_usages::find_field_locations; -use crate::find_field_usages::get_usages; -use crate::location::transform_relay_location_to_lsp_location; -use crate::lsp_runtime_error::LSPRuntimeError; -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::node_resolution_info::NodeKind; -use crate::server::GlobalState; -use crate::FeatureResolutionInfo; - -fn get_references_response( - feature_resolution_info: FeatureResolutionInfo, - program: &Program, - root_dir: &Path, -) -> LSPRuntimeResult> { - match feature_resolution_info { - FeatureResolutionInfo::GraphqlNode(node_resolution_info) => { - match node_resolution_info.kind { - NodeKind::FragmentDefinition(fragment) => { - let references = - ReferenceFinder::get_references_to_fragment(program, fragment.name.value) - .into_iter() - .map(|location| { - transform_relay_location_to_lsp_location(root_dir, location) - }) - .collect::, LSPRuntimeError>>()?; - - Ok(references) - } - NodeKind::FieldName => { - let (type_, field) = node_resolution_info - .type_path - .resolve_current_field(&program.schema) - .ok_or_else(|| { - LSPRuntimeError::UnexpectedError(" field not found!".to_string()) - })?; - let type_name = program.schema.get_type_name(type_); - let field_name = field.name.item; - - let lsp_locations = - get_usages(program, &program.schema, type_name, field_name)? 
- .into_iter() - .map(|(_, ir_location)| { - transform_relay_location_to_lsp_location(root_dir, ir_location) - }) - .collect::, LSPRuntimeError>>()?; - Ok(lsp_locations) - } - _ => Err(LSPRuntimeError::ExpectedError), - } - } - FeatureResolutionInfo::DocblockNode(docblock_node) => { - if let DocblockResolutionInfo::FieldName(field_name) = docblock_node.resolution_info { - let type_name = match docblock_node.ir { - DocblockIr::RelayResolver(relay_resolver) => match relay_resolver.on { - On::Type(type_) => type_.value.item, - On::Interface(interface) => interface.value.item, - }, - DocblockIr::TerseRelayResolver(_) => { - // TODO: Implement support for terse relay resolvers. - return Err(LSPRuntimeError::ExpectedError); - } - DocblockIr::StrongObjectResolver(_) => { - // TODO: Implement support for strong object. - return Err(LSPRuntimeError::ExpectedError); - } - DocblockIr::WeakObjectType(_) => { - // TODO: Implement support for weak object. - return Err(LSPRuntimeError::ExpectedError); - } - }; - - let references = find_field_locations(program, field_name, type_name) - .ok_or(LSPRuntimeError::ExpectedError)? - .into_iter() - .map(|location| transform_relay_location_to_lsp_location(root_dir, location)) - .collect::, LSPRuntimeError>>()?; - - Ok(references) - } else { - // Go to reference not implemented for other parts of the docblocks yet. 
- Err(LSPRuntimeError::ExpectedError) - } - } - } -} - -#[derive(Debug, Clone)] -struct ReferenceFinder { - references: Vec, - name: StringKey, -} - -impl ReferenceFinder { - fn get_references_to_fragment(program: &Program, name: StringKey) -> Vec { - let mut reference_finder = ReferenceFinder { - references: vec![], - name, - }; - reference_finder.visit_program(program); - reference_finder.references - } -} - -impl Visitor for ReferenceFinder { - const NAME: &'static str = "ReferenceFinder"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn visit_fragment_spread(&mut self, spread: &FragmentSpread) { - if spread.fragment.item == FragmentDefinitionName(self.name) { - self.references.push(spread.fragment.location); - } - } -} - -pub fn on_references( - state: &impl GlobalState, - params: ::Params, -) -> LSPRuntimeResult<::Result> { - let node_resolution_info = state.resolve_node(¶ms.text_document_position)?; - let references_response = get_references_response( - node_resolution_info, - &state - .get_program(&state.extract_project_name_from_url( - ¶ms.text_document_position.text_document.uri, - )?)?, - &state.root_dir(), - )?; - Ok(Some(references_response)) -} diff --git a/compiler/crates/relay-lsp/src/resolved_types_at_location/mod.rs b/compiler/crates/relay-lsp/src/resolved_types_at_location.rs similarity index 100% rename from compiler/crates/relay-lsp/src/resolved_types_at_location/mod.rs rename to compiler/crates/relay-lsp/src/resolved_types_at_location.rs diff --git a/compiler/crates/relay-lsp/src/search_schema_items.rs b/compiler/crates/relay-lsp/src/search_schema_items.rs new file mode 100644 index 0000000000000..13e7c5e110749 --- /dev/null +++ b/compiler/crates/relay-lsp/src/search_schema_items.rs @@ -0,0 +1,194 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use graphql_ir::reexport::StringKey; +use intern::string_key::Intern; +use intern::Lookup; +use lsp_types::request::Request; +use schema::Schema; +use schema_documentation::SchemaDocumentation; +use serde::Deserialize; +use serde::Serialize; + +use crate::lsp_runtime_error::LSPRuntimeResult; +use crate::server::GlobalState; + +pub(crate) enum SearchSchemaItems {} + +#[derive(Deserialize, Serialize)] +pub(crate) struct SchemaSearchItem { + name: String, + description: Option, +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct SearchSchemaItemsResponse { + pub items: Vec, + pub has_more: bool, +} + +#[derive(Deserialize, Serialize)] +pub(crate) struct SearchSchemaItemsParams { + pub filter: Option, + pub schema_name: String, + pub take: Option, + pub skip: Option, +} + +impl Request for SearchSchemaItems { + type Params = SearchSchemaItemsParams; + type Result = SearchSchemaItemsResponse; + const METHOD: &'static str = "relay/searchSchemaItems"; +} + +pub(crate) fn on_search_schema_items( + state: &impl GlobalState, + params: SearchSchemaItemsParams, +) -> LSPRuntimeResult<::Result> { + let filter = params.filter.map(|f| f.to_lowercase()); + let schema_name: &str = ¶ms.schema_name; + let schema = state.get_schema(&schema_name.intern())?; + + let schema_documentation = state.get_schema_documentation(schema_name); + + let objects = filter_and_transform_items( + schema.objects().map(|o| o.name.item.0), + &schema_documentation, + &filter, + ); + let interfaces = filter_and_transform_items( + schema.interfaces().map(|i| i.name.item.0), + &schema_documentation, + &filter, + ); + let enums = filter_and_transform_items( + schema.enums().map(|e| e.name.item.0), + &schema_documentation, + &filter, + ); + let unions = filter_and_transform_items( + schema.unions().map(|u| u.name.item.0), + &schema_documentation, + &filter, + ); + let input_objects = filter_and_transform_items( + schema.input_objects().map(|io| io.name.item.0), 
+ &schema_documentation, + &filter, + ); + let scalars = filter_and_transform_items( + schema.scalars().map(|s| s.name.item.0), + &schema_documentation, + &filter, + ); + + let mut items = objects + .chain(interfaces) + .chain(enums) + .chain(unions) + .chain(input_objects) + .chain(scalars) + .collect::>(); + + items.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); + + let skip = *params.skip.as_ref().unwrap_or(&0); + let take = *params.take.as_ref().unwrap_or(&500); + let has_more = items.len() > skip + take; + + let items = items.into_iter().skip(skip).take(take).collect::>(); + + Ok(SearchSchemaItemsResponse { items, has_more }) +} + +fn filter_and_transform_items<'a>( + items: impl Iterator + 'a, + schema_documentation: &'a impl SchemaDocumentation, + filter: &'a Option, +) -> impl Iterator + 'a { + items.filter_map(move |obj| { + let name = obj.lookup(); + let description = schema_documentation + .get_type_description(name) + .map(|s| s.to_string()); + + if should_include_named_item(name, &description, filter) { + Some(SchemaSearchItem { + name: name.to_string(), + description, + }) + } else { + None + } + }) +} + +fn should_include_named_item( + name: &str, + description: &Option, + filter: &Option, +) -> bool { + if let Some(filter) = filter.as_ref() { + if name.to_lowercase().contains(filter) { + true + } else if let Some(description) = description { + description.to_lowercase().contains(filter) + } else { + false + } + } else { + true + } +} + +#[cfg(test)] +mod tests { + use crate::search_schema_items::should_include_named_item; + + #[test] + fn test_no_filter() { + assert!(should_include_named_item( + "Yohan Blake", + &Some("London 2012".to_string()), + &None + )); + assert!(should_include_named_item("Usain Bolt", &None, &None)); + } + + #[test] + fn test_filter_matches_name() { + assert!(should_include_named_item( + "Michael Frater", + &None, + &Some("michael".to_string()) + )); + assert!(should_include_named_item( + "Nesta Carter", + 
&Some("London 2012".to_string()), + &Some("nesta".to_string()) + )); + } + + #[test] + fn test_filter_matches_description() { + assert!(should_include_named_item( + "Tiana Bartoletta", + &Some("London 2012".to_string()), + &Some("london".to_string()) + )); + } + + #[test] + fn test_non_matching_filter() { + assert!(!should_include_named_item( + "Allyson Felix", + &Some("London 2012".to_string()), + &Some("salt lake city".to_string()) + )); + } +} diff --git a/compiler/crates/relay-lsp/src/search_schema_items/mod.rs b/compiler/crates/relay-lsp/src/search_schema_items/mod.rs deleted file mode 100644 index 952d8be2f93f7..0000000000000 --- a/compiler/crates/relay-lsp/src/search_schema_items/mod.rs +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use graphql_ir::reexport::StringKey; -use intern::string_key::Intern; -use intern::Lookup; -use lsp_types::request::Request; -use schema::Schema; -use schema_documentation::SchemaDocumentation; -use serde::Deserialize; -use serde::Serialize; - -use crate::lsp_runtime_error::LSPRuntimeResult; -use crate::server::GlobalState; - -pub(crate) enum SearchSchemaItems {} - -#[derive(Deserialize, Serialize)] -pub(crate) struct SchemaSearchItem { - name: String, - description: Option, -} - -#[derive(Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct SearchSchemaItemsResponse { - pub items: Vec, - pub has_more: bool, -} - -#[derive(Deserialize, Serialize)] -pub(crate) struct SearchSchemaItemsParams { - pub filter: Option, - pub schema_name: String, - pub take: Option, - pub skip: Option, -} - -impl Request for SearchSchemaItems { - type Params = SearchSchemaItemsParams; - type Result = SearchSchemaItemsResponse; - const METHOD: &'static str = "relay/searchSchemaItems"; -} - -pub(crate) fn on_search_schema_items( - state: &impl 
GlobalState, - params: SearchSchemaItemsParams, -) -> LSPRuntimeResult<::Result> { - let filter = params.filter.map(|f| f.to_lowercase()); - let schema_name: &str = ¶ms.schema_name; - let schema = state.get_schema(&schema_name.intern())?; - - let schema_documentation = state.get_schema_documentation(schema_name); - - let objects = filter_and_transform_items( - schema.objects().map(|o| o.name.item.0), - &schema_documentation, - &filter, - ); - let interfaces = filter_and_transform_items( - schema.interfaces().map(|i| i.name.item.0), - &schema_documentation, - &filter, - ); - let enums = filter_and_transform_items( - schema.enums().map(|e| e.name.item.0), - &schema_documentation, - &filter, - ); - let unions = filter_and_transform_items( - schema.unions().map(|u| u.name.item), - &schema_documentation, - &filter, - ); - let input_objects = filter_and_transform_items( - schema.input_objects().map(|io| io.name.item.0), - &schema_documentation, - &filter, - ); - let scalars = filter_and_transform_items( - schema.scalars().map(|s| s.name.item.0), - &schema_documentation, - &filter, - ); - - let mut items = objects - .chain(interfaces) - .chain(enums) - .chain(unions) - .chain(input_objects) - .chain(scalars) - .collect::>(); - - items.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase())); - - let skip = *params.skip.as_ref().unwrap_or(&0); - let take = *params.take.as_ref().unwrap_or(&500); - let has_more = items.len() > skip + take; - - let items = items.into_iter().skip(skip).take(take).collect::>(); - - Ok(SearchSchemaItemsResponse { items, has_more }) -} - -fn filter_and_transform_items<'a>( - items: impl Iterator + 'a, - schema_documentation: &'a impl SchemaDocumentation, - filter: &'a Option, -) -> impl Iterator + 'a { - items.filter_map(move |obj| { - let name = obj.lookup(); - let description = schema_documentation - .get_type_description(name) - .map(|s| s.to_string()); - - if should_include_named_item(name, &description, filter) { - 
Some(SchemaSearchItem { - name: name.to_string(), - description, - }) - } else { - None - } - }) -} - -fn should_include_named_item( - name: &str, - description: &Option, - filter: &Option, -) -> bool { - if let Some(filter) = filter.as_ref() { - if name.to_lowercase().contains(filter) { - true - } else if let Some(description) = description { - description.to_lowercase().contains(filter) - } else { - false - } - } else { - true - } -} - -#[cfg(test)] -mod tests { - use crate::search_schema_items::should_include_named_item; - - #[test] - fn test_no_filter() { - assert_eq!( - should_include_named_item("Yohan Blake", &Some("London 2012".to_string()), &None), - true - ); - assert_eq!(should_include_named_item("Usain Bolt", &None, &None), true); - } - - #[test] - fn test_filter_matches_name() { - assert_eq!( - should_include_named_item("Michael Frater", &None, &Some("michael".to_string())), - true - ); - assert_eq!( - should_include_named_item( - "Nesta Carter", - &Some("London 2012".to_string()), - &Some("nesta".to_string()) - ), - true - ); - } - - #[test] - fn test_filter_matches_description() { - assert_eq!( - should_include_named_item( - "Tiana Bartoletta", - &Some("London 2012".to_string()), - &Some("london".to_string()) - ), - true - ); - } - - #[test] - fn test_non_matching_filter() { - assert_eq!( - should_include_named_item( - "Allyson Felix", - &Some("London 2012".to_string()), - &Some("salt lake city".to_string()) - ), - false - ); - } -} diff --git a/compiler/crates/relay-lsp/src/server.rs b/compiler/crates/relay-lsp/src/server.rs new file mode 100644 index 0000000000000..2c9590226b73a --- /dev/null +++ b/compiler/crates/relay-lsp/src/server.rs @@ -0,0 +1,357 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod heartbeat; +mod lsp_notification_dispatch; +mod lsp_request_dispatch; +mod lsp_state; +mod lsp_state_resources; +mod task_queue; + +use std::ops::ControlFlow; +use std::sync::Arc; + +use common::PerfLogEvent; +use common::PerfLogger; +use crossbeam::channel::Receiver; +use crossbeam::select; +use heartbeat::on_heartbeat; +use heartbeat::HeartbeatRequest; +use log::debug; +pub use lsp_notification_dispatch::LSPNotificationDispatch; +pub use lsp_request_dispatch::LSPRequestDispatch; +use lsp_server::Connection; +use lsp_server::ErrorCode; +use lsp_server::Message; +use lsp_server::Notification; +use lsp_server::Response as ServerResponse; +use lsp_server::ResponseError; +pub use lsp_state::build_ir_for_lsp; +pub use lsp_state::GlobalState; +pub use lsp_state::LSPState; +pub use lsp_state::Schemas; +use lsp_types::notification::Cancel; +use lsp_types::notification::DidChangeTextDocument; +use lsp_types::notification::DidCloseTextDocument; +use lsp_types::notification::DidOpenTextDocument; +use lsp_types::notification::DidSaveTextDocument; +use lsp_types::notification::Exit; +use lsp_types::request::CodeActionRequest; +use lsp_types::request::Completion; +use lsp_types::request::GotoDefinition; +use lsp_types::request::HoverRequest; +use lsp_types::request::InlayHintRequest; +use lsp_types::request::References; +use lsp_types::request::ResolveCompletionItem; +use lsp_types::request::Shutdown; +use lsp_types::CodeActionProviderCapability; +use lsp_types::CompletionOptions; +use lsp_types::InitializeParams; +use lsp_types::ServerCapabilities; +use lsp_types::TextDocumentSyncCapability; +use lsp_types::TextDocumentSyncKind; +use lsp_types::WorkDoneProgressOptions; +use relay_compiler::config::Config; +use relay_compiler::NoopArtifactWriter; +use schema_documentation::SchemaDocumentation; +use schema_documentation::SchemaDocumentationLoader; + +use self::task_queue::TaskProcessor; +use crate::code_action::on_code_action; +use crate::completion::on_completion; 
+use crate::completion::on_resolve_completion_item; +use crate::explore_schema_for_type::on_explore_schema_for_type; +use crate::explore_schema_for_type::ExploreSchemaForType; +use crate::find_field_usages::on_find_field_usages; +use crate::find_field_usages::FindFieldUsages; +use crate::goto_definition::on_get_source_location_of_type_definition; +use crate::goto_definition::on_goto_definition; +use crate::goto_definition::GetSourceLocationOfTypeDefinition; +use crate::graphql_tools::on_graphql_execute_query; +use crate::graphql_tools::GraphQLExecuteQuery; +use crate::hover::on_hover; +use crate::inlay_hints::on_inlay_hint_request; +use crate::lsp_process_error::LSPProcessResult; +use crate::lsp_runtime_error::LSPRuntimeError; +use crate::references::on_references; +use crate::resolved_types_at_location::on_get_resolved_types_at_location; +use crate::resolved_types_at_location::ResolvedTypesAtLocation; +use crate::search_schema_items::on_search_schema_items; +use crate::search_schema_items::SearchSchemaItems; +use crate::server::lsp_state::handle_lsp_state_tasks; +use crate::server::lsp_state_resources::LSPStateResources; +use crate::server::task_queue::TaskQueue; +use crate::shutdown::on_exit; +use crate::shutdown::on_shutdown; +use crate::status_reporter::LSPStatusReporter; +use crate::text_documents::on_cancel; +use crate::text_documents::on_did_change_text_document; +use crate::text_documents::on_did_close_text_document; +use crate::text_documents::on_did_open_text_document; +use crate::text_documents::on_did_save_text_document; +pub use crate::LSPExtraDataProvider; + +/// Initializes an LSP connection, handling the `initialize` message and `initialized` notification +/// handshake. 
+pub fn initialize(connection: &Connection) -> LSPProcessResult { + let server_capabilities = ServerCapabilities { + // Enable text document syncing so we can know when files are opened/changed/saved/closed + text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)), + + completion_provider: Some(CompletionOptions { + resolve_provider: Some(true), + trigger_characters: Some(vec!["(".into(), "\n".into(), ",".into(), "@".into()]), + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + ..Default::default() + }), + + hover_provider: Some(lsp_types::HoverProviderCapability::Simple(true)), + definition_provider: Some(lsp_types::OneOf::Left(true)), + references_provider: Some(lsp_types::OneOf::Left(true)), + code_action_provider: Some(CodeActionProviderCapability::Simple(true)), + inlay_hint_provider: Some(lsp_types::OneOf::Left(true)), + ..Default::default() + }; + + let server_capabilities = serde_json::to_value(server_capabilities)?; + let params = connection.initialize(server_capabilities)?; + let params: InitializeParams = serde_json::from_value(params)?; + Ok(params) +} + +#[derive(Debug)] +pub enum Task { + InboundMessage(lsp_server::Message), + LSPState(lsp_state::Task), +} + +/// Run the main server loop +pub async fn run< + TPerfLogger: PerfLogger + 'static, + TSchemaDocumentation: SchemaDocumentation + 'static, +>( + connection: Connection, + mut config: Config, + _params: InitializeParams, + perf_logger: Arc, + extra_data_provider: Box, + schema_documentation_loader: Option>>, +) -> LSPProcessResult<()> { + debug!( + "Running language server with config root {:?}", + config.root_dir + ); + + let task_processor = LSPTaskProcessor; + let task_queue = TaskQueue::new(Arc::new(task_processor)); + let task_scheduler = task_queue.get_scheduler(); + + config.artifact_writer = Box::new(NoopArtifactWriter); + config.status_reporter = Box::new(LSPStatusReporter::new( + config.root_dir.clone(), + 
connection.sender.clone(), + )); + + let lsp_state = Arc::new(LSPState::new( + Arc::new(config), + connection.sender.clone(), + Arc::clone(&task_scheduler), + Arc::clone(&perf_logger), + extra_data_provider, + schema_documentation_loader, + )); + + LSPStateResources::new(Arc::clone(&lsp_state)).watch(); + + while let Some(task) = next_task(&connection.receiver, &task_queue.receiver) { + task_queue.process(Arc::clone(&lsp_state), task); + } + + panic!("Client exited without proper shutdown sequence.") +} + +fn next_task( + lsp_receiver: &Receiver, + task_queue_receiver: &Receiver, +) -> Option { + select! { + recv(lsp_receiver) -> message => message.ok().map(Task::InboundMessage), + recv(task_queue_receiver) -> task => task.ok() + } +} + +struct LSPTaskProcessor; + +impl + TaskProcessor, Task> for LSPTaskProcessor +{ + fn process(&self, state: Arc>, task: Task) { + match task { + Task::InboundMessage(Message::Request(request)) => handle_request(state, request), + Task::InboundMessage(Message::Notification(notification)) => { + handle_notification(state, notification); + } + Task::LSPState(lsp_task) => { + handle_lsp_state_tasks(state, lsp_task); + } + Task::InboundMessage(Message::Response(_)) => { + // TODO: handle response from the client -> cancel message, etc + } + } + } +} + +fn handle_request( + lsp_state: Arc>, + request: lsp_server::Request, +) { + debug!("request received {:?}", request); + let lsp_request_event = lsp_state.perf_logger.create_event("lsp_message"); + let get_server_response_bound = |req| dispatch_request(req, lsp_state.as_ref()); + let get_response = with_request_logging(&lsp_request_event, get_server_response_bound); + + lsp_state + .send_message(Message::Response(get_response(request))) + .expect("Unable to send message to a client."); + lsp_request_event.complete(); +} + +fn dispatch_request(request: lsp_server::Request, lsp_state: &impl GlobalState) -> ServerResponse { + // Returns ControlFlow::Break(ServerResponse) if the request + // 
was handled, ControlFlow::Continue(Request) otherwise. + let get_response = || { + let request = LSPRequestDispatch::new(request, lsp_state) + .on_request_sync::(on_get_resolved_types_at_location)? + .on_request_sync::(on_search_schema_items)? + .on_request_sync::(on_explore_schema_for_type)? + .on_request_sync::( + on_get_source_location_of_type_definition, + )? + .on_request_sync::(on_hover)? + .on_request_sync::(on_goto_definition)? + .on_request_sync::(on_references)? + .on_request_sync::(on_completion)? + .on_request_sync::(on_resolve_completion_item)? + .on_request_sync::(on_code_action)? + .on_request_sync::(on_shutdown)? + .on_request_sync::(on_graphql_execute_query)? + .on_request_sync::(on_heartbeat)? + .on_request_sync::(on_find_field_usages)? + .on_request_sync::(on_inlay_hint_request)? + .request(); + + // If we have gotten here, we have not handled the request + ControlFlow::Continue(request) + }; + + match get_response() { + ControlFlow::Break(response) => response, + ControlFlow::Continue(request) => ServerResponse { + id: request.id, + result: None, + error: Some(ResponseError { + code: ErrorCode::MethodNotFound as i32, + data: None, + message: format!("No handler registered for method '{}'", request.method), + }), + }, + } +} + +fn with_request_logging<'a>( + lsp_request_event: &'a impl PerfLogEvent, + get_response: impl FnOnce(lsp_server::Request) -> ServerResponse + 'a, +) -> impl FnOnce(lsp_server::Request) -> ServerResponse + 'a { + move |request| { + lsp_request_event.string("lsp_method", request.method.clone()); + lsp_request_event.string("lsp_type", "request".to_string()); + let lsp_request_processing_time = lsp_request_event.start("lsp_message_processing_time"); + + let response = get_response(request); + + if response.result.is_some() { + lsp_request_event.string("lsp_outcome", "success".to_string()); + } else if let Some(error) = &response.error { + if error.code == ErrorCode::RequestCanceled as i32 { + 
lsp_request_event.string("lsp_outcome", "canceled".to_string()); + } else { + lsp_request_event.string("lsp_outcome", "error".to_string()); + lsp_request_event.string("lsp_error_message", error.message.to_string()); + if let Some(data) = &error.data { + lsp_request_event.string("lsp_error_data", data.to_string()); + } + } + } + // N.B. we don't handle the case where the ServerResponse has neither a result nor + // an error, which is an invalid state. + + lsp_request_event.stop(lsp_request_processing_time); + response + } +} + +fn handle_notification< + TPerfLogger: PerfLogger + 'static, + TSchemaDocumentation: SchemaDocumentation, +>( + lsp_state: Arc>, + notification: Notification, +) { + debug!("notification received {:?}", notification); + let lsp_notification_event = lsp_state.perf_logger.create_event("lsp_message"); + lsp_notification_event.string("lsp_method", notification.method.clone()); + lsp_notification_event.string("lsp_type", "notification".to_string()); + let lsp_notification_processing_time = + lsp_notification_event.start("lsp_message_processing_time"); + + let notification_result = dispatch_notification(notification, lsp_state.as_ref()); + + match notification_result { + ControlFlow::Continue(()) => { + // The notification is not handled + lsp_notification_event.string("lsp_outcome", "error".to_string()); + } + ControlFlow::Break(err) => { + if let Some(err) = err { + lsp_notification_event.string("lsp_outcome", "error".to_string()); + if let LSPRuntimeError::UnexpectedError(message) = err { + lsp_notification_event.string("lsp_error_message", message); + } + } else { + lsp_notification_event.string("lsp_outcome", "success".to_string()); + } + } + } + + lsp_notification_event.stop(lsp_notification_processing_time); + lsp_notification_event.complete(); +} + +fn dispatch_notification( + notification: lsp_server::Notification, + lsp_state: &impl GlobalState, +) -> ControlFlow, ()> { + // Returns ControlFlow::Break(Option) if the notification + // was 
handled, ControlFlow::Continue(()) otherwise. + let notification = LSPNotificationDispatch::new(notification, lsp_state) + .on_notification_sync::(on_did_open_text_document)? + .on_notification_sync::(on_did_close_text_document)? + .on_notification_sync::(on_did_change_text_document)? + .on_notification_sync::(on_did_save_text_document)? + .on_notification_sync::(on_cancel)? + .on_notification_sync::(on_exit)? + .notification(); + + // If we have gotten here, we have not handled the notification + debug!( + "Error: no handler registered for notification '{}'", + notification.method + ); + ControlFlow::Continue(()) +} diff --git a/compiler/crates/relay-lsp/src/server/lsp_state.rs b/compiler/crates/relay-lsp/src/server/lsp_state.rs index bacc67fa43728..7986dafc03ad0 100644 --- a/compiler/crates/relay-lsp/src/server/lsp_state.rs +++ b/compiler/crates/relay-lsp/src/server/lsp_state.rs @@ -8,6 +8,8 @@ use std::path::PathBuf; use std::sync::Arc; +use common::DiagnosticsResult; +use common::Location; use common::PerfLogger; use common::SourceLocationKey; use common::Span; @@ -23,9 +25,10 @@ use graphql_ir::BuilderOptions; use graphql_ir::FragmentVariablesSemantic; use graphql_ir::Program; use graphql_ir::RelayMode; -use graphql_syntax::parse_executable_with_error_recovery; +use graphql_syntax::parse_executable_with_error_recovery_and_parser_features; use graphql_syntax::ExecutableDefinition; use graphql_syntax::ExecutableDocument; +use graphql_syntax::GraphQLSource; use intern::string_key::Intern; use intern::string_key::StringKey; use log::debug; @@ -35,7 +38,10 @@ use lsp_types::Range; use lsp_types::TextDocumentPositionParams; use lsp_types::Url; use relay_compiler::config::Config; +use relay_compiler::get_parser_features; use relay_compiler::FileCategorizer; +use relay_compiler::FileGroup; +use relay_compiler::ProjectName; use relay_docblock::parse_docblock_ast; use relay_docblock::ParseOptions; use relay_transforms::deprecated_fields_for_executable_definition; @@ 
-49,12 +55,13 @@ use super::task_queue::TaskScheduler; use crate::diagnostic_reporter::DiagnosticReporter; use crate::docblock_resolution_info::create_docblock_resolution_info; use crate::graphql_tools::get_query_text; -use crate::js_language_server::JSLanguageServer; +use crate::location::transform_relay_location_to_lsp_location_with_cache; use crate::lsp_runtime_error::LSPRuntimeResult; use crate::node_resolution_info::create_node_resolution_info; use crate::utils::extract_executable_definitions_from_text_document; use crate::utils::extract_feature_from_text; -use crate::utils::extract_project_name_from_url; +use crate::utils::get_file_group_from_uri; +use crate::utils::get_project_name_from_file_group; use crate::ContentConsumerType; use crate::DocblockNode; use crate::Feature; @@ -115,9 +122,6 @@ pub trait GlobalState { /// For Native - it may be a BuildConfigName. fn extract_project_name_from_url(&self, url: &Url) -> LSPRuntimeResult; - /// Experimental (Relay-only) JS Language Server instance - fn get_js_language_sever(&self) -> Option<&dyn JSLanguageServer>; - /// This is powering the functionality of executing GraphQL query from the IDE fn get_full_query_text( &self, @@ -127,7 +131,7 @@ pub trait GlobalState { fn document_opened(&self, url: &Url, text: &str) -> LSPRuntimeResult<()>; - fn document_changed(&self, url: &Url, full_text: &str) -> LSPRuntimeResult<()>; + fn document_changed(&self, url: &Url, text: &str) -> LSPRuntimeResult<()>; fn document_closed(&self, url: &Url) -> LSPRuntimeResult<()>; @@ -135,6 +139,17 @@ pub trait GlobalState { /// we may need to know who's our current consumer. /// This is mostly for hover handler (where we render markup) fn get_content_consumer_type(&self) -> ContentConsumerType; + + /// Transform Relay location to LSP location. This involves converting + /// character offsets to line/column numbers which means we need access to + /// the text of the file. 
+ /// + /// This variant should be used when the Relay location was derived from an + /// open file which might not have been written to disk. + fn transform_relay_location_in_editor_to_lsp_location( + &self, + location: Location, + ) -> LSPRuntimeResult; } /// This structure contains all available resources that we may use in the Relay LSP message/notification @@ -152,12 +167,12 @@ pub struct LSPState< pub(crate) schemas: Schemas, schema_documentation_loader: Option>>, pub(crate) source_programs: SourcePrograms, - synced_javascript_features: DashMap>, + synced_javascript_sources: DashMap>, + synced_schema_sources: DashMap, pub(crate) perf_logger: Arc, pub(crate) diagnostic_reporter: Arc, pub(crate) notify_lsp_state_resources: Arc, pub(crate) project_status: ProjectStatusMap, - js_resource: Option>>, } impl @@ -173,7 +188,6 @@ impl>, >, - js_resource: Option>>, ) -> Self { debug!("Creating lsp_state..."); let file_categorizer = FileCategorizer::from_config(&config); @@ -197,8 +211,8 @@ impl) { - self.synced_javascript_features.insert(url.clone(), sources); + fn insert_synced_js_sources(&self, url: &Url, sources: Vec) { + self.synced_javascript_sources.insert(url.clone(), sources); } - fn validate_synced_sources(&self, url: &Url) -> LSPRuntimeResult<()> { + fn validate_synced_js_sources(&self, url: &Url) -> LSPRuntimeResult<()> { let mut diagnostics = vec![]; - let javascript_features = self.synced_javascript_features.get(url).ok_or_else(|| { + let javascript_features = self.synced_javascript_sources.get(url).ok_or_else(|| { LSPRuntimeError::UnexpectedError(format!("Expected GraphQL sources for URL {}", url)) })?; let project_name = self.extract_project_name_from_url(url)?; + let project_config = self + .config + .projects + .get(&ProjectName::from(project_name)) + .unwrap(); let schema = self .schemas .get(&project_name) @@ -228,30 +247,19 @@ impl { - let result = parse_executable_with_error_recovery( + let source_location_key = 
SourceLocationKey::embedded(url.as_ref(), index); + let result = parse_executable_with_error_recovery_and_parser_features( &graphql_source.text_source().text, source_location_key, + get_parser_features(project_config), ); diagnostics.extend(result.diagnostics.iter().map(|diagnostic| { self.diagnostic_reporter .convert_diagnostic(graphql_source.text_source(), diagnostic) })); - - let compiler_diagnostics = match build_ir_with_extra_features( - &schema, - &result.item.definitions, - &BuilderOptions { - allow_undefined_fragment_spreads: true, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: Some(RelayMode), - default_anonymous_operation_name: None, - }, - ) - .and_then(|documents| { + let get_errors_or_warnings = |documents| { let mut warnings = vec![]; for document in documents { // Today the only warning we check for is deprecated @@ -262,10 +270,14 @@ impl warnings, - Err(errors) => errors, }; + let compiler_diagnostics = + match build_ir_for_lsp(&schema, &result.item.definitions) + .and_then(get_errors_or_warnings) + { + Ok(warnings) => warnings, + Err(errors) => errors, + }; diagnostics.extend(compiler_diagnostics.iter().map(|diagnostic| { self.diagnostic_reporter @@ -280,19 +292,22 @@ impl, - ) -> LSPRuntimeResult<()> { - let project_name = self.extract_project_name_from_url(uri)?; - - if let Entry::Vacant(e) = self.project_status.entry(project_name) { - e.insert(ProjectStatus::Activated); - self.notify_lsp_state_resources.notify_one(); - } - - self.insert_synced_sources(uri, sources); + fn process_synced_js_sources(&self, uri: &Url, sources: Vec) { + self.insert_synced_js_sources(uri, sources); self.schedule_task(Task::ValidateSyncedSource(uri.clone())); - - Ok(()) } - fn remove_synced_sources(&self, url: &Url) { - self.synced_javascript_features.remove(url); + fn remove_synced_js_sources(&self, url: &Url) { + self.synced_javascript_sources.remove(url); self.diagnostic_reporter .clear_quick_diagnostics_for_url(url); } + + fn 
insert_synced_schema_source(&self, url: &Url, graphql_source: GraphQLSource) { + self.synced_schema_sources + .insert(url.clone(), graphql_source); + } + + fn remove_synced_schema_source(&self, url: &Url) { + self.synced_schema_sources.remove(url); + } + + fn initialize_lsp_state_resources(&self, project_name: StringKey) { + if let Entry::Vacant(e) = self.project_status.entry(project_name) { + e.insert(ProjectStatus::Activated); + self.notify_lsp_state_resources.notify_one(); + } + } } impl @@ -400,17 +418,21 @@ impl FeatureResolutionInfo::GraphqlNode( - create_node_resolution_info(executable_document, position_span)?, - ), - Feature::DocblockIr(docblock_ir) => FeatureResolutionInfo::DocblockNode(DocblockNode { - resolution_info: create_docblock_resolution_info(&docblock_ir, position_span) - .ok_or(LSPRuntimeError::ExpectedError)?, - ir: docblock_ir, - }), - }; - Ok(info) + match feature { + Feature::ExecutableDocument(executable_document) => { + Ok(FeatureResolutionInfo::GraphqlNode( + create_node_resolution_info(executable_document, position_span)?, + )) + } + Feature::DocblockIr(docblock_ir) => { + Ok(FeatureResolutionInfo::DocblockNode(DocblockNode { + resolution_info: create_docblock_resolution_info(&docblock_ir, position_span) + .ok_or(LSPRuntimeError::ExpectedError)?, + ir: docblock_ir, + })) + } + Feature::SchemaDocument(_) => Err(LSPRuntimeError::ExpectedError), + } } /// Return a parsed executable document for this LSP request, only if the request occurs @@ -422,8 +444,9 @@ impl LSPRuntimeResult<(ExecutableDocument, Span)> { let (feature, span) = self.extract_feature_from_text(position, index_offset)?; match feature { - Feature::GraphQLDocument(document) => Ok((document, span)), + Feature::ExecutableDocument(document) => Ok((document, span)), Feature::DocblockIr(_) => Err(LSPRuntimeError::ExpectedError), + Feature::SchemaDocument(_) => Err(LSPRuntimeError::ExpectedError), } } @@ -434,12 +457,15 @@ impl LSPRuntimeResult<(Feature, Span)> { - let 
project_name = self.extract_project_name_from_url(&position.text_document.uri)?; + let project_name: ProjectName = self + .extract_project_name_from_url(&position.text_document.uri)? + .into(); let project_config = self.config.projects.get(&project_name).unwrap(); extract_feature_from_text( project_config, - &self.synced_javascript_features, + &self.synced_javascript_sources, + &self.synced_schema_sources, position, index_offset, ) @@ -459,7 +485,14 @@ impl LSPRuntimeResult { - extract_project_name_from_url(&self.file_categorizer, url, &self.root_dir) + let file_group = get_file_group_from_uri(&self.file_categorizer, url, &self.root_dir)?; + + get_project_name_from_file_group(&file_group).map_err(|msg| { + LSPRuntimeError::UnexpectedError(format!( + "Could not determine project name for \"{}\": {}", + url, msg + )) + }) } fn get_extra_data_provider(&self) -> &dyn LSPExtraDataProvider { @@ -470,9 +503,15 @@ impl LSPRuntimeResult> { + let project_name: ProjectName = self + .extract_project_name_from_url(text_document_uri)? 
+ .into(); + let project_config = self.config.projects.get(&project_name).unwrap(); + extract_executable_definitions_from_text_document( text_document_uri, - &self.synced_javascript_features, + &self.synced_javascript_sources, + get_parser_features(project_config), ) } @@ -481,58 +520,109 @@ impl Option<&dyn JSLanguageServer> { - self.js_resource.as_deref() - } - fn get_full_query_text( &self, query_text: String, project_name: &StringKey, ) -> LSPRuntimeResult { - get_query_text(self, query_text, project_name) + get_query_text(self, query_text, (*project_name).into()) } fn document_opened(&self, uri: &Url, text: &str) -> LSPRuntimeResult<()> { - if let Some(js_server) = self.get_js_language_sever() { - js_server.process_js_source(uri, text); - } + let file_group = get_file_group_from_uri(&self.file_categorizer, uri, &self.root_dir)?; + let project_name = get_project_name_from_file_group(&file_group).map_err(|msg| { + LSPRuntimeError::UnexpectedError(format!( + "Could not determine project name for \"{}\": {}", + uri, msg + )) + })?; + + match file_group { + FileGroup::Schema { project_set: _ } | FileGroup::Extension { project_set: _ } => { + self.initialize_lsp_state_resources(project_name); + self.insert_synced_schema_source(uri, GraphQLSource::new(text, 0, 0)); + + Ok(()) + } + FileGroup::Source { project_set: _ } => { + let mut embedded_sources = extract_graphql::extract(text); + if text.contains("relay:enable-new-relay-resolver") { + embedded_sources + .retain(|source| !matches!(source, JavaScriptSourceFeature::Docblock(_))); + } + + if !embedded_sources.is_empty() { + self.initialize_lsp_state_resources(project_name); + self.process_synced_js_sources(uri, embedded_sources); + } - // First we check to see if this document has any GraphQL documents. 
- let embedded_sources = extract_graphql::extract(text); - if embedded_sources.is_empty() { - Ok(()) - } else { - self.process_synced_sources(uri, embedded_sources) + Ok(()) + } + _ => Err(LSPRuntimeError::ExpectedError), } } - fn document_changed(&self, uri: &Url, full_text: &str) -> LSPRuntimeResult<()> { - if let Some(js_server) = self.get_js_language_sever() { - js_server.process_js_source(uri, full_text); - } + fn document_changed(&self, uri: &Url, text: &str) -> LSPRuntimeResult<()> { + let file_group = get_file_group_from_uri(&self.file_categorizer, uri, &self.root_dir)?; - // First we check to see if this document has any GraphQL documents. - let embedded_sources = extract_graphql::extract(full_text); - if embedded_sources.is_empty() { - self.remove_synced_sources(uri); - Ok(()) - } else { - self.process_synced_sources(uri, embedded_sources) + match file_group { + FileGroup::Schema { project_set: _ } | FileGroup::Extension { project_set: _ } => { + self.insert_synced_schema_source(uri, GraphQLSource::new(text, 0, 0)); + + Ok(()) + } + FileGroup::Source { project_set: _ } => { + let embedded_sources = extract_graphql::extract(text); + if embedded_sources.is_empty() { + self.remove_synced_js_sources(uri); + } else { + self.process_synced_js_sources(uri, embedded_sources); + } + + Ok(()) + } + _ => Err(LSPRuntimeError::ExpectedError), } } fn document_closed(&self, uri: &Url) -> LSPRuntimeResult<()> { - if let Some(js_server) = self.get_js_language_sever() { - js_server.remove_js_source(uri); - } - self.remove_synced_sources(uri); + self.remove_synced_schema_source(uri); + self.remove_synced_js_sources(uri); Ok(()) } fn get_content_consumer_type(&self) -> ContentConsumerType { ContentConsumerType::Relay } + + fn transform_relay_location_in_editor_to_lsp_location( + &self, + location: Location, + ) -> LSPRuntimeResult { + transform_relay_location_to_lsp_location_with_cache( + &self.root_dir(), + location, + Some(&self.synced_javascript_sources), + 
Some(&self.synced_schema_sources), + ) + } +} + +pub fn build_ir_for_lsp( + schema: &SDLSchema, + definitions: &[ExecutableDefinition], +) -> DiagnosticsResult> { + build_ir_with_extra_features( + schema, + definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: true, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(RelayMode), + default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility + }, + ) } #[derive(Debug)] @@ -550,10 +640,10 @@ pub(crate) fn handle_lsp_state_tasks< ) { match task { Task::ValidateSyncedSource(url) => { - state.validate_synced_sources(&url).ok(); + state.validate_synced_js_sources(&url).ok(); } Task::ValidateSyncedSources => { - for item in &state.synced_javascript_features { + for item in &state.synced_javascript_sources { state.schedule_task(Task::ValidateSyncedSource(item.key().clone())); } } diff --git a/compiler/crates/relay-lsp/src/server/lsp_state_resources.rs b/compiler/crates/relay-lsp/src/server/lsp_state_resources.rs index 571151852b609..32f2c6886d40e 100644 --- a/compiler/crates/relay-lsp/src/server/lsp_state_resources.rs +++ b/compiler/crates/relay-lsp/src/server/lsp_state_resources.rs @@ -14,29 +14,32 @@ use dashmap::mapref::entry::Entry; use fnv::FnvHashMap; use graphql_ir::FragmentDefinitionNameSet; use graphql_watchman::WatchmanFileSourceSubscriptionNextChange; -use intern::string_key::StringKey; use log::debug; use rayon::iter::ParallelIterator; use relay_compiler::build_project::get_project_asts; +use relay_compiler::build_project::BuildMode; use relay_compiler::build_project::ProjectAstData; use relay_compiler::build_project::ProjectAsts; use relay_compiler::build_raw_program; use relay_compiler::build_schema; use relay_compiler::compiler_state::CompilerState; -use relay_compiler::compiler_state::ProjectName; +use relay_compiler::config::Config; use relay_compiler::config::ProjectConfig; use 
relay_compiler::errors::BuildProjectError; use relay_compiler::errors::Error; use relay_compiler::transform_program; use relay_compiler::validate_program; +use relay_compiler::ArtifactSourceKey; use relay_compiler::BuildProjectFailure; use relay_compiler::FileSource; use relay_compiler::FileSourceResult; use relay_compiler::FileSourceSubscription; use relay_compiler::FileSourceSubscriptionNextChange; use relay_compiler::GraphQLAsts; +use relay_compiler::ProjectName; use relay_compiler::SourceControlUpdateStatus; use schema::SDLSchema; +use schema_diff::check::SchemaChangeSafety; use schema_documentation::SchemaDocumentation; use tokio::task; use tokio::task::JoinHandle; @@ -239,7 +242,8 @@ impl { - errors.push(err); - } - _ => {} + if let Err(BuildProjectFailure::Error(err)) = build_result { + errors.push(err); } } if errors.is_empty() { @@ -300,20 +308,27 @@ impl, - ) -> Result { + ) -> Result { self.lsp_state .project_status - .insert(project_config.name, ProjectStatus::Completed); + .insert(project_config.name.into(), ProjectStatus::Completed); let log_event = self.lsp_state.perf_logger.create_event("build_lsp_project"); let project_name = project_config.name; let build_time = log_event.start("build_lsp_project_time"); log_event.string("project", project_name.to_string()); let schema = log_event.time("build_schema_time", || { - self.build_schema(compiler_state, project_config, graphql_asts_map) + self.build_schema( + compiler_state, + config, + project_config, + graphql_asts_map, + &log_event, + ) })?; let ProjectAstData { @@ -341,18 +356,26 @@ impl, + log_event: &impl PerfLogEvent, ) -> Result, BuildProjectFailure> { - match self.lsp_state.schemas.entry(project_config.name) { + match self.lsp_state.schemas.entry(project_config.name.into()) { Entry::Vacant(e) => { - let schema = build_schema(compiler_state, project_config, graphql_asts_map) - .map_err(|errors| { - BuildProjectFailure::Error(BuildProjectError::ValidationErrors { - errors, - project_name: 
project_config.name, - }) - })?; + let schema = build_schema( + compiler_state, + config, + project_config, + graphql_asts_map, + log_event, + ) + .map_err(|errors| { + BuildProjectFailure::Error(BuildProjectError::ValidationErrors { + errors, + project_name: project_config.name, + }) + })?; e.insert(Arc::clone(&schema)); Ok(schema) } @@ -360,14 +383,20 @@ impl, log_event: &impl PerfLogEvent, ) -> Result<(), BuildProjectFailure> { - let is_incremental_build = self + let mut build_mode = if !self .lsp_state .source_programs - .contains_key(&project_config.name) - && compiler_state.has_processed_changes() - && !compiler_state - .has_breaking_schema_change(project_config.name, &project_config.schema_config) - && if let Some(base) = project_config.base { - !compiler_state.has_breaking_schema_change(base, &project_config.schema_config) + .contains_key(&project_config.name.into()) + || !compiler_state.has_processed_changes() + { + BuildMode::Full + } else { + let project_schema_change = compiler_state.schema_change_safety( + log_event, + project_config.name, + &project_config.schema_config, + ); + match project_schema_change { + SchemaChangeSafety::Unsafe => BuildMode::Full, + SchemaChangeSafety::Safe | SchemaChangeSafety::SafeWithIncrementalBuild(_) => { + let base_schema_change = if let Some(base) = project_config.base { + compiler_state.schema_change_safety( + log_event, + base, + &project_config.schema_config, + ) + } else { + SchemaChangeSafety::Safe + }; + match (project_schema_change, base_schema_change) { + (SchemaChangeSafety::Unsafe, _) => BuildMode::Full, + (_, SchemaChangeSafety::Unsafe) => BuildMode::Full, + (SchemaChangeSafety::Safe, SchemaChangeSafety::Safe) => { + BuildMode::Incremental + } + ( + SchemaChangeSafety::SafeWithIncrementalBuild(c), + SchemaChangeSafety::Safe, + ) => BuildMode::IncrementalWithSchemaChanges(c), + ( + SchemaChangeSafety::Safe, + SchemaChangeSafety::SafeWithIncrementalBuild(c), + ) => BuildMode::IncrementalWithSchemaChanges(c), + 
( + SchemaChangeSafety::SafeWithIncrementalBuild(c1), + SchemaChangeSafety::SafeWithIncrementalBuild(c2), + ) => BuildMode::IncrementalWithSchemaChanges( + c1.into_iter().chain(c2).collect(), + ), + } + } + } + }; + if !self.lsp_state.config.has_schema_change_incremental_build { + // Killswitch here to bail out of schema based incremental builds + build_mode = if let BuildMode::IncrementalWithSchemaChanges(_) = build_mode { + BuildMode::Full } else { - true - }; + build_mode + } + } + log_event.bool( + "is_incremental_build", + match build_mode { + BuildMode::Incremental | BuildMode::IncrementalWithSchemaChanges(_) => true, + BuildMode::Full => false, + }, + ); + log_event.string( + "build_mode", + match build_mode { + BuildMode::Full => String::from("Full"), + BuildMode::Incremental => String::from("Incremental"), + BuildMode::IncrementalWithSchemaChanges(_) => { + String::from("IncrementalWithSchemaChanges") + } + }, + ); - let (base_program, _) = build_raw_program( - project_config, - project_asts, - schema, - log_event, - is_incremental_build, - )?; + let (base_program, _) = + build_raw_program(project_config, project_asts, schema, log_event, build_mode)?; if compiler_state.should_cancel_current_build() { debug!("Build is cancelled: updates in source code/or new file changes are pending."); return Err(BuildProjectFailure::Cancelled); } - match self.lsp_state.source_programs.entry(project_config.name) { + match self + .lsp_state + .source_programs + .entry(project_config.name.into()) + { Entry::Vacant(e) => { e.insert(base_program.clone()); } Entry::Occupied(mut e) => { let program = e.get_mut(); - let removed_definition_names = graphql_asts - .get(&project_config.name) - .map(|ast| ast.removed_definition_names.as_ref()); + let removed_definition_names = graphql_asts.get(&project_config.name).map(|ast| { + ast.removed_definition_names + .iter() + .filter_map(|artifact_source| match artifact_source { + ArtifactSourceKey::ExecutableDefinition(name) => Some(*name), 
+ ArtifactSourceKey::Schema() | ArtifactSourceKey::ResolverHash(_) => { + // In the LSP program, we only care about tracking user-editable ExecutableDefinitions + None + } + }) + .collect::>() + }); program.merge_program(&base_program, removed_definition_names); } } diff --git a/compiler/crates/relay-lsp/src/server/mod.rs b/compiler/crates/relay-lsp/src/server/mod.rs deleted file mode 100644 index a5fa3abf6d473..0000000000000 --- a/compiler/crates/relay-lsp/src/server/mod.rs +++ /dev/null @@ -1,362 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod heartbeat; -mod lsp_notification_dispatch; -mod lsp_request_dispatch; -mod lsp_state; -mod lsp_state_resources; -mod task_queue; - -use std::ops::ControlFlow; -use std::sync::Arc; - -use common::PerfLogEvent; -use common::PerfLogger; -use crossbeam::channel::Receiver; -use crossbeam::select; -use heartbeat::on_heartbeat; -use heartbeat::HeartbeatRequest; -use log::debug; -pub use lsp_notification_dispatch::LSPNotificationDispatch; -pub use lsp_request_dispatch::LSPRequestDispatch; -use lsp_server::Connection; -use lsp_server::ErrorCode; -use lsp_server::Message; -use lsp_server::Notification; -use lsp_server::Response as ServerResponse; -use lsp_server::ResponseError; -pub use lsp_state::GlobalState; -pub use lsp_state::LSPState; -pub use lsp_state::Schemas; -pub use lsp_state::SourcePrograms; -use lsp_types::notification::Cancel; -use lsp_types::notification::DidChangeTextDocument; -use lsp_types::notification::DidCloseTextDocument; -use lsp_types::notification::DidOpenTextDocument; -use lsp_types::notification::DidSaveTextDocument; -use lsp_types::notification::Exit; -use lsp_types::request::CodeActionRequest; -use lsp_types::request::Completion; -use lsp_types::request::GotoDefinition; -use lsp_types::request::HoverRequest; -use lsp_types::request::References; 
-use lsp_types::request::ResolveCompletionItem; -use lsp_types::request::Shutdown; -use lsp_types::CodeActionProviderCapability; -use lsp_types::CompletionOptions; -use lsp_types::InitializeParams; -use lsp_types::ServerCapabilities; -use lsp_types::TextDocumentSyncCapability; -use lsp_types::TextDocumentSyncKind; -use lsp_types::WorkDoneProgressOptions; -use relay_compiler::config::Config; -use relay_compiler::NoopArtifactWriter; -use schema_documentation::SchemaDocumentation; -use schema_documentation::SchemaDocumentationLoader; - -use self::task_queue::TaskProcessor; -use crate::code_action::on_code_action; -use crate::completion::on_completion; -use crate::completion::on_resolve_completion_item; -use crate::explore_schema_for_type::on_explore_schema_for_type; -use crate::explore_schema_for_type::ExploreSchemaForType; -use crate::find_field_usages::on_find_field_usages; -use crate::find_field_usages::FindFieldUsages; -use crate::goto_definition::on_get_source_location_of_type_definition; -use crate::goto_definition::on_goto_definition; -use crate::goto_definition::GetSourceLocationOfTypeDefinition; -use crate::graphql_tools::on_graphql_execute_query; -use crate::graphql_tools::GraphQLExecuteQuery; -use crate::hover::on_hover; -use crate::js_language_server::JSLanguageServer; -use crate::lsp_process_error::LSPProcessResult; -use crate::lsp_runtime_error::LSPRuntimeError; -use crate::references::on_references; -use crate::resolved_types_at_location::on_get_resolved_types_at_location; -use crate::resolved_types_at_location::ResolvedTypesAtLocation; -use crate::search_schema_items::on_search_schema_items; -use crate::search_schema_items::SearchSchemaItems; -use crate::server::lsp_state::handle_lsp_state_tasks; -use crate::server::lsp_state_resources::LSPStateResources; -use crate::server::task_queue::TaskQueue; -use crate::shutdown::on_exit; -use crate::shutdown::on_shutdown; -use crate::status_reporter::LSPStatusReporter; -use crate::text_documents::on_cancel; -use 
crate::text_documents::on_did_change_text_document; -use crate::text_documents::on_did_close_text_document; -use crate::text_documents::on_did_open_text_document; -use crate::text_documents::on_did_save_text_document; -pub use crate::LSPExtraDataProvider; - -/// Initializes an LSP connection, handling the `initialize` message and `initialized` notification -/// handshake. -pub fn initialize(connection: &Connection) -> LSPProcessResult { - let server_capabilities = ServerCapabilities { - // Enable text document syncing so we can know when files are opened/changed/saved/closed - text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)), - - completion_provider: Some(CompletionOptions { - resolve_provider: Some(true), - trigger_characters: Some(vec!["(".into(), "\n".into(), ",".into(), "@".into()]), - work_done_progress_options: WorkDoneProgressOptions { - work_done_progress: None, - }, - ..Default::default() - }), - - hover_provider: Some(lsp_types::HoverProviderCapability::Simple(true)), - definition_provider: Some(lsp_types::OneOf::Left(true)), - references_provider: Some(lsp_types::OneOf::Left(true)), - code_action_provider: Some(CodeActionProviderCapability::Simple(true)), - ..Default::default() - }; - - let server_capabilities = serde_json::to_value(&server_capabilities)?; - let params = connection.initialize(server_capabilities)?; - let params: InitializeParams = serde_json::from_value(params)?; - Ok(params) -} - -#[derive(Debug)] -pub enum Task { - InboundMessage(lsp_server::Message), - LSPState(lsp_state::Task), -} - -/// Run the main server loop -pub async fn run< - TPerfLogger: PerfLogger + 'static, - TSchemaDocumentation: SchemaDocumentation + 'static, ->( - connection: Connection, - mut config: Config, - _params: InitializeParams, - perf_logger: Arc, - extra_data_provider: Box, - schema_documentation_loader: Option>>, - js_resource: Option< - Box>>, - >, -) -> LSPProcessResult<()> -where - TPerfLogger: PerfLogger + 'static, -{ 
- debug!( - "Running language server with config root {:?}", - config.root_dir - ); - - let task_processor = LSPTaskProcessor; - let task_queue = TaskQueue::new(Arc::new(task_processor)); - let task_scheduler = task_queue.get_scheduler(); - - config.artifact_writer = Box::new(NoopArtifactWriter); - config.status_reporter = Box::new(LSPStatusReporter::new( - config.root_dir.clone(), - connection.sender.clone(), - )); - - let lsp_state = Arc::new(LSPState::new( - Arc::new(config), - connection.sender.clone(), - Arc::clone(&task_scheduler), - Arc::clone(&perf_logger), - extra_data_provider, - schema_documentation_loader, - js_resource, - )); - - LSPStateResources::new(Arc::clone(&lsp_state)).watch(); - - while let Some(task) = next_task(&connection.receiver, &task_queue.receiver) { - task_queue.process(Arc::clone(&lsp_state), task); - } - - panic!("Client exited without proper shutdown sequence.") -} - -fn next_task( - lsp_receiver: &Receiver, - task_queue_receiver: &Receiver, -) -> Option { - select! 
{ - recv(lsp_receiver) -> message => message.ok().map(Task::InboundMessage), - recv(task_queue_receiver) -> task => task.ok() - } -} - -struct LSPTaskProcessor; - -impl - TaskProcessor, Task> for LSPTaskProcessor -{ - fn process(&self, state: Arc>, task: Task) { - match task { - Task::InboundMessage(Message::Request(request)) => handle_request(state, request), - Task::InboundMessage(Message::Notification(notification)) => { - handle_notification(state, notification); - } - Task::LSPState(lsp_task) => { - handle_lsp_state_tasks(state, lsp_task); - } - Task::InboundMessage(Message::Response(_)) => { - // TODO: handle response from the client -> cancel message, etc - } - } - } -} - -fn handle_request( - lsp_state: Arc>, - request: lsp_server::Request, -) { - debug!("request received {:?}", request); - let get_server_response_bound = |req| dispatch_request(req, lsp_state.as_ref()); - let get_response = with_request_logging(&lsp_state.perf_logger, get_server_response_bound); - - lsp_state - .send_message(Message::Response(get_response(request))) - .expect("Unable to send message to a client."); -} - -fn dispatch_request(request: lsp_server::Request, lsp_state: &impl GlobalState) -> ServerResponse { - // Returns ControlFlow::Break(ServerResponse) if the request - // was handled, ControlFlow::Continue(Request) otherwise. - let get_response = || { - let request = LSPRequestDispatch::new(request, lsp_state) - .on_request_sync::(on_get_resolved_types_at_location)? - .on_request_sync::(on_search_schema_items)? - .on_request_sync::(on_explore_schema_for_type)? - .on_request_sync::( - on_get_source_location_of_type_definition, - )? - .on_request_sync::(on_hover)? - .on_request_sync::(on_goto_definition)? - .on_request_sync::(on_references)? - .on_request_sync::(on_completion)? - .on_request_sync::(on_resolve_completion_item)? - .on_request_sync::(on_code_action)? - .on_request_sync::(on_shutdown)? - .on_request_sync::(on_graphql_execute_query)? 
- .on_request_sync::(on_heartbeat)? - .on_request_sync::(on_find_field_usages)? - .request(); - - // If we have gotten here, we have not handled the request - ControlFlow::Continue(request) - }; - - match get_response() { - ControlFlow::Break(response) => response, - ControlFlow::Continue(request) => ServerResponse { - id: request.id, - result: None, - error: Some(ResponseError { - code: ErrorCode::MethodNotFound as i32, - data: None, - message: format!("No handler registered for method '{}'", request.method), - }), - }, - } -} - -fn with_request_logging<'a, TPerfLogger: PerfLogger + 'static>( - perf_logger: &'a Arc, - get_response: impl FnOnce(lsp_server::Request) -> ServerResponse + 'a, -) -> impl FnOnce(lsp_server::Request) -> ServerResponse + 'a { - move |request| { - let lsp_request_event = perf_logger.create_event("lsp_message"); - lsp_request_event.string("lsp_method", request.method.clone()); - lsp_request_event.string("lsp_type", "request".to_string()); - let lsp_request_processing_time = lsp_request_event.start("lsp_message_processing_time"); - - let response = get_response(request); - - if response.result.is_some() { - lsp_request_event.string("lsp_outcome", "success".to_string()); - } else if let Some(error) = &response.error { - if error.code == ErrorCode::RequestCanceled as i32 { - lsp_request_event.string("lsp_outcome", "canceled".to_string()); - } else { - lsp_request_event.string("lsp_outcome", "error".to_string()); - lsp_request_event.string("lsp_error_message", error.message.to_string()); - if let Some(data) = &error.data { - lsp_request_event.string("lsp_error_data", data.to_string()); - } - } - } - // N.B. we don't handle the case where the ServerResponse has neither a result nor - // an error, which is an invalid state. 
- - lsp_request_event.stop(lsp_request_processing_time); - lsp_request_event.complete(); - - response - } -} - -fn handle_notification< - TPerfLogger: PerfLogger + 'static, - TSchemaDocumentation: SchemaDocumentation, ->( - lsp_state: Arc>, - notification: Notification, -) { - debug!("notification received {:?}", notification); - let lsp_notification_event = lsp_state.perf_logger.create_event("lsp_message"); - lsp_notification_event.string("lsp_method", notification.method.clone()); - lsp_notification_event.string("lsp_type", "notification".to_string()); - let lsp_notification_processing_time = - lsp_notification_event.start("lsp_message_processing_time"); - - let notification_result = dispatch_notification(notification, lsp_state.as_ref()); - - match notification_result { - ControlFlow::Continue(()) => { - // The notification is not handled - lsp_notification_event.string("lsp_outcome", "error".to_string()); - } - ControlFlow::Break(err) => { - if let Some(err) = err { - lsp_notification_event.string("lsp_outcome", "error".to_string()); - if let LSPRuntimeError::UnexpectedError(message) = err { - lsp_notification_event.string("lsp_error_message", message); - } - } else { - lsp_notification_event.string("lsp_outcome", "success".to_string()); - } - } - } - - lsp_notification_event.stop(lsp_notification_processing_time); - lsp_notification_event.complete(); -} - -fn dispatch_notification( - notification: lsp_server::Notification, - lsp_state: &impl GlobalState, -) -> ControlFlow, ()> { - // Returns ControlFlow::Break(Option) if the notification - // was handled, ControlFlow::Continue(()) otherwise. - let notification = LSPNotificationDispatch::new(notification, lsp_state) - .on_notification_sync::(on_did_open_text_document)? - .on_notification_sync::(on_did_close_text_document)? - .on_notification_sync::(on_did_change_text_document)? - .on_notification_sync::(on_did_save_text_document)? - .on_notification_sync::(on_cancel)? - .on_notification_sync::(on_exit)? 
- .notification(); - - // If we have gotten here, we have not handled the notification - debug!( - "Error: no handler registered for notification '{}'", - notification.method - ); - ControlFlow::Continue(()) -} diff --git a/compiler/crates/relay-lsp/src/utils.rs b/compiler/crates/relay-lsp/src/utils.rs index dac43e77cc4bc..2195053f8f952 100644 --- a/compiler/crates/relay-lsp/src/utils.rs +++ b/compiler/crates/relay-lsp/src/utils.rs @@ -13,13 +13,16 @@ use common::TextSource; use dashmap::DashMap; use docblock_syntax::parse_docblock; use extract_graphql::JavaScriptSourceFeature; -use graphql_syntax::parse_executable_with_error_recovery; +use graphql_syntax::parse_executable_with_error_recovery_and_parser_features; use graphql_syntax::ExecutableDefinition; +use graphql_syntax::GraphQLSource; +use graphql_syntax::ParserFeatures; use intern::string_key::StringKey; use log::debug; use lsp_types::Position; use lsp_types::TextDocumentPositionParams; use lsp_types::Url; +use relay_compiler::get_parser_features; use relay_compiler::FileCategorizer; use relay_compiler::FileGroup; use relay_compiler::ProjectConfig; @@ -42,6 +45,7 @@ pub fn is_file_uri_in_dir(root_dir: PathBuf, file_uri: &Url) -> bool { pub fn extract_executable_definitions_from_text_document( text_document_uri: &Url, source_feature_cache: &DashMap>, + parser_features: ParserFeatures, ) -> LSPRuntimeResult> { let source_features = source_feature_cache .get(text_document_uri) @@ -49,32 +53,35 @@ pub fn extract_executable_definitions_from_text_document( // the source has no graphql documents. 
.ok_or(LSPRuntimeError::ExpectedError)?; + let path = text_document_uri.path(); + let definitions = source_features .iter() - .filter_map(|feature| match feature { + .enumerate() + .filter_map(|(i, feature)| match feature { JavaScriptSourceFeature::Docblock(_) => None, - JavaScriptSourceFeature::GraphQL(graphql_source) => Some(graphql_source), + JavaScriptSourceFeature::GraphQL(graphql_source) => Some((i, graphql_source)), }) - .map(|graphql_source| { - let document = parse_executable_with_error_recovery( + .flat_map(|(i, graphql_source)| { + let document = parse_executable_with_error_recovery_and_parser_features( &graphql_source.text_source().text, - SourceLocationKey::standalone(&text_document_uri.to_string()), + SourceLocationKey::embedded(path, i), + parser_features, ) .item; document.definitions }) - .flatten() .collect::>(); Ok(definitions) } -pub fn extract_project_name_from_url( +pub fn get_file_group_from_uri( file_categorizer: &FileCategorizer, url: &Url, root_dir: &PathBuf, -) -> LSPRuntimeResult { +) -> LSPRuntimeResult { let absolute_file_path = url.to_file_path().map_err(|_| { LSPRuntimeError::UnexpectedError(format!("Unable to convert URL to file path: {:?}", url)) })?; @@ -86,40 +93,58 @@ pub fn extract_project_name_from_url( )) })?; - let project_name = if let FileGroup::Source { project_set } = - file_categorizer.categorize(file_path).map_err(|_| { - LSPRuntimeError::UnexpectedError(format!( - "Unable to categorize the file correctly: {:?}", - file_path - )) - })? { - *project_set.first().ok_or_else(|| { - LSPRuntimeError::UnexpectedError(format!( - "Expected to find at least one project for {:?}", - file_path - )) - })? 
- } else { - return Err(LSPRuntimeError::UnexpectedError(format!( - "File path {:?} is not a source set", + file_categorizer.categorize(file_path).map_err(|_| { + LSPRuntimeError::UnexpectedError(format!( + "Unable to categorize the file correctly: {:?}", file_path - ))); - }; - Ok(project_name) + )) + }) +} + +pub fn get_project_name_from_file_group(file_group: &FileGroup) -> Result { + let project_set = match file_group { + FileGroup::Source { project_set } => Ok(project_set), + FileGroup::Schema { project_set } => Ok(project_set), + FileGroup::Extension { project_set } => Ok(project_set), + _ => Err("Not part of a source set"), + }?; + + let project_name = *project_set + .first() + .ok_or("Expected to find at least one project")?; + + Ok(project_name.into()) } /// Return a parsed executable document, or parsed Docblock IR for this LSP /// request, only if the request occurs within a GraphQL document or Docblock. pub fn extract_feature_from_text( project_config: &ProjectConfig, - source_feature_cache: &DashMap>, + js_source_feature_cache: &DashMap>, + schema_source_cache: &DashMap, text_document_position: &TextDocumentPositionParams, index_offset: usize, ) -> LSPRuntimeResult<(Feature, Span)> { let uri = &text_document_position.text_document.uri; let position = text_document_position.position; - let source_features = source_feature_cache + if let Some(schema_source) = schema_source_cache.get(uri) { + let source_location_key = SourceLocationKey::standalone(uri.as_ref()); + let schema_document = graphql_syntax::parse_schema_document( + &schema_source.text_source().text, + source_location_key, + ) + .map_err(|_| LSPRuntimeError::ExpectedError)?; + + let position_span = position_to_span(&position, schema_source.text_source(), index_offset) + .ok_or_else(|| { + LSPRuntimeError::UnexpectedError("Failed to map positions to spans".to_string()) + })?; + + return Ok((Feature::SchemaDocument(schema_document), position_span)); + } + + let source_features = 
js_source_feature_cache .get(uri) .ok_or(LSPRuntimeError::ExpectedError)?; @@ -134,11 +159,14 @@ pub fn extract_feature_from_text( let source_location_key = SourceLocationKey::embedded(uri.as_ref(), index); + let parser_features = get_parser_features(project_config); + match javascript_feature { JavaScriptSourceFeature::GraphQL(graphql_source) => { - let document = parse_executable_with_error_recovery( + let document = parse_executable_with_error_recovery_and_parser_features( &graphql_source.text_source().text, source_location_key, + parser_features, ) .item; @@ -160,23 +188,33 @@ pub fn extract_feature_from_text( // since the change event fires before completion. debug!("position_span: {:?}", position_span); - Ok((Feature::GraphQLDocument(document), position_span)) + Ok((Feature::ExecutableDocument(document), position_span)) } JavaScriptSourceFeature::Docblock(docblock_source) => { - let executable_definitions_in_file = - extract_executable_definitions_from_text_document(uri, source_feature_cache)?; - let text_source = &docblock_source.text_source(); let text = &text_source.text; + if text.contains("relay:enable-new-relay-resolver") { + return Err(LSPRuntimeError::ExpectedError); + } + + let executable_definitions_in_file = extract_executable_definitions_from_text_document( + uri, + js_source_feature_cache, + parser_features, + )?; let docblock_ir = parse_docblock(text, source_location_key) .and_then(|ast| { parse_docblock_ast( + &project_config.name, &ast, Some(&executable_definitions_in_file), - ParseOptions { - enable_output_type: &project_config + &ParseOptions { + enable_interface_output_type: &project_config + .feature_flags + .relay_resolver_enable_interface_output_type, + allow_resolver_non_nullable_return_type: &project_config .feature_flags - .relay_resolver_enable_output_type, + .allow_resolver_non_nullable_return_type, }, ) }) diff --git a/compiler/crates/relay-lsp/tests/find_field_usages.rs b/compiler/crates/relay-lsp/tests/find_field_usages.rs new 
file mode 100644 index 0000000000000..6f1c4318f03e8 --- /dev/null +++ b/compiler/crates/relay-lsp/tests/find_field_usages.rs @@ -0,0 +1,52 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::reexport::Intern; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use relay_lsp::find_field_usages; +use relay_test_schema::get_test_schema; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%arguments%").collect(); + if let [document, arguments] = parts.as_slice() { + if let [type_name, field_name] = arguments + .split_ascii_whitespace() + .collect::>() + .as_slice() + { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let schema = get_test_schema(); + let ast = parse_executable(document, source_location).unwrap(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(schema, ir); + + let result = find_field_usages::get_usages( + &program, + &get_test_schema(), + type_name.intern(), + field_name.intern(), + ) + .unwrap() + .into_iter() + .map(|location| format!("{:?}\n", location)) + .collect::>(); + + Ok(result.concat()) + } else { + panic!( + "Fixture {} has incorrect # arguments (expected 2)", + fixture.file_name + ); + } + } else { + panic!("Fixture {} missing %arguments%", fixture.file_name); + } +} diff --git a/compiler/crates/relay-lsp/tests/find_field_usages/mod.rs b/compiler/crates/relay-lsp/tests/find_field_usages/mod.rs deleted file mode 100644 index 032f79582a2db..0000000000000 --- a/compiler/crates/relay-lsp/tests/find_field_usages/mod.rs +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::reexport::Intern; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use relay_lsp::find_field_usages; -use relay_test_schema::get_test_schema; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%arguments%").collect(); - if let [document, arguments] = parts.as_slice() { - if let [type_name, field_name] = arguments - .split_ascii_whitespace() - .collect::>() - .as_slice() - { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let schema = get_test_schema(); - let ast = parse_executable(document, source_location).unwrap(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(schema, ir); - - let result = find_field_usages::get_usages( - &program, - &get_test_schema(), - type_name.intern(), - field_name.intern(), - ) - .unwrap() - .into_iter() - .map(|location| format!("{:?}\n", location)) - .collect::>(); - - Ok(result.concat()) - } else { - panic!( - "Fixture {} has incorrect # arguments (expected 2)", - fixture.file_name - ); - } - } else { - panic!("Fixture {} missing %arguments%", fixture.file_name); - } -} diff --git a/compiler/crates/relay-lsp/tests/find_field_usages_test.rs b/compiler/crates/relay-lsp/tests/find_field_usages_test.rs index dcdfc89363c6b..4e5c36656aaff 100644 --- a/compiler/crates/relay-lsp/tests/find_field_usages_test.rs +++ b/compiler/crates/relay-lsp/tests/find_field_usages_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<1ca9d7eb11468f905882356aaeae390b>> + * @generated SignedSource<<21c22d054bda1a64ac67ae246f83a4fb>> */ mod find_field_usages; @@ -12,72 +12,72 @@ mod find_field_usages; use find_field_usages::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn find_subtype() { +#[tokio::test] +async fn find_subtype() { let input = include_str!("find_field_usages/fixtures/find_subtype.graphql"); let expected = include_str!("find_field_usages/fixtures/find_subtype.expected"); - test_fixture(transform_fixture, "find_subtype.graphql", "find_field_usages/fixtures/find_subtype.expected", input, expected); + test_fixture(transform_fixture, file!(), "find_subtype.graphql", "find_field_usages/fixtures/find_subtype.expected", input, expected).await; } -#[test] -fn find_supertype() { +#[tokio::test] +async fn find_supertype() { let input = include_str!("find_field_usages/fixtures/find_supertype.graphql"); let expected = include_str!("find_field_usages/fixtures/find_supertype.expected"); - test_fixture(transform_fixture, "find_supertype.graphql", "find_field_usages/fixtures/find_supertype.expected", input, expected); + test_fixture(transform_fixture, file!(), "find_supertype.graphql", "find_field_usages/fixtures/find_supertype.expected", input, expected).await; } -#[test] -fn fragment_field() { +#[tokio::test] +async fn fragment_field() { let input = include_str!("find_field_usages/fixtures/fragment_field.graphql"); let expected = include_str!("find_field_usages/fixtures/fragment_field.expected"); - test_fixture(transform_fixture, "fragment_field.graphql", "find_field_usages/fixtures/fragment_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_field.graphql", "find_field_usages/fixtures/fragment_field.expected", input, expected).await; } -#[test] -fn fragment_field_nomatch() { +#[tokio::test] +async fn fragment_field_nomatch() { let input = include_str!("find_field_usages/fixtures/fragment_field.nomatch.graphql"); 
let expected = include_str!("find_field_usages/fixtures/fragment_field.nomatch.expected"); - test_fixture(transform_fixture, "fragment_field.nomatch.graphql", "find_field_usages/fixtures/fragment_field.nomatch.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_field.nomatch.graphql", "find_field_usages/fixtures/fragment_field.nomatch.expected", input, expected).await; } -#[test] -fn inline_fragment() { +#[tokio::test] +async fn inline_fragment() { let input = include_str!("find_field_usages/fixtures/inline_fragment.graphql"); let expected = include_str!("find_field_usages/fixtures/inline_fragment.expected"); - test_fixture(transform_fixture, "inline_fragment.graphql", "find_field_usages/fixtures/inline_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline_fragment.graphql", "find_field_usages/fixtures/inline_fragment.expected", input, expected).await; } -#[test] -fn inline_fragment_also_matches_outer_type() { +#[tokio::test] +async fn inline_fragment_also_matches_outer_type() { let input = include_str!("find_field_usages/fixtures/inline_fragment_also_matches_outer_type.graphql"); let expected = include_str!("find_field_usages/fixtures/inline_fragment_also_matches_outer_type.expected"); - test_fixture(transform_fixture, "inline_fragment_also_matches_outer_type.graphql", "find_field_usages/fixtures/inline_fragment_also_matches_outer_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline_fragment_also_matches_outer_type.graphql", "find_field_usages/fixtures/inline_fragment_also_matches_outer_type.expected", input, expected).await; } -#[test] -fn linked_field() { +#[tokio::test] +async fn linked_field() { let input = include_str!("find_field_usages/fixtures/linked_field.graphql"); let expected = include_str!("find_field_usages/fixtures/linked_field.expected"); - test_fixture(transform_fixture, "linked_field.graphql", "find_field_usages/fixtures/linked_field.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "linked_field.graphql", "find_field_usages/fixtures/linked_field.expected", input, expected).await; } -#[test] -fn linked_field_nomatch() { +#[tokio::test] +async fn linked_field_nomatch() { let input = include_str!("find_field_usages/fixtures/linked_field.nomatch.graphql"); let expected = include_str!("find_field_usages/fixtures/linked_field.nomatch.expected"); - test_fixture(transform_fixture, "linked_field.nomatch.graphql", "find_field_usages/fixtures/linked_field.nomatch.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked_field.nomatch.graphql", "find_field_usages/fixtures/linked_field.nomatch.expected", input, expected).await; } -#[test] -fn multiple_matches() { +#[tokio::test] +async fn multiple_matches() { let input = include_str!("find_field_usages/fixtures/multiple_matches.graphql"); let expected = include_str!("find_field_usages/fixtures/multiple_matches.expected"); - test_fixture(transform_fixture, "multiple_matches.graphql", "find_field_usages/fixtures/multiple_matches.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple_matches.graphql", "find_field_usages/fixtures/multiple_matches.expected", input, expected).await; } -#[test] -fn query_field() { +#[tokio::test] +async fn query_field() { let input = include_str!("find_field_usages/fixtures/query_field.graphql"); let expected = include_str!("find_field_usages/fixtures/query_field.expected"); - test_fixture(transform_fixture, "query_field.graphql", "find_field_usages/fixtures/query_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "query_field.graphql", "find_field_usages/fixtures/query_field.expected", input, expected).await; } diff --git a/compiler/crates/relay-lsp/tests/hover.rs b/compiler/crates/relay-lsp/tests/hover.rs new file mode 100644 index 0000000000000..0b3e0b7f04855 --- /dev/null +++ b/compiler/crates/relay-lsp/tests/hover.rs @@ -0,0 +1,84 @@ +/* + * 
Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use common::Span; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::reexport::Intern; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use lsp_types::HoverContents; +use lsp_types::MarkedString; +use relay_lsp::hover::get_hover; +use relay_lsp::ContentConsumerType; +use relay_lsp::DummyExtraDataProvider; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use resolution_path::ResolvePosition; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extension%").collect(); + let (schema, document) = if let [document, extension] = parts.as_slice() { + (get_test_schema_with_extensions(extension), *document) + } else { + (get_test_schema(), fixture.content) + }; + let cursor_position = document.find('|').unwrap() - 1; + let source = document.replace('|', ""); + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(&source, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(&source, &diagnostics))?; + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(&source, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let path = ast.resolve((), Span::from_usize(cursor_position, cursor_position)); + + let schema_name = "Some Schema Name".intern(); + + let extra_data_provider = DummyExtraDataProvider::new(); + + let hover_contents = get_hover( + &path, + &schema, + schema_name, + &extra_data_provider, + &schema, + &program, + ContentConsumerType::Relay, + ) + .ok_or("")? 
+ .contents; + + Ok(print_hover_contents(hover_contents)) +} + +fn print_hover_contents(contents: HoverContents) -> String { + match contents { + HoverContents::Scalar(scalar) => print_marked_string(scalar), + HoverContents::Array(arr) => arr + .into_iter() + .map(print_marked_string) + .collect::>() + .join("\n--\n"), + HoverContents::Markup(markup) => markup.value, + } +} + +fn print_marked_string(marked_string: MarkedString) -> String { + match marked_string { + MarkedString::String(string) => string, + MarkedString::LanguageString(language_string) => format!( + "```{}\n{}\n```", + language_string.language, language_string.value + ), + } +} diff --git a/compiler/crates/relay-lsp/tests/hover/mod.rs b/compiler/crates/relay-lsp/tests/hover/mod.rs deleted file mode 100644 index 8c8fd3d34d981..0000000000000 --- a/compiler/crates/relay-lsp/tests/hover/mod.rs +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use common::Span; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::reexport::Intern; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use lsp_types::HoverContents; -use lsp_types::MarkedString; -use relay_lsp::hover::get_hover; -use relay_lsp::ContentConsumerType; -use relay_lsp::DummyExtraDataProvider; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use resolution_path::ResolvePosition; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extension%").collect(); - let (schema, document) = if let [document, extension] = parts.as_slice() { - (get_test_schema_with_extensions(extension), *document) - } else { - (get_test_schema(), fixture.content) - }; - let cursor_position = document.find('|').unwrap() - 1; - let source = document.replace("|", ""); - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(&source, source_location) - .map_err(|diagnostics| diagnostics_to_sorted_string(&source, &diagnostics))?; - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(&source, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let path = ast.resolve((), Span::from_usize(cursor_position, cursor_position)); - - let schema_name = "Some Schema Name".intern(); - - let extra_data_provider = DummyExtraDataProvider::new(); - - let hover_contents = get_hover( - &path, - &schema, - schema_name, - &extra_data_provider, - &schema, - &program, - ContentConsumerType::Relay, - ) - .ok_or("")? 
- .contents; - - Ok(print_hover_contents(hover_contents)) -} - -fn print_hover_contents(contents: HoverContents) -> String { - match contents { - HoverContents::Scalar(scalar) => print_marked_string(scalar), - HoverContents::Array(arr) => arr - .into_iter() - .map(print_marked_string) - .collect::>() - .join("\n--\n"), - HoverContents::Markup(markup) => markup.value, - } -} - -fn print_marked_string(marked_string: MarkedString) -> String { - match marked_string { - MarkedString::String(string) => string, - MarkedString::LanguageString(language_string) => format!( - "```{}\n{}\n```", - language_string.language, language_string.value - ), - } -} diff --git a/compiler/crates/relay-lsp/tests/hover_test.rs b/compiler/crates/relay-lsp/tests/hover_test.rs index 32d25a37d028f..35848752dc5cd 100644 --- a/compiler/crates/relay-lsp/tests/hover_test.rs +++ b/compiler/crates/relay-lsp/tests/hover_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<7e6c48eb62c1d815fb6644cb2e6e3bb3>> + * @generated SignedSource<<7f7ba42d92c3419f1fffd8013ba3d40f>> */ mod hover; @@ -12,65 +12,65 @@ mod hover; use hover::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn double_underscore_id_field() { +#[tokio::test] +async fn double_underscore_id_field() { let input = include_str!("hover/fixtures/double_underscore_id_field.graphql"); let expected = include_str!("hover/fixtures/double_underscore_id_field.expected"); - test_fixture(transform_fixture, "double_underscore_id_field.graphql", "hover/fixtures/double_underscore_id_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "double_underscore_id_field.graphql", "hover/fixtures/double_underscore_id_field.expected", input, expected).await; } -#[test] -fn double_underscore_typename_field() { +#[tokio::test] +async fn double_underscore_typename_field() { let input = include_str!("hover/fixtures/double_underscore_typename_field.graphql"); let expected = include_str!("hover/fixtures/double_underscore_typename_field.expected"); - test_fixture(transform_fixture, "double_underscore_typename_field.graphql", "hover/fixtures/double_underscore_typename_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "double_underscore_typename_field.graphql", "hover/fixtures/double_underscore_typename_field.expected", input, expected).await; } -#[test] -fn fragment_definition_name() { +#[tokio::test] +async fn fragment_definition_name() { let input = include_str!("hover/fixtures/fragment_definition_name.graphql"); let expected = include_str!("hover/fixtures/fragment_definition_name.expected"); - test_fixture(transform_fixture, "fragment_definition_name.graphql", "hover/fixtures/fragment_definition_name.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_definition_name.graphql", "hover/fixtures/fragment_definition_name.expected", input, expected).await; } -#[test] -fn 
fragment_spread() { +#[tokio::test] +async fn fragment_spread() { let input = include_str!("hover/fixtures/fragment_spread.graphql"); let expected = include_str!("hover/fixtures/fragment_spread.expected"); - test_fixture(transform_fixture, "fragment_spread.graphql", "hover/fixtures/fragment_spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread.graphql", "hover/fixtures/fragment_spread.expected", input, expected).await; } -#[test] -fn scalar_field_from_client_schema_extension() { +#[tokio::test] +async fn scalar_field_from_client_schema_extension() { let input = include_str!("hover/fixtures/scalar_field_from_client_schema_extension.graphql"); let expected = include_str!("hover/fixtures/scalar_field_from_client_schema_extension.expected"); - test_fixture(transform_fixture, "scalar_field_from_client_schema_extension.graphql", "hover/fixtures/scalar_field_from_client_schema_extension.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar_field_from_client_schema_extension.graphql", "hover/fixtures/scalar_field_from_client_schema_extension.expected", input, expected).await; } -#[test] -fn scalar_field_from_relay_resolver() { +#[tokio::test] +async fn scalar_field_from_relay_resolver() { let input = include_str!("hover/fixtures/scalar_field_from_relay_resolver.graphql"); let expected = include_str!("hover/fixtures/scalar_field_from_relay_resolver.expected"); - test_fixture(transform_fixture, "scalar_field_from_relay_resolver.graphql", "hover/fixtures/scalar_field_from_relay_resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar_field_from_relay_resolver.graphql", "hover/fixtures/scalar_field_from_relay_resolver.expected", input, expected).await; } -#[test] -fn scalar_field_with_description() { +#[tokio::test] +async fn scalar_field_with_description() { let input = include_str!("hover/fixtures/scalar_field_with_description.graphql"); let expected = 
include_str!("hover/fixtures/scalar_field_with_description.expected"); - test_fixture(transform_fixture, "scalar_field_with_description.graphql", "hover/fixtures/scalar_field_with_description.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar_field_with_description.graphql", "hover/fixtures/scalar_field_with_description.expected", input, expected).await; } -#[test] -fn whitespace_after_query_selection() { +#[tokio::test] +async fn whitespace_after_query_selection() { let input = include_str!("hover/fixtures/whitespace_after_query_selection.graphql"); let expected = include_str!("hover/fixtures/whitespace_after_query_selection.expected"); - test_fixture(transform_fixture, "whitespace_after_query_selection.graphql", "hover/fixtures/whitespace_after_query_selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "whitespace_after_query_selection.graphql", "hover/fixtures/whitespace_after_query_selection.expected", input, expected).await; } -#[test] -fn whitespace_within_linked_field_selection() { +#[tokio::test] +async fn whitespace_within_linked_field_selection() { let input = include_str!("hover/fixtures/whitespace_within_linked_field_selection.graphql"); let expected = include_str!("hover/fixtures/whitespace_within_linked_field_selection.expected"); - test_fixture(transform_fixture, "whitespace_within_linked_field_selection.graphql", "hover/fixtures/whitespace_within_linked_field_selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "whitespace_within_linked_field_selection.graphql", "hover/fixtures/whitespace_within_linked_field_selection.expected", input, expected).await; } diff --git a/compiler/crates/relay-schema-generation/Cargo.toml b/compiler/crates/relay-schema-generation/Cargo.toml new file mode 100644 index 0000000000000..18c91be0741a5 --- /dev/null +++ b/compiler/crates/relay-schema-generation/Cargo.toml @@ -0,0 +1,43 @@ +# @generated by autocargo from 
//relay/oss/crates/relay-schema-generation:[relay-schema-generation,relay-schema-generation_docblock_test,relay-schema-generation_extract_test] + +[package] +name = "relay-schema-generation" +version = "0.0.0" +authors = ["Facebook"] +edition = "2021" +repository = "https://github.com/facebook/relay" +license = "MIT" + +[[test]] +name = "relay_schema_generation_docblock_test" +path = "tests/docblock_test.rs" + +[[test]] +name = "relay_schema_generation_extract_test" +path = "tests/extract_test.rs" + +[dependencies] +common = { path = "../common" } +docblock-shared = { path = "../docblock-shared" } +docblock-syntax = { path = "../docblock-syntax" } +errors = { path = "../errors" } +graphql-ir = { path = "../graphql-ir" } +graphql-syntax = { path = "../graphql-syntax" } +hermes_comments = { git = "https://github.com/facebook/hermes.git" } +hermes_estree = { git = "https://github.com/facebook/hermes.git" } +hermes_parser = { git = "https://github.com/facebook/hermes.git" } +intern = { path = "../intern" } +relay-docblock = { path = "../relay-docblock" } +rustc-hash = "1.1.0" +schema-extractor = { path = "../schema-extractor" } +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" + +[dev-dependencies] +extract-graphql = { path = "../extract-graphql" } +fixture-tests = { path = "../fixture-tests" } +graphql-cli = { path = "../graphql-cli" } +graphql-test-helpers = { path = "../graphql-test-helpers" } +relay-config = { path = "../relay-config" } +relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/relay-schema-generation/src/errors.rs b/compiler/crates/relay-schema-generation/src/errors.rs new file mode 100644 index 0000000000000..d7e15549f56de --- /dev/null +++ b/compiler/crates/relay-schema-generation/src/errors.rs @@ -0,0 +1,84 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use intern::string_key::StringKey; +use thiserror::Error; + +use crate::JSImportType; + +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +pub enum SchemaGenerationError { + #[error( + "Can't find flow type definition for `{name}`. Expected the type to be imported from another module, or exported from the current module" + )] + ExpectedFlowDefinitionForType { name: StringKey }, + #[error("Expected import source to be a string literal")] + ExpectedStringLiteralSource, + #[error("Generic types not supported")] + GenericNotSupported, + #[error("Object types not supported")] + ObjectNotSupported, + #[error("{name} is not supported")] + UnsupportedType { name: &'static str }, + #[error("Type aliases in Relay resolvers are expected to be object types")] + ExpectedTypeAliasToBeObject, + #[error("Expected object definition to include fields")] + ExpectedWeakObjectToHaveFields, + #[error("@RelayResolver annotation is expected to be on a named export")] + ExpectedNamedExport, + #[error("@RelayResolver annotation is expected to be on a named function or type alias")] + ExpectedFunctionOrTypeAlias, + #[error( + "Types used in @RelayResolver definitions should be imported using named or default imports (without using a `*`)" + )] + UseNamedOrDefaultImport, + #[error( + "Failed to find type definition for `{entity_name}` using a {export_type} import from module `{module_name}`. 
Please make sure `{entity_name}` is imported using a named or default import and that it is a resolver type" + )] + ModuleNotFound { + entity_name: StringKey, + export_type: JSImportType, + module_name: StringKey, + }, + #[error("Not yet implemented")] + TODO, + + #[error("Expected the function name to exist")] + MissingFunctionName, + #[error("Expected the function return type to exist")] + MissingReturnType, + #[error("Expected to have at least one function parameter")] + MissingFunctionParam, + #[error("Expected Relay Resolver function param to include type annotation")] + MissingParamType, + #[error("Cannot use a LiveState that is also optional")] + NoOptionalLiveType, + #[error("Unsupported generic: `{name}`")] + UnSupportedGeneric { name: StringKey }, + #[error( + "Expected resolver arguments to be in the second function argument and in format of `args: {{field1: value1, field2: value2}}`" + )] + IncorrectArgumentsDefinition, + #[error( + "Multiple docblock descriptions found for this @RelayResolver. Please only include one description (a comment in the docblock uninterrupted by a resolver \"@\")" + )] + MultipleDocblockDescriptions, + #[error( + "A nullable strong type is provided, please make the type non-nullable. The type can't be nullable in the runtime." + )] + UnexpectedNullableStrongType, +} diff --git a/compiler/crates/relay-schema-generation/src/find_resolver_imports.rs b/compiler/crates/relay-schema-generation/src/find_resolver_imports.rs new file mode 100644 index 0000000000000..c6dc7843928c0 --- /dev/null +++ b/compiler/crates/relay-schema-generation/src/find_resolver_imports.rs @@ -0,0 +1,114 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +#![deny(warnings)] +#![deny(clippy::all)] + +use ::intern::string_key::Intern; +use ::intern::string_key::StringKey; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::Location; +use common::SourceLocationKey; +use hermes_estree::ImportDeclarationSpecifier; +use hermes_estree::Visitor; +use hermes_estree::_Literal; +use rustc_hash::FxHashMap; +use rustc_hash::FxHashSet; + +use crate::to_location; +use crate::JSImportType; +use crate::ModuleResolutionKey; +use crate::SchemaGenerationError; +pub type Imports = FxHashMap; +pub struct ImportExportVisitor { + imports: Imports, + exports: FxHashSet, + errors: Vec, + location: SourceLocationKey, +} + +impl ImportExportVisitor { + pub fn new(location: SourceLocationKey) -> Self { + Self { + location, + imports: Default::default(), + exports: Default::default(), + errors: vec![], + } + } + + /// Returns imports: a map of local name => module key, and exports: names + pub fn get_all( + mut self, + ast: &'_ hermes_estree::Program, + ) -> DiagnosticsResult<(Imports, FxHashSet)> { + self.visit_program(&ast); + if !self.errors.is_empty() { + Err(self.errors) + } else { + Ok((self.imports, self.exports)) + } + } +} + +impl Visitor<'_> for ImportExportVisitor { + fn visit_import_declaration(&mut self, ast: &'_ hermes_estree::ImportDeclaration) { + let location = to_location(self.location, &ast.source); + let source = match &ast.source { + _Literal::StringLiteral(node) => (&node.value).intern(), + _ => { + self.errors.push(Diagnostic::error( + SchemaGenerationError::ExpectedStringLiteralSource, + location, + )); + return; + } + }; + + self.imports + .extend(ast.specifiers.iter().map(|specifier| match specifier { + ImportDeclarationSpecifier::ImportDefaultSpecifier(node) => ( + (&node.local.name).intern(), + ( + ModuleResolutionKey { + module_name: source, + import_type: JSImportType::Default, + }, + to_location(self.location, &node.local), + ), + ), + ImportDeclarationSpecifier::ImportSpecifier(node) => ( 
+ (&node.local.name).intern(), + ( + ModuleResolutionKey { + module_name: source, + import_type: JSImportType::Named((&node.imported.name).intern()), + }, + to_location(self.location, &node.local), + ), + ), + ImportDeclarationSpecifier::ImportNamespaceSpecifier(node) => ( + (&node.local.name).intern(), + ( + ModuleResolutionKey { + module_name: source, + import_type: JSImportType::Namespace, + }, + to_location(self.location, &node.local), + ), + ), + })); + } + + fn visit_export_named_declaration(&mut self, ast: &'_ hermes_estree::ExportNamedDeclaration) { + if let Some(hermes_estree::Declaration::TypeAlias(node)) = &ast.declaration { + let name = (&node.id.name).intern(); + self.exports.insert(name); + } + } +} diff --git a/compiler/crates/relay-schema-generation/src/lib.rs b/compiler/crates/relay-schema-generation/src/lib.rs new file mode 100644 index 0000000000000..aa102ab75ed6a --- /dev/null +++ b/compiler/crates/relay-schema-generation/src/lib.rs @@ -0,0 +1,1129 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +#![deny(warnings)] +#![deny(rust_2018_idioms)] +#![deny(clippy::all)] + +mod errors; +mod find_resolver_imports; + +use std::fmt; +use std::path::Path; +use std::str::FromStr; + +use ::errors::try_all; +use ::intern::intern; +use ::intern::string_key::Intern; +use ::intern::string_key::StringKey; +use ::intern::Lookup; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::Location; +use common::SourceLocationKey; +use common::Span; +use common::WithLocation; +use docblock_shared::ResolverSourceHash; +use docblock_syntax::parse_docblock; +use docblock_syntax::DocblockAST; +use docblock_syntax::DocblockSection; +use errors::SchemaGenerationError; +use find_resolver_imports::ImportExportVisitor; +use find_resolver_imports::Imports; +use graphql_ir::FragmentDefinitionName; +use graphql_syntax::ExecutableDefinition; +use graphql_syntax::FieldDefinition; +use graphql_syntax::Identifier; +use graphql_syntax::InputValueDefinition; +use graphql_syntax::List; +use graphql_syntax::ListTypeAnnotation; +use graphql_syntax::NamedTypeAnnotation; +use graphql_syntax::NonNullTypeAnnotation; +use graphql_syntax::StringNode; +use graphql_syntax::Token; +use graphql_syntax::TokenKind; +use graphql_syntax::TypeAnnotation; +use hermes_comments::find_nodes_after_comments; +use hermes_estree::Declaration; +use hermes_estree::FlowTypeAnnotation; +use hermes_estree::Function; +use hermes_estree::Introspection; +use hermes_estree::Node; +use hermes_estree::ObjectTypePropertyKey; +use hermes_estree::ObjectTypePropertyType; +use hermes_estree::Pattern; +use hermes_estree::Range; +use hermes_estree::SourceRange; +use hermes_estree::TypeAlias; +use hermes_estree::TypeAnnotationEnum; +use hermes_parser::parse; +use hermes_parser::ParseResult; +use hermes_parser::ParserDialect; +use hermes_parser::ParserFlags; +use relay_docblock::DocblockIr; +use relay_docblock::ResolverTypeDocblockIr; +use relay_docblock::StrongObjectIr; +use relay_docblock::TerseRelayResolverIr; +use 
relay_docblock::UnpopulatedIrField; +use relay_docblock::WeakObjectIr; +use rustc_hash::FxHashMap; +use rustc_hash::FxHashSet; +use schema_extractor::SchemaExtractor; + +pub static LIVE_FLOW_TYPE_NAME: &str = "LiveState"; + +/** + * Reprensents a subset of supported Flow type definitions + */ +#[derive(Debug)] +pub enum ResolverFlowData { + Strong(FieldData), // strong object or field on an object + Weak(WeakObjectData), +} + +#[derive(Debug)] +pub struct FieldData { + pub field_name: WithLocation, + pub return_type: FlowTypeAnnotation, + pub entity_type: FlowTypeAnnotation, + pub arguments: Option, + pub is_live: Option, +} + +#[derive(Debug)] +pub struct WeakObjectData { + pub field_name: WithLocation, + pub type_alias: FlowTypeAnnotation, +} + +pub struct RelayResolverExtractor { + /// Cross module states + type_definitions: FxHashMap, + unresolved_field_definitions: Vec<(ModuleResolutionKey, UnresolvedFieldDefinition)>, + resolved_field_definitions: Vec, + + // Needs to keep track of source location because hermes_parser currently + // does not embed the information + current_location: SourceLocationKey, +} + +#[derive( + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Debug, + Hash, + Copy, + serde::Serialize +)] +pub enum JSImportType { + Default, + Named(StringKey), + // Note that namespace imports cannot be used for resolver types. 
Anything namespace + // imported should be a "Named" import instead + Namespace, +} +impl fmt::Display for JSImportType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + JSImportType::Default => write!(f, "default"), + JSImportType::Namespace => write!(f, "namespace"), + JSImportType::Named(_) => write!(f, "named"), + } + } +} + +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)] +struct ModuleResolutionKey { + module_name: StringKey, + import_type: JSImportType, +} + +struct UnresolvedFieldDefinition { + entity_name: WithLocation, + field_name: WithLocation, + return_type: FlowTypeAnnotation, + arguments: Option, + source_hash: ResolverSourceHash, + is_live: Option, + description: Option>, +} + +impl Default for RelayResolverExtractor { + fn default() -> Self { + Self::new() + } +} + +impl RelayResolverExtractor { + pub fn new() -> Self { + Self { + type_definitions: Default::default(), + unresolved_field_definitions: Default::default(), + resolved_field_definitions: vec![], + current_location: SourceLocationKey::generated(), + } + } + + /// First pass to extract all object definitions and field definitions + pub fn parse_document( + &mut self, + text: &str, + source_module_path: &str, + fragment_definitions: &Option<&Vec>, + ) -> DiagnosticsResult<()> { + // Assume the caller knows the text contains at least one RelayResolver decorator + + self.current_location = SourceLocationKey::standalone(source_module_path); + let source_hash = ResolverSourceHash::new(text); + let ParseResult { ast, comments } = parse( + text, + "", // Not used in hermes_parser + ParserFlags { + strict_mode: true, + enable_jsx: true, + dialect: ParserDialect::Flow, + store_doc_block: false, + store_comments: true, + }, + ) + .map_err(|errs| { + errs.into_iter() + .map(|err| { + let source_span = err.span(); + Diagnostic::error( + err.into_message(), + Location::new( + self.current_location, + Span::new( + source_span.offset().try_into().unwrap(), + 
(source_span.offset() + source_span.len()) + .try_into() + .unwrap(), + ), + ), + ) + }) + .collect::>() + })?; + + let import_export_visitor = ImportExportVisitor::new(self.current_location); + let (imports, exports) = import_export_visitor.get_all(&ast)?; + + let attached_comments = find_nodes_after_comments(&ast, &comments); + + try_all( + attached_comments + .into_iter() + .filter(|(comment, _, _, _)| comment.contains("@RelayResolver")) + .map(|(comment, comment_range, node, range)| { + // TODO: Handle unwraps + let docblock = parse_docblock(comment, self.current_location)?; + let resolver_value = docblock.find_field(intern!("RelayResolver")).unwrap(); + let description = get_description(&docblock, comment_range)?; + + match self.extract_graphql_types(&node, range)? { + ResolverFlowData::Strong(FieldData { + field_name, + return_type, + entity_type, + arguments, + is_live, + }) => { + let name = resolver_value.field_value.unwrap_or(field_name); + + // Heuristic to treat lowercase name as field definition, otherwise object definition + // if there is a `.` in the name, it is the old resolver synatx, e.g. @RelayResolver Client.field, + // we should treat it as a field definition + let is_field_definition = { + let name_str = name.item.lookup(); + let is_lowercase_initial = + name_str.chars().next().unwrap().is_lowercase(); + is_lowercase_initial || name_str.contains('.') + }; + if is_field_definition { + let entity_name = self.extract_entity_name(entity_type)?; + self.add_field_definition( + &imports, + &exports, + source_module_path, + fragment_definitions, + UnresolvedFieldDefinition { + entity_name, + field_name: name, + return_type, + arguments, + source_hash, + is_live, + description, + }, + )? + } else { + self.add_type_definition( + &imports, + &exports, + source_module_path, + name, + return_type, + source_hash, + is_live, + description, + )? 
+ } + } + ResolverFlowData::Weak(WeakObjectData { + field_name, + type_alias, + }) => { + let name = resolver_value.field_value.unwrap_or(field_name); + self.add_weak_type_definition( + name, + type_alias, + source_hash, + source_module_path, + description, + false, + )? + } + } + Ok(()) + }), + )?; + Ok(()) + } + + /// Second pass to resolve all field definitions + pub fn resolve(mut self) -> DiagnosticsResult<(Vec, Vec)> { + try_all( + self.unresolved_field_definitions + .into_iter() + .map(|(key, field)| { + let entity = match self.type_definitions.get(&key) { + Some(DocblockIr::Type(ResolverTypeDocblockIr::StrongObjectResolver( + object, + ))) => Ok(object + .type_name + .name_with_location(SourceLocationKey::Generated)), + Some(DocblockIr::Type(ResolverTypeDocblockIr::WeakObjectType(object))) => { + Ok(object + .type_name + .name_with_location(SourceLocationKey::Generated)) + } + _ => Err(vec![Diagnostic::error( + SchemaGenerationError::ModuleNotFound { + entity_name: field.entity_name.item, + export_type: key.import_type, + module_name: key.module_name, + }, + field.entity_name.location, + )]), + }?; + let arguments = if let Some(args) = field.arguments { + Some(flow_type_to_field_arguments(self.current_location, &args)?) 
+ } else { + None + }; + let description_node = field.description.map(|desc| StringNode { + token: Token { + span: desc.location.span(), + kind: TokenKind::Empty, + }, + value: desc.item, + }); + let field_definition = FieldDefinition { + name: string_key_to_identifier(field.field_name), + type_: return_type_to_type_annotation( + self.current_location, + &field.return_type, + )?, + arguments, + directives: vec![], + description: description_node, + hack_source: None, + span: field.field_name.location.span(), + }; + let live = field + .is_live + .map(|loc| UnpopulatedIrField { key_location: loc }); + self.resolved_field_definitions.push(TerseRelayResolverIr { + field: field_definition, + type_: entity, + root_fragment: None, + location: field.field_name.location, + deprecated: None, + live, + fragment_arguments: None, + source_hash: field.source_hash, + semantic_non_null: None, + }); + Ok(()) + }), + )?; + Ok(( + self.type_definitions.into_values().collect(), + self.resolved_field_definitions, + )) + } + + fn add_field_definition( + &mut self, + imports: &Imports, + exports: &FxHashSet, + source_module_path: &str, + fragment_definitions: &Option<&Vec>, + field_definition: UnresolvedFieldDefinition, + ) -> DiagnosticsResult<()> { + let name = field_definition.entity_name.item; + let key = if let Some(res) = imports.get(&name) { + res.0.clone() + } else if exports.contains(&name) { + let haste_module_name = Path::new(source_module_path) + .file_stem() + .unwrap() + .to_str() + .unwrap(); + ModuleResolutionKey { + module_name: haste_module_name.intern(), + import_type: JSImportType::Named(name), + } + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedFlowDefinitionForType { name }, + field_definition.entity_name.location, + )]); + }; + + if key.module_name.lookup().ends_with(".graphql") && name.lookup().ends_with("$key") { + self.add_fragment_field_definition(fragment_definitions, field_definition)? 
+ } else { + self.unresolved_field_definitions + .push((key, field_definition)); + } + Ok(()) + } + + fn add_fragment_field_definition( + &mut self, + fragment_definitions: &Option<&Vec>, + field: UnresolvedFieldDefinition, + ) -> DiagnosticsResult<()> { + let arguments = if let Some(args) = field.arguments { + Some(flow_type_to_field_arguments(self.current_location, &args)?) + } else { + None + }; + let description_node = field.description.map(|desc| StringNode { + token: Token { + span: desc.location.span(), + kind: TokenKind::Empty, + }, + value: desc.item, + }); + let field_definition = FieldDefinition { + name: string_key_to_identifier(field.field_name), + type_: return_type_to_type_annotation(self.current_location, &field.return_type)?, + arguments, + directives: vec![], + description: description_node, + hack_source: None, + span: field.field_name.location.span(), + }; + let fragment_name = field + .entity_name + .item + .lookup() + .strip_suffix("$key") + .unwrap(); + let fragment_definition_result = relay_docblock::assert_fragment_definition( + field.entity_name, + fragment_name.intern(), + *fragment_definitions, + ); + let fragment_definition = fragment_definition_result.map_err(|err| vec![err])?; + + let fragment_type_condition = WithLocation::from_span( + fragment_definition.location.source_location(), + fragment_definition.type_condition.span, + fragment_definition.type_condition.type_.value, + ); + let live = field + .is_live + .map(|loc| UnpopulatedIrField { key_location: loc }); + self.resolved_field_definitions.push(TerseRelayResolverIr { + field: field_definition, + type_: fragment_type_condition, + root_fragment: Some(field.entity_name.map(FragmentDefinitionName)), // this includes the $key + location: field.field_name.location, + deprecated: None, + live, + fragment_arguments: None, // We don't support arguments right now + source_hash: field.source_hash, + semantic_non_null: None, + }); + Ok(()) + } + + fn add_type_definition( + &mut self, + 
imports: &Imports, + exports: &FxHashSet, + source_module_path: &str, + name: WithLocation, + mut return_type: FlowTypeAnnotation, + source_hash: ResolverSourceHash, + is_live: Option, + description: Option>, + ) -> DiagnosticsResult<()> { + let strong_object = StrongObjectIr { + type_name: string_key_to_identifier(name), + rhs_location: name.location, + root_fragment: WithLocation::new( + name.location, + FragmentDefinitionName(format!("{}__id", name.item).intern()), + ), + description, + deprecated: None, + live: is_live.map(|loc| UnpopulatedIrField { key_location: loc }), + location: name.location, + implements_interfaces: vec![], + source_hash, + semantic_non_null: None, + }; + + // We ignore nullable annotation since both nullable and non-nullable types are okay for + // defining a strong object + return_type = if let FlowTypeAnnotation::NullableTypeAnnotation(return_type) = return_type { + return_type.type_annotation + } else { + return_type + }; + // For now, we assume the flow type for the strong object is always imported + // from a separate file + match return_type { + FlowTypeAnnotation::GenericTypeAnnotation(generic_type) => { + let name = schema_extractor::get_identifier_for_flow_generic(WithLocation { + item: generic_type.as_ref(), + location: self.to_location(generic_type.as_ref()), + })?; + if generic_type.type_parameters.is_some() { + return Err(vec![Diagnostic::error( + SchemaGenerationError::GenericNotSupported, + name.location, + )]); + } + + let key = if let Some((key, import_location)) = imports.get(&name.item) { + if let JSImportType::Namespace = key.import_type { + return Err(vec![ + Diagnostic::error( + SchemaGenerationError::UseNamedOrDefaultImport, + name.location, + ) + .annotate(format!("{} is imported from", name.item), *import_location), + ]); + }; + key.clone() + } else if exports.contains(&name.item) { + let haste_module_name = Path::new(source_module_path) + .file_stem() + .unwrap() + .to_str() + .unwrap(); + ModuleResolutionKey { 
+ module_name: haste_module_name.intern(), + import_type: JSImportType::Named(name.item), + } + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedFlowDefinitionForType { name: name.item }, + name.location, + )]); + }; + + self.type_definitions.insert( + key.clone(), + DocblockIr::Type(ResolverTypeDocblockIr::StrongObjectResolver(strong_object)), + ); + + Ok(()) + } + FlowTypeAnnotation::ObjectTypeAnnotation(object_type) => Err(vec![Diagnostic::error( + SchemaGenerationError::ObjectNotSupported, + self.to_location(object_type.as_ref()), + )]), + _ => self.error_result( + SchemaGenerationError::UnsupportedType { + name: return_type.name(), + }, + &return_type, + ), + } + } + + fn add_weak_type_definition( + &mut self, + name: WithLocation, + type_alias: FlowTypeAnnotation, + source_hash: ResolverSourceHash, + source_module_path: &str, + description: Option>, + should_generate_fields: bool, + ) -> DiagnosticsResult<()> { + let weak_object = WeakObjectIr { + type_name: string_key_to_identifier(name), + rhs_location: name.location, + description, + hack_source: None, + deprecated: None, + location: name.location, + implements_interfaces: vec![], + source_hash, + }; + let haste_module_name = Path::new(source_module_path) + .file_stem() + .unwrap() + .to_str() + .unwrap(); + let key = ModuleResolutionKey { + module_name: haste_module_name.intern(), + import_type: JSImportType::Named(name.item), + }; + + // TODO: this generates the IR but not the runtime JS + if should_generate_fields { + if let FlowTypeAnnotation::ObjectTypeAnnotation(object_node) = type_alias { + let field_map = self.get_object_fields(&object_node)?; + if !field_map.is_empty() { + try_all(field_map.into_iter().map(|(field_name, field_type)| { + let field_definition = FieldDefinition { + name: string_key_to_identifier(field_name), + type_: return_type_to_type_annotation( + self.current_location, + field_type, + )?, + arguments: None, + directives: vec![], + description: None, 
+ hack_source: None, + span: field_name.location.span(), + }; + + self.resolved_field_definitions.push(TerseRelayResolverIr { + field: field_definition, + type_: weak_object + .type_name + .name_with_location(SourceLocationKey::Generated), + root_fragment: None, + location: field_name.location, + deprecated: None, + live: None, + fragment_arguments: None, + source_hash, + semantic_non_null: None, + }); + Ok(()) + }))?; + } else { + let location = self.to_location(object_node.as_ref()); + return Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedWeakObjectToHaveFields, + location, + )]); + } + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedTypeAliasToBeObject, + self.to_location(&type_alias), + )]); + } + } + + // Add weak object + self.type_definitions.insert( + key.clone(), + DocblockIr::Type(ResolverTypeDocblockIr::WeakObjectType(weak_object)), + ); + Ok(()) + } + + pub fn extract_function(&self, node: &Function) -> DiagnosticsResult { + let ident = node.id.as_ref().ok_or_else(|| { + Diagnostic::error( + SchemaGenerationError::MissingFunctionName, + self.to_location(node), + ) + })?; + let field_name = WithLocation { + item: (&ident.name).intern(), + location: self.to_location(ident), + }; + + let return_type_annotation = node.return_type.as_ref().ok_or_else(|| { + Diagnostic::error( + SchemaGenerationError::MissingReturnType, + self.to_location(node), + ) + })?; + let flow_return_type = self.unwrap_annotation_enum(return_type_annotation)?; + let (return_type_with_live, is_optional) = + schema_extractor::unwrap_nullable_type(flow_return_type); + + // unwrap is_live from the return type + let (return_type, is_live) = match return_type_with_live { + FlowTypeAnnotation::GenericTypeAnnotation(type_node) => { + let name = schema_extractor::get_identifier_for_flow_generic(WithLocation { + item: type_node, + location: self.to_location(type_node.as_ref()), + })?; + if let Some(type_param) = &type_node.type_parameters { + match 
type_param.params.as_slice() { + [param] => { + if name.item.lookup() == LIVE_FLOW_TYPE_NAME { + if is_optional { + return Err(vec![Diagnostic::error( + SchemaGenerationError::NoOptionalLiveType, + name.location, + )]); + } + (param, Some(name.location)) + } else { + (flow_return_type, None) + } + } + _ => { + // Does not support multiple type params for now + return self.error_result( + SchemaGenerationError::UnsupportedType { + name: "Multiple type params", + }, + type_node.as_ref(), + ); + } + } + } else { + (flow_return_type, None) + } + } + _ => (flow_return_type, None), + }; + + if node.params.is_empty() { + return self.error_result(SchemaGenerationError::MissingFunctionParam, node); + } + let param = &node.params[0]; + let entity_type = if let Pattern::Identifier(identifier) = param { + let type_annotation = identifier.type_annotation.as_ref().ok_or_else(|| { + Diagnostic::error( + SchemaGenerationError::MissingParamType, + self.to_location(param), + ) + })?; + if let TypeAnnotationEnum::FlowTypeAnnotation(type_) = &type_annotation.type_annotation + { + type_ + } else { + return self.error_result( + SchemaGenerationError::UnsupportedType { name: param.name() }, + param, + ); + } + } else { + return self.error_result( + SchemaGenerationError::UnsupportedType { name: param.name() }, + param, + ); + }; + + let arguments = if node.params.len() > 1 { + let arg_param = &node.params[1]; + let args = if let Pattern::Identifier(identifier) = arg_param { + let type_annotation = identifier.type_annotation.as_ref().ok_or_else(|| { + Diagnostic::error( + SchemaGenerationError::MissingParamType, + self.to_location(param), + ) + })?; + if let TypeAnnotationEnum::FlowTypeAnnotation(type_) = + &type_annotation.type_annotation + { + Some(type_) + } else { + None + } + } else { + None + }; + if args.is_none() { + return self.error_result( + SchemaGenerationError::IncorrectArgumentsDefinition, + arg_param, + ); + } + args + } else { + None + }; + + 
Ok(ResolverFlowData::Strong(FieldData { + field_name, + return_type: return_type.clone(), + entity_type: entity_type.clone(), + arguments: arguments.cloned(), + is_live, + })) + } + + fn extract_type_alias(&self, node: &TypeAlias) -> DiagnosticsResult { + let field_name = WithLocation { + item: (&node.id.name).intern(), + location: self.to_location(&node.id), + }; + Ok(WeakObjectData { + field_name, + type_alias: node.right.clone(), + }) + } + + fn extract_graphql_types( + &self, + node: &Node<'_>, + range: SourceRange, + ) -> DiagnosticsResult { + if let Node::ExportNamedDeclaration(node) = node { + match node.declaration { + Some(Declaration::FunctionDeclaration(ref node)) => { + self.extract_function(&node.function) + } + Some(Declaration::TypeAlias(ref node)) => { + let data = self.extract_type_alias(node)?; + Ok(ResolverFlowData::Weak(data)) + } + _ => Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedFunctionOrTypeAlias, + Location::new(self.current_location, Span::new(range.start, range.end)), + )]), + } + } else { + Err(vec![Diagnostic::error( + SchemaGenerationError::ExpectedNamedExport, + Location::new(self.current_location, Span::new(range.start, range.end)), + )]) + } + } + + fn extract_entity_name( + &self, + entity_type: FlowTypeAnnotation, + ) -> DiagnosticsResult> { + match entity_type { + FlowTypeAnnotation::NumberTypeAnnotation(annot) => Ok(WithLocation { + item: intern!("Float"), + location: self.to_location(annot.as_ref()), + }), + FlowTypeAnnotation::StringTypeAnnotation(annot) => Ok(WithLocation { + item: intern!("String"), + location: self.to_location(annot.as_ref()), + }), + FlowTypeAnnotation::GenericTypeAnnotation(annot) => { + let id = schema_extractor::get_identifier_for_flow_generic(WithLocation { + item: &annot, + location: self.to_location(annot.as_ref()), + })?; + if annot.type_parameters.is_some() { + return Err(vec![Diagnostic::error( + SchemaGenerationError::GenericNotSupported, + self.to_location(annot.as_ref()), + 
)]); + } + Ok(id) + } + FlowTypeAnnotation::NullableTypeAnnotation(annot) => Err(vec![Diagnostic::error( + SchemaGenerationError::UnexpectedNullableStrongType, + self.to_location(annot.as_ref()), + )]), + _ => Err(vec![Diagnostic::error( + SchemaGenerationError::UnsupportedType { + name: entity_type.name(), + }, + self.to_location(&entity_type), + )]), + } + } +} + +impl SchemaExtractor for RelayResolverExtractor { + fn to_location(&self, node: &T) -> Location { + to_location(self.current_location, node) + } +} + +fn to_location(source_location: SourceLocationKey, node: &T) -> Location { + let range = node.range(); + Location::new(source_location, Span::new(range.start, range.end)) +} + +fn string_key_to_identifier(name: WithLocation) -> Identifier { + Identifier { + span: name.location.span(), + token: Token { + span: name.location.span(), + kind: TokenKind::Identifier, + }, + value: name.item, + } +} + +fn return_type_to_type_annotation( + source_location: SourceLocationKey, + return_type: &FlowTypeAnnotation, +) -> DiagnosticsResult { + let (return_type, mut is_optional) = schema_extractor::unwrap_nullable_type(return_type); + let location = to_location(source_location, return_type); + let type_annotation = match return_type { + FlowTypeAnnotation::GenericTypeAnnotation(node) => { + let identifier = schema_extractor::get_identifier_for_flow_generic(WithLocation { + item: node, + location: to_location(source_location, node.as_ref()), + })?; + match &node.type_parameters { + None => TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_to_identifier(identifier), + }), + Some(type_parameters) if type_parameters.params.len() == 1 => { + let identifier_name = identifier.item.lookup(); + match identifier_name { + "Array" | "$ReadOnlyArray" => { + let param = &type_parameters.params[0]; + TypeAnnotation::List(Box::new(ListTypeAnnotation { + span: location.span(), + open: generated_token(), + type_: return_type_to_type_annotation(source_location, param)?, + 
close: generated_token(), + })) + } + "IdOf" => { + let param = &type_parameters.params[0]; + let location = to_location(source_location, param); + if let FlowTypeAnnotation::StringLiteralTypeAnnotation(node) = param { + TypeAnnotation::Named(NamedTypeAnnotation { + name: Identifier { + span: location.span(), + token: Token { + span: location.span(), + kind: TokenKind::Identifier, + }, + value: (&node.value).intern(), + }, + }) + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::TODO, + location, + )]); + } + } + "RelayResolverValue" => { + // Special case for `RelayResolverValue`, it is always optional + is_optional = true; + TypeAnnotation::Named(NamedTypeAnnotation { + name: Identifier { + span: location.span(), + token: Token { + span: location.span(), + kind: TokenKind::Identifier, + }, + value: intern!("RelayResolverValue"), + }, + }) + } + _ => { + return Err(vec![Diagnostic::error( + SchemaGenerationError::UnSupportedGeneric { + name: identifier.item, + }, + location, + )]); + } + } + } + _ => { + return Err(vec![Diagnostic::error( + SchemaGenerationError::TODO, + location, + )]); + } + } + } + FlowTypeAnnotation::StringTypeAnnotation(node) => { + let identifier = WithLocation { + item: intern!("String"), + location: to_location(source_location, node.as_ref()), + }; + TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_to_identifier(identifier), + }) + } + FlowTypeAnnotation::NumberTypeAnnotation(node) => { + let identifier = WithLocation { + item: intern!("Float"), + location: to_location(source_location, node.as_ref()), + }; + TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_to_identifier(identifier), + }) + } + FlowTypeAnnotation::BooleanTypeAnnotation(node) => { + let identifier = WithLocation { + item: intern!("Boolean"), + location: to_location(source_location, node.as_ref()), + }; + TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_to_identifier(identifier), + }) + } + 
FlowTypeAnnotation::BooleanLiteralTypeAnnotation(node) => { + let identifier = WithLocation { + item: intern!("Boolean"), + location: to_location(source_location, node.as_ref()), + }; + TypeAnnotation::Named(NamedTypeAnnotation { + name: string_key_to_identifier(identifier), + }) + } + _ => { + return Err(vec![Diagnostic::error( + SchemaGenerationError::UnsupportedType { + name: return_type.name(), + }, + location, + )]); + } + }; + + if !is_optional { + let non_null_annotation = TypeAnnotation::NonNull(Box::new(NonNullTypeAnnotation { + span: location.span(), + type_: type_annotation, + exclamation: generated_token(), + })); + Ok(non_null_annotation) + } else { + Ok(type_annotation) + } +} + +fn flow_type_to_field_arguments( + source_location: SourceLocationKey, + args_type: &FlowTypeAnnotation, +) -> DiagnosticsResult> { + let obj = if let FlowTypeAnnotation::ObjectTypeAnnotation(type_) = &args_type { + // unwrap the ref then the box, then re-add the ref + type_ + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::IncorrectArgumentsDefinition, + to_location(source_location, args_type), + )]); + }; + let mut items = vec![]; + for prop_type in obj.properties.iter() { + let prop_span = to_location(source_location, prop_type).span(); + if let ObjectTypePropertyType::ObjectTypeProperty(prop) = prop_type { + let ident = if let ObjectTypePropertyKey::Identifier(ident) = &prop.key { + ident + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::IncorrectArgumentsDefinition, + to_location(source_location, &prop.key), + )]); + }; + + let ident_node: &hermes_estree::Identifier = ident; + let name_span = to_location(source_location, ident_node).span(); + let arg = InputValueDefinition { + name: graphql_syntax::Identifier { + span: name_span, + token: Token { + span: name_span, + kind: TokenKind::Identifier, + }, + value: StringKey::from_str(&ident.name).map_err(|_| { + vec![Diagnostic::error( + 
SchemaGenerationError::IncorrectArgumentsDefinition, + to_location(source_location, args_type), + )] + })?, + }, + type_: return_type_to_type_annotation(source_location, &prop.value)?, + default_value: None, + directives: vec![], + span: prop_span, + }; + items.push(arg); + } + } + + let list_start: u32 = args_type.range().start; + let list_end: u32 = args_type.range().end; + Ok(List { + items, + span: to_location(source_location, args_type).span(), + start: Token { + span: Span { + start: list_start, + end: list_start + 1, + }, + kind: TokenKind::OpenBrace, + }, + end: Token { + span: Span { + start: list_end - 1, + end: list_end, + }, + kind: TokenKind::CloseBrace, + }, + }) +} + +fn get_description( + docblock: &DocblockAST, + range: SourceRange, +) -> DiagnosticsResult>> { + let mut description = None; + for section in docblock.sections.iter() { + match section { + DocblockSection::Field(_) => (), + DocblockSection::FreeText(text) => { + let location = Location::new( + text.location.source_location(), + Span::new(range.start, range.end), + ); + if description.is_none() { + description = Some(WithLocation { + location, + item: text.item, + }) + } else { + return Err(vec![Diagnostic::error( + SchemaGenerationError::MultipleDocblockDescriptions, + location, + )]); + } + } + } + } + Ok(description) +} + +fn generated_token() -> Token { + Token { + span: Span::empty(), + kind: TokenKind::Empty, + } +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock.rs b/compiler/crates/relay-schema-generation/tests/docblock.rs new file mode 100644 index 0000000000000..a0bad7fa0da3c --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock.rs @@ -0,0 +1,137 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::path::Path; +use std::sync::Arc; + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use extract_graphql::JavaScriptSourceFeature; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_syntax::ExecutableDefinition; +use graphql_test_helpers::ProjectFixture; +use intern::Lookup; +use relay_config::ProjectName; +use relay_docblock::extend_schema_with_resolver_type_system_definition; +use relay_docblock::DocblockIr; +use relay_docblock::ResolverFieldDocblockIr; +use relay_schema_generation::RelayResolverExtractor; +use relay_test_schema::get_test_schema_with_extensions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let mut extractor = RelayResolverExtractor::new(); + + let project_name = ProjectName::default(); + let mut schema = get_test_schema_with_extensions(""); + + let mut errors: Vec = vec![]; + let project_fixture = ProjectFixture::deserialize(fixture.content); + + project_fixture.files().iter().for_each(|(path, content)| { + let gql_operations = parse_document_definitions(content, path); + if let Err(err) = extractor.parse_document( + content, + path.to_string_lossy().as_ref(), + &Some(&gql_operations), + ) { + errors.extend(err); + } + }); + + let out = match extractor.resolve() { + Ok((objects, fields)) => objects + .into_iter() + .chain( + fields.into_iter().map(|field| { + DocblockIr::Field(ResolverFieldDocblockIr::TerseRelayResolver(field)) + }), + ) + .map(|ir| { + // Extend schema with the IR and print SDL + let schema_document = + ir.clone() + .to_graphql_schema_ast(project_name, &schema, &Default::default())?; + for definition in &schema_document.definitions { + extend_schema_with_resolver_type_system_definition( + definition.clone(), + Arc::get_mut(&mut schema) + .expect("Expected to be able to get mutable reference to schema"), + schema_document.location, + )?; + } + + let sdl = ir + .clone() + .to_sdl_string(project_name, &schema, 
&Default::default())?; + + Ok(format!("{:#?}\n{}", &ir, sdl)) + }) + .collect::>>>(), + Err(err) => { + errors.extend(err); + Default::default() + } + }; + + let mut ok_out = vec![]; + + for o in out.into_iter() { + match o { + Err(errs) => { + errors.extend(errs); + } + Ok(o) => { + ok_out.push(o); + } + } + } + + let err = diagnostics_to_sorted_string(&project_fixture, &errors); + + ok_out.sort(); + Ok(ok_out.join("\n\n") + "\n\n" + &err) +} + +fn parse_document_definitions(content: &str, path: &Path) -> Vec { + let features = extract_graphql::extract(content); + features + .into_iter() + .filter_map(|feature| { + if let JavaScriptSourceFeature::GraphQL(graphql_source) = feature { + Some(graphql_source.to_text_source().text) + } else { + None + } + }) + .flat_map(|query_text| { + graphql_syntax::parse_executable( + &query_text, + SourceLocationKey::standalone(path.to_str().unwrap()), + ) + .unwrap() + .definitions + }) + .collect() +} + +fn diagnostics_to_sorted_string(fixtures: &ProjectFixture, diagnostics: &[Diagnostic]) -> String { + let printer = DiagnosticPrinter::new(|source_location| match source_location { + SourceLocationKey::Standalone { path } => { + let source = fixtures.files().get(Path::new(path.lookup())).unwrap(); + Some(TextSource::from_whole_document(source)) + } + SourceLocationKey::Embedded { .. 
} | SourceLocationKey::Generated => unreachable!(), + }); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.expected new file mode 100644 index 0000000000000..3e2f47aba1975 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.expected @@ -0,0 +1,291 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type Page from 'Page'; +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType {} + +/** + * @RelayResolver + */ +export function favorite_page(cat: CatFlowType, args: {id: ID}): Page {} + +/** + * @RelayResolver + */ +export function all_pages(cat: CatFlowType, args: {limit: ?number}): Array {} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 377:390, + token: Token { + span: 377:390, + kind: Identifier, + }, + value: "favorite_page", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 426:430, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 426:430, + token: Token { + span: 426:430, + kind: Identifier, + }, + value: "Page", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: Some( + List { + span: 415:423, + start: Token { + span: 415:416, + kind: OpenBrace, + }, + items: [ + InputValueDefinition { + name: Identifier { + span: 
416:418, + token: Token { + span: 416:418, + kind: Identifier, + }, + value: "id", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 420:422, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 420:422, + token: Token { + span: 420:422, + kind: Identifier, + }, + value: "ID", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + default_value: None, + directives: [], + span: 416:422, + }, + ], + end: Token { + span: 422:423, + kind: CloseBrace, + }, + }, + ), + directives: [], + description: None, + hack_source: None, + span: 377:390, + }, + type_: WithLocation { + location: :302:305, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:377:390, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "e641f1651df716303d2bc5e2ac33e799", + ), + }, + ), +) +extend type Cat { + favorite_page(id: ID!): Page! @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", import_name: "favorite_page", import_path: "module.js") @resolver_source_hash(value: "e641f1651df716303d2bc5e2ac33e799") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 477:486, + token: Token { + span: 477:486, + kind: Identifier, + }, + value: "all_pages", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 530:541, + type_: List( + ListTypeAnnotation { + span: 530:541, + open: Token { + span: 0:0, + kind: Empty, + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 536:540, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 536:540, + token: Token { + span: 536:540, + kind: Identifier, + }, + value: "Page", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + close: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + 
}, + ), + arguments: Some( + List { + span: 511:527, + start: Token { + span: 511:512, + kind: OpenBrace, + }, + items: [ + InputValueDefinition { + name: Identifier { + span: 512:517, + token: Token { + span: 512:517, + kind: Identifier, + }, + value: "limit", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 520:526, + token: Token { + span: 520:526, + kind: Identifier, + }, + value: "Float", + }, + }, + ), + default_value: None, + directives: [], + span: 512:526, + }, + ], + end: Token { + span: 526:527, + kind: CloseBrace, + }, + }, + ), + directives: [], + description: None, + hack_source: None, + span: 477:486, + }, + type_: WithLocation { + location: :302:305, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:477:486, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "e641f1651df716303d2bc5e2ac33e799", + ), + }, + ), +) +extend type Cat { + all_pages(limit: Float): [Page!]! @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", import_name: "all_pages", import_path: "module.js") @resolver_source_hash(value: "e641f1651df716303d2bc5e2ac33e799") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 302:305, + token: Token { + span: 302:305, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:302:305, + root_fragment: WithLocation { + location: module.js:302:305, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:302:305, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "e641f1651df716303d2bc5e2ac33e799", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! 
@relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "e641f1651df716303d2bc5e2ac33e799") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.input new file mode 100644 index 0000000000000..490f5b4bedcee --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/arguments.input @@ -0,0 +1,26 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type Page from 'Page'; +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType {} + +/** + * @RelayResolver + */ +export function favorite_page(cat: CatFlowType, args: {id: ID}): Page {} + +/** + * @RelayResolver + */ +export function all_pages(cat: CatFlowType, args: {limit: ?number}): Array {} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.expected new file mode 100644 index 0000000000000..ad3634bd3a434 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.expected @@ -0,0 +1,239 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType {} + +/** + * @RelayResolver + * + * This is the age of the cat + * in years + */ +export function age(cat: CatFlowType): number {} + +/** + * @RelayResolver fullname + * + * This is the fullname of the cat + */ +export function name(cat: CatFlowType): string {} + +/** + * @RelayResolver + * + * This is the length of the cat. + * + * @dummyField + * + * This extra descriptive line should error. + */ +export function length(cat: CatFlowType): number {} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 20:28, + token: Token { + span: 20:28, + kind: Identifier, + }, + value: "fullname", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 539:545, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 539:545, + token: Token { + span: 539:545, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: Some( + StringNode { + token: Token { + span: 426:498, + kind: Empty, + }, + value: "\nThis is the fullname of the cat", + }, + ), + hack_source: None, + span: 20:28, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:20:28, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "35158d341ad2013ce52f60be49082ec8", + ), + }, + ), +) +extend type Cat { + fullname: String! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "fullname", import_path: "module.js") @resolver_source_hash(value: "35158d341ad2013ce52f60be49082ec8") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 392:395, + token: Token { + span: 392:395, + kind: Identifier, + }, + value: "age", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 415:421, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 415:421, + token: Token { + span: 415:421, + kind: Identifier, + }, + value: "Float", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: Some( + StringNode { + token: Token { + span: 305:375, + kind: Empty, + }, + value: "\nThis is the age of the cat\nin years", + }, + ), + hack_source: None, + span: 392:395, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:392:395, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "35158d341ad2013ce52f60be49082ec8", + ), + }, + ), +) +extend type Cat { + age: Float! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "age", import_path: "module.js") @resolver_source_hash(value: "35158d341ad2013ce52f60be49082ec8") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "35158d341ad2013ce52f60be49082ec8", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "35158d341ad2013ce52f60be49082ec8") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +✖︎ Multiple docblock descriptions found for this @RelayResolver. Please only include one description (a comment in the docblock uninterrupted by a resolver "@") + + module.js:31:1 + 29 │ export function name(cat: CatFlowType): string {} + 30 │ + 31 │ /** + │ ^^^ + 32 │ * @RelayResolver + │ ^^^^^^^^^^^^^^^^^ + 33 │ * + │ ^^ + 34 │ * This is the length of the cat. + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 35 │ * + │ ^^ + 36 │ * @dummyField + │ ^^^^^^^^^^^^^^ + 37 │ * + │ ^^ + 38 │ * This extra descriptive line should error. 
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 39 │ */ + │ ^^^ + 40 │ export function length(cat: CatFlowType): number {} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.input new file mode 100644 index 0000000000000..f2ecc1ee2db7f --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/description.input @@ -0,0 +1,41 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType {} + +/** + * @RelayResolver + * + * This is the age of the cat + * in years + */ +export function age(cat: CatFlowType): number {} + +/** + * @RelayResolver fullname + * + * This is the fullname of the cat + */ +export function name(cat: CatFlowType): string {} + +/** + * @RelayResolver + * + * This is the length of the cat. + * + * @dummyField + * + * This extra descriptive line should error. + */ +export function length(cat: CatFlowType): number {} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.expected new file mode 100644 index 0000000000000..d1f1f18d34c70 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.expected @@ -0,0 +1,249 @@ +==================================== INPUT ==================================== +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; +import type Dog from 'Dog'; + +import type { IdOf } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function friend(cat: CatFlowType): IdOf<"Cat"> { + return {id: '1'}; +} + +/** + * @RelayResolver + */ +export function Dog(id: DataID): Dog { + return {}; +} +/** + * @RelayResolver + */ +export function neighbor(cat: CatFlowType): IdOf<"Dog"> { + return {id: '1'}; +} + + +/** + * @RelayResolver + */ +export function undefined(cat: CatFlowType): IdOf<"Mouse"> { + return {id: '1'}; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 433:439, + token: Token { + span: 433:439, + kind: Identifier, + }, + value: "friend", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 459:470, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 464:469, + token: Token { + span: 464:469, + kind: Identifier, + }, + value: "Cat", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 433:439, + }, + type_: WithLocation { + location: :344:347, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:433:439, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "68fe0e748a29b143c69966ed811edec6", + ), + }, + ), +) +extend type Cat { + friend: Cat! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", import_name: "friend", import_path: "module.js") @resolver_source_hash(value: "68fe0e748a29b143c69966ed811edec6") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 618:626, + token: Token { + span: 618:626, + kind: Identifier, + }, + value: "neighbor", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 646:657, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 651:656, + token: Token { + span: 651:656, + kind: Identifier, + }, + value: "Dog", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 618:626, + }, + type_: WithLocation { + location: :344:347, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:618:626, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "68fe0e748a29b143c69966ed811edec6", + ), + }, + ), +) +extend type Cat { + neighbor: Dog! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", import_name: "neighbor", import_path: "module.js") @resolver_source_hash(value: "68fe0e748a29b143c69966ed811edec6") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 344:347, + token: Token { + span: 344:347, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:344:347, + root_fragment: WithLocation { + location: module.js:344:347, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:344:347, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "68fe0e748a29b143c69966ed811edec6", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "68fe0e748a29b143c69966ed811edec6") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 538:541, + token: Token { + span: 538:541, + kind: Identifier, + }, + value: "Dog", + }, + rhs_location: module.js:538:541, + root_fragment: WithLocation { + location: module.js:538:541, + item: FragmentDefinitionName( + "Dog__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:538:541, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "68fe0e748a29b143c69966ed811edec6", + ), + }, + ), +) +type Dog @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! 
@relay_resolver(generated_fragment: true, fragment_name: "Dog__id", import_name: "Dog", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "68fe0e748a29b143c69966ed811edec6") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +✖︎ Reference to undefined type 'Mouse'. + + module.js:45:51 + 44 │ */ + 45 │ export function undefined(cat: CatFlowType): IdOf<"Mouse"> { + │ ^^^^^^^ + 46 │ return {id: '1'}; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.input new file mode 100644 index 0000000000000..9186bf3fe81fd --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/idof.input @@ -0,0 +1,48 @@ +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; +import type Dog from 'Dog'; + +import type { IdOf } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function friend(cat: CatFlowType): IdOf<"Cat"> { + return {id: '1'}; +} + +/** + * @RelayResolver + */ +export function Dog(id: DataID): Dog { + return {}; +} +/** + * @RelayResolver + */ +export function neighbor(cat: CatFlowType): IdOf<"Dog"> { + return {id: '1'}; +} + + +/** + * @RelayResolver + */ +export function undefined(cat: CatFlowType): IdOf<"Mouse"> { + return {id: '1'}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.expected new file mode 100644 index 0000000000000..bfee5988ff36b --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +const MyResolver = {x: number}; +==================================== OUTPUT =================================== + + +✖︎ @RelayResolver annotation is expected to be on a named export + + module.js:15:1 + 13 │ * @RelayResolver + 14 │ */ + 15 │ const MyResolver = {x: number}; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.input new file mode 100644 index 0000000000000..87375c2cd198d --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-export-error.input @@ -0,0 +1,16 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +const MyResolver = {x: number}; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.expected new file mode 100644 index 0000000000000..0ac8820696401 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +export const MyResolver = string; +==================================== OUTPUT =================================== + + +✖︎ @RelayResolver annotation is expected to be on a named function or type alias + + module.js:15:1 + 13 │ * @RelayResolver + 14 │ */ + 15 │ export const MyResolver = string; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.input new file mode 100644 index 0000000000000..88bf616f696d4 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/incorrect-type-error.input @@ -0,0 +1,16 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +export const MyResolver = string; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.expected new file mode 100644 index 0000000000000..56779fdc6ede1 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.expected @@ -0,0 +1,130 @@ +==================================== INPUT ==================================== +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; +import type { LiveState } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function mood(cat: CatFlowType): LiveState { + return { + read: () => store.getState().counter, + subscribe: (cb) => { + return store.subscribe(cb); + }, + }; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 409:413, + token: Token { + span: 409:413, + kind: Identifier, + }, + value: "mood", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 443:449, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 443:449, + token: Token { + span: 443:449, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 409:413, + }, + type_: WithLocation { + location: :320:323, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: Some( + UnpopulatedIrField { + key_location: module.js:433:450, + }, + ), + location: module.js:409:413, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "9f3118df130187c7be17ebe0dc65e441", + ), + }, + ), +) +extend type Cat { + mood: String! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "mood", import_path: "module.js", live: true) @resolver_source_hash(value: "9f3118df130187c7be17ebe0dc65e441") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 320:323, + token: Token { + span: 320:323, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:320:323, + root_fragment: WithLocation { + location: module.js:320:323, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:320:323, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "9f3118df130187c7be17ebe0dc65e441", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "9f3118df130187c7be17ebe0dc65e441") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.input new file mode 100644 index 0000000000000..a39a94528e949 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/live.input @@ -0,0 +1,30 @@ +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; +import type { LiveState } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function mood(cat: CatFlowType): LiveState { + return { + read: () => store.getState().counter, + subscribe: (cb) => { + return store.subscribe(cb); + }, + }; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.expected new file mode 100644 index 0000000000000..1d2baddb11779 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +export function Cat(id): CatFlowType { + return {}; +} +==================================== OUTPUT =================================== + + +✖︎ Expected Relay Resolver function param to include type annotation + + module.js:14:21 + 13 │ */ + 14 │ export function Cat(id): CatFlowType { + │ ^^ + 15 │ return {}; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.input new file mode 100644 index 0000000000000..e801ef5d0908b --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/missing-param-type-error.input @@ -0,0 +1,17 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from "Cat"; + +/** + * @RelayResolver + */ +export function Cat(id): CatFlowType { + return {}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.expected new file mode 100644 index 0000000000000..c74a49ca97443 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.expected @@ -0,0 +1,96 @@ +==================================== INPUT ==================================== +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + +//- Cat-hungry.js + +/** + * @RelayResolver is_hungry + */ +export function hungry(cat: CatFlowType): number { + return 5; +} + +//- Dog.js + +import * as DogNamespace from 'Dog'; + +/** + * @RelayResolver + */ +export function Dog(id: DataID): DogNamespace { + return {}; +} + +//- Dog-name.js + +import type DogFlowType from 'Dog.flow'; + +/** + * @RelayResolver + */ +export function name(dog: DogFlowType): string { + return "dog"; +} +==================================== OUTPUT =================================== + + +✖︎ Can't find flow type definition for `CatFlowType`. 
Expected the type to be imported from another module, or exported from the current module + + Cat-hungry.js:5:29 + 4 │ */ + 5 │ export function hungry(cat: CatFlowType): number { + │ ^^^^^^^^^^^ + 6 │ return 5; + + +✖︎ Failed to find type definition for `DogFlowType` using a default import from module `Dog.flow`. Please make sure `DogFlowType` is imported using a named or default import and that it is a resolver type + + Dog-name.js:7:27 + 6 │ */ + 7 │ export function name(dog: DogFlowType): string { + │ ^^^^^^^^^^^ + 8 │ return "dog"; + + +✖︎ Types used in @RelayResolver definitions should be imported using named or default imports (without using a `*`) + + Dog.js:7:34 + 6 │ */ + 7 │ export function Dog(id: DataID): DogNamespace { + │ ^^^^^^^^^^^^ + 8 │ return {}; + + ℹ︎ DogNamespace is imported from + + Dog.js:2:13 + 1 │ + 2 │ import * as DogNamespace from 'Dog'; + │ ^^^^^^^^^^^^ + 3 │ diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.input new file mode 100644 index 0000000000000..979397f489afd --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/multiple-modules.input @@ -0,0 +1,59 @@ +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + +//- Cat-hungry.js + +/** + * @RelayResolver is_hungry + */ +export function hungry(cat: CatFlowType): number { + return 5; +} + +//- Dog.js + +import * as DogNamespace from 'Dog'; + +/** + * @RelayResolver + */ +export function Dog(id: DataID): DogNamespace { + return {}; +} + +//- Dog-name.js + +import type DogFlowType from 'Dog.flow'; + +/** + * @RelayResolver + */ +export function name(dog: DogFlowType): string { + return "dog"; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.expected new file mode 100644 index 0000000000000..c8fa694e21df6 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.expected @@ -0,0 +1,70 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- Cat.js + +export type CatFlowType = {id: string} + +/** + * @RelayResolver + */ +export function Cat(id: DataID): ?CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: ?CatFlowType): string { + return "cat"; +} +==================================== OUTPUT =================================== +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 275:278, + token: Token { + span: 275:278, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: Cat.js:275:278, + root_fragment: WithLocation { + location: Cat.js:275:278, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: Cat.js:275:278, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "4325ac2bef4354788b5ccb1c5f5ffd53", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "Cat.js", inject_fragment_data: "id") @resolver_source_hash(value: "4325ac2bef4354788b5ccb1c5f5ffd53") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +✖︎ A nullable strong type is provided, please make the type non-nullable. The type can't be nullable in the runtime. + + Cat.js:22:27 + 21 │ */ + 22 │ export function name(cat: ?CatFlowType): string { + │ ^^^^^^^^^^^^ + 23 │ return "cat"; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.input new file mode 100644 index 0000000000000..f605d534c61e3 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/optional-strong-type.input @@ -0,0 +1,25 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- Cat.js + +export type CatFlowType = {id: string} + +/** + * @RelayResolver + */ +export function Cat(id: DataID): ?CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: ?CatFlowType): string { + return "cat"; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.expected new file mode 100644 index 0000000000000..1b1565ac5edb8 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.expected @@ -0,0 +1,43 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/ @RelayResolver +func() {} +==================================== OUTPUT =================================== + + +✖︎ ';' expected + + module.js:13:8 + 12 │ / @RelayResolver + 13 │ func() {} + │ ^ + + +✖︎ non-terminated regular expression literal + + module.js:12:17 + 11 │ + 12 │ / @RelayResolver + │ ^ + 13 │ func() {} + + +✖︎ regular expression started here + + module.js:12:1 + 10 │ import type CatFlowType from "Cat"; + 11 │ + 12 │ / @RelayResolver + │ ^ + 13 │ func() {} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.input new file mode 100644 index 0000000000000..d918ab82ef7cb --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/parse_error.input @@ -0,0 +1,14 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//- module.js + +import type CatFlowType from "Cat"; + +/ @RelayResolver +func() {} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.expected new file mode 100644 index 0000000000000..6a6868059747b --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.expected @@ -0,0 +1,195 @@ +==================================== INPUT ==================================== +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): ?string { + return "cat"; +} + +/** + * @RelayResolver + */ +export function names(cat: CatFlowType): Array { + return 5; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 174:179, + token: Token { + span: 174:179, + kind: Identifier, + }, + value: "names", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 199:213, + type_: List( + ListTypeAnnotation { + span: 199:213, + open: Token { + span: 0:0, + kind: Empty, + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 206:212, + token: Token { + span: 206:212, + kind: Identifier, + }, + value: "String", + }, + }, + ), + close: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + exclamation: Token { + span: 0:0, + 
kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 174:179, + }, + type_: WithLocation { + location: :273:276, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: Cat-name.js:174:179, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "b4f1a8f1551cd4c93a9b61b8c25d2e61", + ), + }, + ), +) +extend type Cat { + names: [String]! @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "names", import_path: "Cat-name.js") @resolver_source_hash(value: "b4f1a8f1551cd4c93a9b61b8c25d2e61") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 79:83, + token: Token { + span: 79:83, + kind: Identifier, + }, + value: "name", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 104:110, + token: Token { + span: 104:110, + kind: Identifier, + }, + value: "String", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 79:83, + }, + type_: WithLocation { + location: :273:276, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: Cat-name.js:79:83, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "b4f1a8f1551cd4c93a9b61b8c25d2e61", + ), + }, + ), +) +extend type Cat { + name: String @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "name", import_path: "Cat-name.js") @resolver_source_hash(value: "b4f1a8f1551cd4c93a9b61b8c25d2e61") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 273:276, + token: Token { + span: 273:276, + kind: Identifier, + }, + value: "Cat", 
+ }, + rhs_location: Cat.js:273:276, + root_fragment: WithLocation { + location: Cat.js:273:276, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: Cat.js:273:276, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "b0ce1a838dad74fce5422c930d8d9fc3", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "Cat.js", inject_fragment_data: "id") @resolver_source_hash(value: "b0ce1a838dad74fce5422c930d8d9fc3") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.input new file mode 100644 index 0000000000000..d810b5b593797 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/plural-optional.input @@ -0,0 +1,35 @@ +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): ?string { + return "cat"; +} + +/** + * @RelayResolver + */ +export function names(cat: CatFlowType): Array { + return 5; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.expected new file mode 100644 index 0000000000000..f265de97a4c85 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.expected @@ -0,0 +1,296 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function ages(cat: CatFlowType): ?number { + return 2; +} + + +/** + * @RelayResolver + */ +export function hungry1(cat: CatFlowType): ?boolean { + return false; +} + +/** + * @RelayResolver + */ +export function hungry2(cat: CatFlowType): ?false { + return false; +} + +/** + * @RelayResolver + */ +export function hungry3(cat: CatFlowType): true { + return false; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 362:366, + token: Token { + span: 362:366, + kind: Identifier, + }, + value: "ages", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 387:393, + token: Token { + span: 387:393, + kind: Identifier, + }, + value: "Float", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 362:366, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:362:366, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "5e42d8055bf96703110b4485122e0e0f", + ), + }, + ), +) +extend type Cat { + ages: Float @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "ages", import_path: "module.js") @resolver_source_hash(value: "5e42d8055bf96703110b4485122e0e0f") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 454:461, + token: Token { + span: 454:461, + kind: Identifier, + }, + value: "hungry1", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + 
span: 482:489, + token: Token { + span: 482:489, + kind: Identifier, + }, + value: "Boolean", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 454:461, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:454:461, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "5e42d8055bf96703110b4485122e0e0f", + ), + }, + ), +) +extend type Cat { + hungry1: Boolean @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "hungry1", import_path: "module.js") @resolver_source_hash(value: "5e42d8055bf96703110b4485122e0e0f") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 553:560, + token: Token { + span: 553:560, + kind: Identifier, + }, + value: "hungry2", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 581:586, + token: Token { + span: 581:586, + kind: Identifier, + }, + value: "Boolean", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 553:560, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:553:560, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "5e42d8055bf96703110b4485122e0e0f", + ), + }, + ), +) +extend type Cat { + hungry2: Boolean @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "hungry2", import_path: "module.js") @resolver_source_hash(value: "5e42d8055bf96703110b4485122e0e0f") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: 
FieldDefinition { + name: Identifier { + span: 650:657, + token: Token { + span: 650:657, + kind: Identifier, + }, + value: "hungry3", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 677:681, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 677:681, + token: Token { + span: 677:681, + kind: Identifier, + }, + value: "Boolean", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 650:657, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:650:657, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "5e42d8055bf96703110b4485122e0e0f", + ), + }, + ), +) +extend type Cat { + hungry3: Boolean! @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "hungry3", import_path: "module.js") @resolver_source_hash(value: "5e42d8055bf96703110b4485122e0e0f") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "5e42d8055bf96703110b4485122e0e0f", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! 
@relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "5e42d8055bf96703110b4485122e0e0f") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.input new file mode 100644 index 0000000000000..cef6503712012 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/primitive-types.input @@ -0,0 +1,47 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function ages(cat: CatFlowType): ?number { + return 2; +} + + +/** + * @RelayResolver + */ +export function hungry1(cat: CatFlowType): ?boolean { + return false; +} + +/** + * @RelayResolver + */ +export function hungry2(cat: CatFlowType): ?false { + return false; +} + +/** + * @RelayResolver + */ +export function hungry3(cat: CatFlowType): true { + return false; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.expected new file mode 100644 index 0000000000000..d5d4a0bf51d50 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.expected @@ -0,0 +1,115 @@ +==================================== INPUT ==================================== +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function friend(cat: CatFlowType): ?IdOf<"Cat"> { + return {}; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 79:85, + token: Token { + span: 79:85, + kind: Identifier, + }, + value: "friend", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 111:116, + token: Token { + span: 111:116, + kind: Identifier, + }, + value: "Cat", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 79:85, + }, + type_: WithLocation { + location: :273:276, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: Cat-name.js:79:85, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "2cd51ab90c9beb1c509ff624a8ba6609", + ), + }, + ), +) +extend type Cat { + friend: Cat @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", import_name: "friend", import_path: "Cat-name.js") @resolver_source_hash(value: "2cd51ab90c9beb1c509ff624a8ba6609") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 273:276, + token: Token { + span: 273:276, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: Cat.js:273:276, + root_fragment: WithLocation { + location: Cat.js:273:276, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: 
Cat.js:273:276, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "b0ce1a838dad74fce5422c930d8d9fc3", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "Cat.js", inject_fragment_data: "id") @resolver_source_hash(value: "b0ce1a838dad74fce5422c930d8d9fc3") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.input new file mode 100644 index 0000000000000..ff145160c5048 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-strong-object.input @@ -0,0 +1,28 @@ +//- Cat.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +//- Cat-name.js + +import type CatFlowType from 'Cat'; +/** + * @RelayResolver + */ +export function friend(cat: CatFlowType): ?IdOf<"Cat"> { + return {}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.expected new file mode 100644 index 0000000000000..784dbd984785f --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.expected @@ -0,0 +1,147 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export type FullName = {first: string, last: string}; + +/** + * @RelayResolver + */ +export function fullName(cat: CatFlowType): ?FullName { + return {first: "Mr", last: "Cat"}; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 442:450, + token: Token { + span: 442:450, + kind: Identifier, + }, + value: "fullName", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 471:479, + token: Token { + span: 471:479, + kind: Identifier, + }, + value: "FullName", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 442:450, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:442:450, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "1b4f4f49bebe8b72971595382a3f9b57", + ), + }, + ), +) +extend type Cat { + fullName: FullName @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "fullName", import_path: "module.js") @resolver_source_hash(value: "1b4f4f49bebe8b72971595382a3f9b57") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", 
+ ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "1b4f4f49bebe8b72971595382a3f9b57", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "1b4f4f49bebe8b72971595382a3f9b57") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +Type( + WeakObjectType( + WeakObjectIr { + type_name: Identifier { + span: 357:365, + token: Token { + span: 357:365, + kind: Identifier, + }, + value: "FullName", + }, + rhs_location: module.js:357:365, + description: None, + hack_source: None, + deprecated: None, + location: module.js:357:365, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "1b4f4f49bebe8b72971595382a3f9b57", + ), + }, + ), +) +scalar FullNameModel @__RelayCustomScalar(path: "module.js", export_name: "FullName") + + +type FullName @__RelayResolverModel @RelayOutputType @__RelayWeakObject { + __relay_model_instance: FullNameModel! @resolver_source_hash(value: "1b4f4f49bebe8b72971595382a3f9b57") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.input new file mode 100644 index 0000000000000..a50b8654cf051 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-optional-weak-object.input @@ -0,0 +1,29 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export type FullName = {first: string, last: string}; + +/** + * @RelayResolver + */ +export function fullName(cat: CatFlowType): ?FullName { + return {first: "Mr", last: "Cat"}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.expected new file mode 100644 index 0000000000000..a0de3c7ec9912 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.expected @@ -0,0 +1,229 @@ +==================================== INPUT ==================================== +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; + +import type { RelayResolverValue } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function complexValue(cat: CatFlowType): RelayResolverValue<{a: 1, b: 2}> { + return {a: 1, b: 2}; +} + +/** + * @RelayResolver + */ +export function optionalRelayResolverValue(cat: CatFlowType): ?RelayResolverValue<{a: 1, b: 2}> { + return null; +} + +/** + * @RelayResolver + */ +export function relayResolveValueOverridesAllOtherAnnotation(cat: CatFlowType): ?RelayResolverValue> { + return {id: '1'}; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 419:431, + token: Token { + span: 419:431, + kind: Identifier, + }, + value: "complexValue", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 451:483, + token: Token { + span: 451:483, + kind: Identifier, + }, + value: "RelayResolverValue", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 419:431, + }, + type_: WithLocation { + location: :330:333, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:419:431, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "fc15c065174264428a3632fe9cf329d6", + ), + }, + ), +) +extend type Cat { + complexValue: RelayResolverValue @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "complexValue", import_path: "module.js") @resolver_source_hash(value: "fc15c065174264428a3632fe9cf329d6") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 554:580, + token: Token { + 
span: 554:580, + kind: Identifier, + }, + value: "optionalRelayResolverValue", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 601:633, + token: Token { + span: 601:633, + kind: Identifier, + }, + value: "RelayResolverValue", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 554:580, + }, + type_: WithLocation { + location: :330:333, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:554:580, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "fc15c065174264428a3632fe9cf329d6", + ), + }, + ), +) +extend type Cat { + optionalRelayResolverValue: RelayResolverValue @relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "optionalRelayResolverValue", import_path: "module.js") @resolver_source_hash(value: "fc15c065174264428a3632fe9cf329d6") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 696:740, + token: Token { + span: 696:740, + kind: Identifier, + }, + value: "relayResolveValueOverridesAllOtherAnnotation", + }, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 761:792, + token: Token { + span: 761:792, + kind: Identifier, + }, + value: "RelayResolverValue", + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 696:740, + }, + type_: WithLocation { + location: :330:333, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:696:740, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "fc15c065174264428a3632fe9cf329d6", + ), + }, + ), +) +extend type Cat { + relayResolveValueOverridesAllOtherAnnotation: RelayResolverValue @relay_resolver(fragment_name: 
"Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "relayResolveValueOverridesAllOtherAnnotation", import_path: "module.js") @resolver_source_hash(value: "fc15c065174264428a3632fe9cf329d6") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 330:333, + token: Token { + span: 330:333, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:330:333, + root_fragment: WithLocation { + location: module.js:330:333, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:330:333, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "fc15c065174264428a3632fe9cf329d6", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "fc15c065174264428a3632fe9cf329d6") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.input new file mode 100644 index 0000000000000..4ea710cfa4e2f --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/return-relay-resolver-value.input @@ -0,0 +1,40 @@ +//- module.js + +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type CatFlowType from 'Cat'; + +import type { RelayResolverValue } from 'relay-runtime'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export function complexValue(cat: CatFlowType): RelayResolverValue<{a: 1, b: 2}> { + return {a: 1, b: 2}; +} + +/** + * @RelayResolver + */ +export function optionalRelayResolverValue(cat: CatFlowType): ?RelayResolverValue<{a: 1, b: 2}> { + return null; +} + +/** + * @RelayResolver + */ +export function relayResolveValueOverridesAllOtherAnnotation(cat: CatFlowType): ?RelayResolverValue> { + return {id: '1'}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.expected new file mode 100644 index 0000000000000..529d626508518 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.expected @@ -0,0 +1,138 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type Cat from 'Cat.js'; +import type {CatIsHungryFragment$key} from 'CatIsHungryFragment.graphql'; + +import {graphql} from 'relay-runtime'; +import {readFragment} from 'relay-runtime/store/ResolverFragments'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): Cat { + return {} +} + +/** + * @RelayResolver + */ +export function full_name(key: CatIsHungryFragment$key): string { + const cat = readFragment(graphql` + fragment CatIsHungryFragment on Cat { + first_name + last_name + } + `, key); + + return `${first_name} ${last_name}`; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 529:538, + token: Token { + span: 529:538, + kind: Identifier, + }, + value: "full_name", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 570:576, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 570:576, + token: Token { + span: 570:576, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 529:538, + }, + type_: WithLocation { + location: module.js:34:40, + item: "Cat", + }, + root_fragment: Some( + WithLocation { + location: module.js:544:567, + item: FragmentDefinitionName( + "CatIsHungryFragment$key", + ), + }, + ), + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:529:538, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "97c71c89ad8ccb1e0ff10de8887a25e0", + ), + }, + ), +) +extend type Cat { + full_name: String! 
@relay_resolver(has_output_type: true, import_name: "full_name", import_path: "module.js", fragment_name: "CatIsHungryFragment$key") @resolver_source_hash(value: "97c71c89ad8ccb1e0ff10de8887a25e0") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 449:452, + token: Token { + span: 449:452, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:449:452, + root_fragment: WithLocation { + location: module.js:449:452, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:449:452, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "97c71c89ad8ccb1e0ff10de8887a25e0", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "97c71c89ad8ccb1e0ff10de8887a25e0") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.input new file mode 100644 index 0000000000000..25e8da84ee36a --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/root-fragment.input @@ -0,0 +1,35 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type Cat from 'Cat.js'; +import type {CatIsHungryFragment$key} from 'CatIsHungryFragment.graphql'; + +import {graphql} from 'relay-runtime'; +import {readFragment} from 'relay-runtime/store/ResolverFragments'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): Cat { + return {} +} + +/** + * @RelayResolver + */ +export function full_name(key: CatIsHungryFragment$key): string { + const cat = readFragment(graphql` + fragment CatIsHungryFragment on Cat { + first_name + last_name + } + `, key); + + return `${first_name} ${last_name}`; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.expected new file mode 100644 index 0000000000000..a372c6ddf8e58 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.expected @@ -0,0 +1,189 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + + +/** + * @RelayResolver is_hungry + */ +export function hungry(cat: CatFlowType): number { + return 5; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 20:29, + token: Token { + span: 20:29, + kind: Identifier, + }, + value: "is_hungry", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 493:499, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 493:499, + token: Token { + span: 493:499, + kind: Identifier, + }, + value: "Float", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 20:29, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:20:29, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "09dcae06200cddceb5b4fde5706f8a47", + ), + }, + ), +) +extend type Cat { + is_hungry: Float! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "is_hungry", import_path: "module.js") @resolver_source_hash(value: "09dcae06200cddceb5b4fde5706f8a47") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 362:366, + token: Token { + span: 362:366, + kind: Identifier, + }, + value: "name", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 386:392, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 386:392, + token: Token { + span: 386:392, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 362:366, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:362:366, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "09dcae06200cddceb5b4fde5706f8a47", + ), + }, + ), +) +extend type Cat { + name: String! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "name", import_path: "module.js") @resolver_source_hash(value: "09dcae06200cddceb5b4fde5706f8a47") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "09dcae06200cddceb5b4fde5706f8a47", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "09dcae06200cddceb5b4fde5706f8a47") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.input new file mode 100644 index 0000000000000..ea1b6bfcc1c1c --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/single-module.input @@ -0,0 +1,33 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + + +/** + * @RelayResolver is_hungry + */ +export function hungry(cat: CatFlowType): number { + return 5; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.expected new file mode 100644 index 0000000000000..62ebe5e6ea560 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.expected @@ -0,0 +1,193 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- Cat.js + +export type CatFlowType = {id: string} + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + + +//- Cat-fields.js + +import type {CatFlowType} from 'Cat'; + +/** + * @RelayResolver + */ +export function age(cat: CatFlowType): number { + return 2; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 365:369, + token: Token { + span: 365:369, + kind: Identifier, + }, + value: "name", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 389:395, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 389:395, + token: Token { + span: 389:395, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 365:369, + }, + type_: WithLocation { + location: :275:278, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: Cat.js:365:369, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "3df3c8ffe9e77951c10886db6f829fc8", + ), + }, + ), +) +extend type Cat { + name: String! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "name", import_path: "Cat.js") @resolver_source_hash(value: "3df3c8ffe9e77951c10886db6f829fc8") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 82:85, + token: Token { + span: 82:85, + kind: Identifier, + }, + value: "age", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 105:111, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 105:111, + token: Token { + span: 105:111, + kind: Identifier, + }, + value: "Float", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 82:85, + }, + type_: WithLocation { + location: :275:278, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: Cat-fields.js:82:85, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "51df4151c38bd1df6bb8ad577a47490b", + ), + }, + ), +) +extend type Cat { + age: Float! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "age", import_path: "Cat-fields.js") @resolver_source_hash(value: "51df4151c38bd1df6bb8ad577a47490b") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 275:278, + token: Token { + span: 275:278, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: Cat.js:275:278, + root_fragment: WithLocation { + location: Cat.js:275:278, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: Cat.js:275:278, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "3df3c8ffe9e77951c10886db6f829fc8", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "Cat.js", inject_fragment_data: "id") @resolver_source_hash(value: "3df3c8ffe9e77951c10886db6f829fc8") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.input new file mode 100644 index 0000000000000..01e4190670345 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/strong-type-define-flow-within.input @@ -0,0 +1,37 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- Cat.js + +export type CatFlowType = {id: string} + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(cat: CatFlowType): string { + return "cat"; +} + + +//- Cat-fields.js + +import type {CatFlowType} from 'Cat'; + +/** + * @RelayResolver + */ +export function age(cat: CatFlowType): number { + return 2; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.expected new file mode 100644 index 0000000000000..86f33a2955095 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.expected @@ -0,0 +1,70 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(id: CatFlowType | DogFlowType): boolean { + return {}; +} +==================================== OUTPUT =================================== +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "dbaa81766321d75c0da7fa6c70895562", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "dbaa81766321d75c0da7fa6c70895562") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +✖︎ UnionTypeAnnotation is not supported + + module.js:22:26 + 21 │ */ + 22 │ export function name(id: CatFlowType | DogFlowType): boolean { + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ + 23 │ return {}; diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.input new file mode 100644 index 0000000000000..921fc24d9411c --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/unsupported-type.error.input @@ -0,0 +1,25 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + + +/** + * @RelayResolver + */ +export function name(id: CatFlowType | DogFlowType): boolean { + return {}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.expected b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.expected new file mode 100644 index 0000000000000..991f4269706cb --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.expected @@ -0,0 +1,291 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export type FullName = {first: string, last: string}; + + +/** + * @RelayResolver + */ +export function first(name: FullName): string { + return name.first; +} + +/** + * @RelayResolver + */ +export function last(name: FullName): string { + return name.last; +} + +/** + * @RelayResolver + */ +export function fullName(cat: CatFlowType): FullName { + return {first: "Mr", last: "Cat"}; +} +==================================== OUTPUT =================================== +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 443:448, + token: Token { + span: 443:448, + kind: Identifier, + }, + value: "first", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 466:472, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 466:472, + token: Token { + span: 466:472, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 443:448, + }, + type_: WithLocation { + location: :357:365, + item: "FullName", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:443:448, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "cbee4c20757f831a7a766d841ecacc1e", + ), + }, + ), +) +extend type FullName { + first: String! 
@relay_resolver(fragment_name: "FullName____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "first", import_path: "module.js") @resolver_source_hash(value: "cbee4c20757f831a7a766d841ecacc1e") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 541:545, + token: Token { + span: 541:545, + kind: Identifier, + }, + value: "last", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 563:569, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 563:569, + token: Token { + span: 563:569, + kind: Identifier, + }, + value: "String", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 541:545, + }, + type_: WithLocation { + location: :357:365, + item: "FullName", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:541:545, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "cbee4c20757f831a7a766d841ecacc1e", + ), + }, + ), +) +extend type FullName { + last: String! 
@relay_resolver(fragment_name: "FullName____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "last", import_path: "module.js") @resolver_source_hash(value: "cbee4c20757f831a7a766d841ecacc1e") +} + + +Field( + TerseRelayResolver( + TerseRelayResolverIr { + field: FieldDefinition { + name: Identifier { + span: 637:645, + token: Token { + span: 637:645, + kind: Identifier, + }, + value: "fullName", + }, + type_: NonNull( + NonNullTypeAnnotation { + span: 665:673, + type_: Named( + NamedTypeAnnotation { + name: Identifier { + span: 665:673, + token: Token { + span: 665:673, + kind: Identifier, + }, + value: "FullName", + }, + }, + ), + exclamation: Token { + span: 0:0, + kind: Empty, + }, + }, + ), + arguments: None, + directives: [], + description: None, + hack_source: None, + span: 637:645, + }, + type_: WithLocation { + location: :272:275, + item: "Cat", + }, + root_fragment: None, + deprecated: None, + semantic_non_null: None, + live: None, + location: module.js:637:645, + fragment_arguments: None, + source_hash: ResolverSourceHash( + "cbee4c20757f831a7a766d841ecacc1e", + ), + }, + ), +) +extend type Cat { + fullName: FullName! 
@relay_resolver(fragment_name: "Cat____relay_model_instance", generated_fragment: true, inject_fragment_data: "__relay_model_instance", has_output_type: true, import_name: "fullName", import_path: "module.js") @resolver_source_hash(value: "cbee4c20757f831a7a766d841ecacc1e") +} + + +Type( + StrongObjectResolver( + StrongObjectIr { + type_name: Identifier { + span: 272:275, + token: Token { + span: 272:275, + kind: Identifier, + }, + value: "Cat", + }, + rhs_location: module.js:272:275, + root_fragment: WithLocation { + location: module.js:272:275, + item: FragmentDefinitionName( + "Cat__id", + ), + }, + description: None, + deprecated: None, + live: None, + semantic_non_null: None, + location: module.js:272:275, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "cbee4c20757f831a7a766d841ecacc1e", + ), + }, + ), +) +type Cat @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue! @relay_resolver(generated_fragment: true, fragment_name: "Cat__id", import_name: "Cat", import_path: "module.js", inject_fragment_data: "id") @resolver_source_hash(value: "cbee4c20757f831a7a766d841ecacc1e") @unselectable(reason: "This field is intended only for Relay's internal use") +} + + +Type( + WeakObjectType( + WeakObjectIr { + type_name: Identifier { + span: 357:365, + token: Token { + span: 357:365, + kind: Identifier, + }, + value: "FullName", + }, + rhs_location: module.js:357:365, + description: None, + hack_source: None, + deprecated: None, + location: module.js:357:365, + implements_interfaces: [], + source_hash: ResolverSourceHash( + "cbee4c20757f831a7a766d841ecacc1e", + ), + }, + ), +) +scalar FullNameModel @__RelayCustomScalar(path: "module.js", export_name: "FullName") + + +type FullName @__RelayResolverModel @RelayOutputType @__RelayWeakObject { + __relay_model_instance: FullNameModel! 
@resolver_source_hash(value: "cbee4c20757f831a7a766d841ecacc1e") @unselectable(reason: "This field is intended only for Relay's internal use") +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.input b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.input new file mode 100644 index 0000000000000..bf1cf1e17ee97 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock/fixtures/weak-object.input @@ -0,0 +1,44 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//- module.js + +import type CatFlowType from 'Cat'; + +/** + * @RelayResolver + */ +export function Cat(id: DataID): CatFlowType { + return {}; +} + +/** + * @RelayResolver + */ +export type FullName = {first: string, last: string}; + + +/** + * @RelayResolver + */ +export function first(name: FullName): string { + return name.first; +} + +/** + * @RelayResolver + */ +export function last(name: FullName): string { + return name.last; +} + +/** + * @RelayResolver + */ +export function fullName(cat: CatFlowType): FullName { + return {first: "Mr", last: "Cat"}; +} diff --git a/compiler/crates/relay-schema-generation/tests/docblock_test.rs b/compiler/crates/relay-schema-generation/tests/docblock_test.rs new file mode 100644 index 0000000000000..9016f6cf5f601 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/docblock_test.rs @@ -0,0 +1,153 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<<7f0997fb1dbf297de82340a1c311a2b0>> + */ + +mod docblock; + +use docblock::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn arguments() { + let input = include_str!("docblock/fixtures/arguments.input"); + let expected = include_str!("docblock/fixtures/arguments.expected"); + test_fixture(transform_fixture, file!(), "arguments.input", "docblock/fixtures/arguments.expected", input, expected).await; +} + +#[tokio::test] +async fn description() { + let input = include_str!("docblock/fixtures/description.input"); + let expected = include_str!("docblock/fixtures/description.expected"); + test_fixture(transform_fixture, file!(), "description.input", "docblock/fixtures/description.expected", input, expected).await; +} + +#[tokio::test] +async fn idof() { + let input = include_str!("docblock/fixtures/idof.input"); + let expected = include_str!("docblock/fixtures/idof.expected"); + test_fixture(transform_fixture, file!(), "idof.input", "docblock/fixtures/idof.expected", input, expected).await; +} + +#[tokio::test] +async fn incorrect_export_error() { + let input = include_str!("docblock/fixtures/incorrect-export-error.input"); + let expected = include_str!("docblock/fixtures/incorrect-export-error.expected"); + test_fixture(transform_fixture, file!(), "incorrect-export-error.input", "docblock/fixtures/incorrect-export-error.expected", input, expected).await; +} + +#[tokio::test] +async fn incorrect_type_error() { + let input = include_str!("docblock/fixtures/incorrect-type-error.input"); + let expected = include_str!("docblock/fixtures/incorrect-type-error.expected"); + test_fixture(transform_fixture, file!(), "incorrect-type-error.input", "docblock/fixtures/incorrect-type-error.expected", input, expected).await; +} + +#[tokio::test] +async fn live() { + let input = include_str!("docblock/fixtures/live.input"); + let expected = include_str!("docblock/fixtures/live.expected"); + test_fixture(transform_fixture, 
file!(), "live.input", "docblock/fixtures/live.expected", input, expected).await; +} + +#[tokio::test] +async fn missing_param_type_error() { + let input = include_str!("docblock/fixtures/missing-param-type-error.input"); + let expected = include_str!("docblock/fixtures/missing-param-type-error.expected"); + test_fixture(transform_fixture, file!(), "missing-param-type-error.input", "docblock/fixtures/missing-param-type-error.expected", input, expected).await; +} + +#[tokio::test] +async fn multiple_modules() { + let input = include_str!("docblock/fixtures/multiple-modules.input"); + let expected = include_str!("docblock/fixtures/multiple-modules.expected"); + test_fixture(transform_fixture, file!(), "multiple-modules.input", "docblock/fixtures/multiple-modules.expected", input, expected).await; +} + +#[tokio::test] +async fn optional_strong_type() { + let input = include_str!("docblock/fixtures/optional-strong-type.input"); + let expected = include_str!("docblock/fixtures/optional-strong-type.expected"); + test_fixture(transform_fixture, file!(), "optional-strong-type.input", "docblock/fixtures/optional-strong-type.expected", input, expected).await; +} + +#[tokio::test] +async fn parse_error() { + let input = include_str!("docblock/fixtures/parse_error.input"); + let expected = include_str!("docblock/fixtures/parse_error.expected"); + test_fixture(transform_fixture, file!(), "parse_error.input", "docblock/fixtures/parse_error.expected", input, expected).await; +} + +#[tokio::test] +async fn plural_optional() { + let input = include_str!("docblock/fixtures/plural-optional.input"); + let expected = include_str!("docblock/fixtures/plural-optional.expected"); + test_fixture(transform_fixture, file!(), "plural-optional.input", "docblock/fixtures/plural-optional.expected", input, expected).await; +} + +#[tokio::test] +async fn primitive_types() { + let input = include_str!("docblock/fixtures/primitive-types.input"); + let expected = 
include_str!("docblock/fixtures/primitive-types.expected"); + test_fixture(transform_fixture, file!(), "primitive-types.input", "docblock/fixtures/primitive-types.expected", input, expected).await; +} + +#[tokio::test] +async fn return_optional_strong_object() { + let input = include_str!("docblock/fixtures/return-optional-strong-object.input"); + let expected = include_str!("docblock/fixtures/return-optional-strong-object.expected"); + test_fixture(transform_fixture, file!(), "return-optional-strong-object.input", "docblock/fixtures/return-optional-strong-object.expected", input, expected).await; +} + +#[tokio::test] +async fn return_optional_weak_object() { + let input = include_str!("docblock/fixtures/return-optional-weak-object.input"); + let expected = include_str!("docblock/fixtures/return-optional-weak-object.expected"); + test_fixture(transform_fixture, file!(), "return-optional-weak-object.input", "docblock/fixtures/return-optional-weak-object.expected", input, expected).await; +} + +#[tokio::test] +async fn return_relay_resolver_value() { + let input = include_str!("docblock/fixtures/return-relay-resolver-value.input"); + let expected = include_str!("docblock/fixtures/return-relay-resolver-value.expected"); + test_fixture(transform_fixture, file!(), "return-relay-resolver-value.input", "docblock/fixtures/return-relay-resolver-value.expected", input, expected).await; +} + +#[tokio::test] +async fn root_fragment() { + let input = include_str!("docblock/fixtures/root-fragment.input"); + let expected = include_str!("docblock/fixtures/root-fragment.expected"); + test_fixture(transform_fixture, file!(), "root-fragment.input", "docblock/fixtures/root-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn single_module() { + let input = include_str!("docblock/fixtures/single-module.input"); + let expected = include_str!("docblock/fixtures/single-module.expected"); + test_fixture(transform_fixture, file!(), "single-module.input", 
"docblock/fixtures/single-module.expected", input, expected).await; +} + +#[tokio::test] +async fn strong_type_define_flow_within() { + let input = include_str!("docblock/fixtures/strong-type-define-flow-within.input"); + let expected = include_str!("docblock/fixtures/strong-type-define-flow-within.expected"); + test_fixture(transform_fixture, file!(), "strong-type-define-flow-within.input", "docblock/fixtures/strong-type-define-flow-within.expected", input, expected).await; +} + +#[tokio::test] +async fn unsupported_type_error() { + let input = include_str!("docblock/fixtures/unsupported-type.error.input"); + let expected = include_str!("docblock/fixtures/unsupported-type.error.expected"); + test_fixture(transform_fixture, file!(), "unsupported-type.error.input", "docblock/fixtures/unsupported-type.error.expected", input, expected).await; +} + +#[tokio::test] +async fn weak_object() { + let input = include_str!("docblock/fixtures/weak-object.input"); + let expected = include_str!("docblock/fixtures/weak-object.expected"); + test_fixture(transform_fixture, file!(), "weak-object.input", "docblock/fixtures/weak-object.expected", input, expected).await; +} diff --git a/compiler/crates/relay-schema-generation/tests/extract.rs b/compiler/crates/relay-schema-generation/tests/extract.rs new file mode 100644 index 0000000000000..8b72a1b0ceee9 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract.rs @@ -0,0 +1,77 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use hermes_comments::find_nodes_after_comments; +use hermes_estree::IntoFunction; +use hermes_estree::Node; +use hermes_parser::parse; +use hermes_parser::ParserDialect; +use hermes_parser::ParserFlags; +use relay_schema_generation::RelayResolverExtractor; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let result = parse( + fixture.content, + fixture.file_name, + ParserFlags { + strict_mode: true, + enable_jsx: false, + dialect: ParserDialect::Flow, + store_doc_block: false, + store_comments: true, + }, + ) + .unwrap(); + + let attached_comments = find_nodes_after_comments(&result.ast, &result.comments); + + let extractor = RelayResolverExtractor::new(); + + let output = attached_comments + .into_iter() + .filter_map(|(comment, _, node, _)| { + let comment = comment.trim(); + match comment { + "extract" => match node { + Node::FunctionDeclaration(node) => { + Some(extractor.extract_function(node.function())) + } + _ => None, + }, + _ => None, + } + }) + .map(|result| match result { + Ok(data) => { + format!("{:#?}", data) + } + Err(diag) => diagnostics_to_sorted_string(fixture.content, &diag), + }) + .collect::>() + .join("\n\n"); + + Ok(output) +} + +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = DiagnosticPrinter::new(|source_location| match source_location { + SourceLocationKey::Embedded { .. } => unreachable!(), + SourceLocationKey::Standalone { .. 
} => unreachable!(), + SourceLocationKey::Generated => Some(TextSource::from_whole_document(source)), + }); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.expected b/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.expected new file mode 100644 index 0000000000000..d18504b0a4225 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.expected @@ -0,0 +1,131 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//extract +function favorite_page(user: User, args: {id: ID}): Page {} +==================================== OUTPUT =================================== +Strong( + FieldData { + field_name: WithLocation { + location: :211:224, + item: "favorite_page", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Page", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 254, + end: 258, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 254, + end: 258, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 231, + end: 235, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 231, + end: 235, + }, + }, + ), + arguments: Some( + ObjectTypeAnnotation( + ObjectTypeAnnotation { + properties: [ + ObjectTypeProperty( + ObjectTypeProperty { + key: Identifier( + Identifier { + 
name: "id", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 244, + end: 246, + }, + }, + ), + value: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "ID", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 248, + end: 250, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 248, + end: 250, + }, + }, + ), + method: false, + optional: false, + is_static: false, + proto: false, + variance: None, + kind: Init, + loc: None, + range: SourceRange { + start: 244, + end: 250, + }, + }, + ), + ], + indexers: [], + call_properties: [], + internal_slots: [], + inexact: false, + exact: false, + loc: None, + range: SourceRange { + start: 243, + end: 251, + }, + }, + ), + ), + is_live: None, + }, +) diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.js b/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.js new file mode 100644 index 0000000000000..3a9924f5acf43 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/arguments.js @@ -0,0 +1,9 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//extract +function favorite_page(user: User, args: {id: ID}): Page {} diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.expected b/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.expected new file mode 100644 index 0000000000000..cde50ae28e41b --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.expected @@ -0,0 +1,49 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//extract +function name(): string { +} + +//extract +function name(user: User): User.Name { +} + +//extract +function name(user: User): Genric { +} +==================================== OUTPUT =================================== +✖︎ Expected to have at least one function parameter + + :10:1 + 8 │ + 9 │ //extract + 10 │ function name(): string { + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ + 11 │ } + │ ^ + 12 │ + + +✖︎ Unsupported type + + :14:28 + 13 │ //extract + 14 │ function name(user: User): User.Name { + │ ^^^^^^^^^ + 15 │ } + + +✖︎ Multiple type params is not supported + + :18:28 + 17 │ //extract + 18 │ function name(user: User): Genric { + │ ^^^^^^^^^^^^^^^^^^^^^^^ + 19 │ } diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.js b/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.js new file mode 100644 index 0000000000000..2b3730263f25f --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/functions.unsupported.js @@ -0,0 +1,19 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + + +//extract +function name(): string { +} + +//extract +function name(user: User): User.Name { +} + +//extract +function name(user: User): Genric { +} diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.expected b/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.expected new file mode 100644 index 0000000000000..7bb6a2c72983e --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.expected @@ -0,0 +1,292 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//extract +function live_user(user: User): LiveState {} + +//extract +function live_generic_user(user: User): LiveState> {} + +//extract +function live_array_generic_optinal_user(user: User): LiveState>> {} +==================================== OUTPUT =================================== +Strong( + FieldData { + field_name: WithLocation { + location: :211:220, + item: "live_user", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 244, + end: 248, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 244, + end: 248, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 227, + end: 231, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 227, + end: 231, + }, + }, + ), + arguments: None, + is_live: Some( + :234:249, + ), + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :273:290, + item: 
"live_generic_user", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Generic", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 314, + end: 321, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 322, + end: 326, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 322, + end: 326, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 321, + end: 327, + }, + }, + ), + loc: None, + range: SourceRange { + start: 314, + end: 327, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 297, + end: 301, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 297, + end: 301, + }, + }, + ), + arguments: None, + is_live: Some( + :304:328, + ), + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :352:383, + item: "live_array_generic_optinal_user", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Array", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 407, + end: 412, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Generic", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 413, + end: 420, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + NullableTypeAnnotation( + NullableTypeAnnotation { + type_annotation: 
GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 422, + end: 426, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 422, + end: 426, + }, + }, + ), + loc: None, + range: SourceRange { + start: 421, + end: 426, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 420, + end: 427, + }, + }, + ), + loc: None, + range: SourceRange { + start: 413, + end: 427, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 412, + end: 428, + }, + }, + ), + loc: None, + range: SourceRange { + start: 407, + end: 428, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 390, + end: 394, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 390, + end: 394, + }, + }, + ), + arguments: None, + is_live: Some( + :397:429, + ), + }, +) diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.js b/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.js new file mode 100644 index 0000000000000..e68a0bb62c585 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/generics.js @@ -0,0 +1,15 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//extract +function live_user(user: User): LiveState {} + +//extract +function live_generic_user(user: User): LiveState> {} + +//extract +function live_array_generic_optinal_user(user: User): LiveState>> {} diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.expected b/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.expected new file mode 100644 index 0000000000000..85fc7529fa935 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.expected @@ -0,0 +1,352 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//extract +function plural_string(user: User): Array {} + +//extract +function plural_optional_string(user: User): Array {} + +//extract +function optional_plural_string(user: User): ?Array {} + +//extract +function optional_plural_optional_string(user: User): ?Array {} +==================================== OUTPUT =================================== +Strong( + FieldData { + field_name: WithLocation { + location: :212:225, + item: "plural_string", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Array", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 239, + end: 244, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + StringTypeAnnotation( + StringTypeAnnotation { + loc: None, + range: SourceRange { + start: 245, + end: 251, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 244, + end: 252, + }, + }, + ), + loc: None, + range: SourceRange { + start: 239, + end: 252, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + 
Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 232, + end: 236, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 232, + end: 236, + }, + }, + ), + arguments: None, + is_live: None, + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :276:298, + item: "plural_optional_string", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Array", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 312, + end: 317, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + NullableTypeAnnotation( + NullableTypeAnnotation { + type_annotation: StringTypeAnnotation( + StringTypeAnnotation { + loc: None, + range: SourceRange { + start: 319, + end: 325, + }, + }, + ), + loc: None, + range: SourceRange { + start: 318, + end: 325, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 317, + end: 326, + }, + }, + ), + loc: None, + range: SourceRange { + start: 312, + end: 326, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 305, + end: 309, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 305, + end: 309, + }, + }, + ), + arguments: None, + is_live: None, + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :350:372, + item: "optional_plural_string", + }, + return_type: NullableTypeAnnotation( + NullableTypeAnnotation { + type_annotation: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Array", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 387, + end: 392, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + 
params: [ + StringTypeAnnotation( + StringTypeAnnotation { + loc: None, + range: SourceRange { + start: 393, + end: 399, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 392, + end: 400, + }, + }, + ), + loc: None, + range: SourceRange { + start: 387, + end: 400, + }, + }, + ), + loc: None, + range: SourceRange { + start: 386, + end: 400, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 379, + end: 383, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 379, + end: 383, + }, + }, + ), + arguments: None, + is_live: None, + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :424:455, + item: "optional_plural_optional_string", + }, + return_type: NullableTypeAnnotation( + NullableTypeAnnotation { + type_annotation: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "Array", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 470, + end: 475, + }, + }, + ), + type_parameters: Some( + TypeParameterInstantiation { + params: [ + NullableTypeAnnotation( + NullableTypeAnnotation { + type_annotation: StringTypeAnnotation( + StringTypeAnnotation { + loc: None, + range: SourceRange { + start: 477, + end: 483, + }, + }, + ), + loc: None, + range: SourceRange { + start: 476, + end: 483, + }, + }, + ), + ], + loc: None, + range: SourceRange { + start: 475, + end: 484, + }, + }, + ), + loc: None, + range: SourceRange { + start: 470, + end: 484, + }, + }, + ), + loc: None, + range: SourceRange { + start: 469, + end: 484, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 462, + end: 466, + }, + }, + ), + 
type_parameters: None, + loc: None, + range: SourceRange { + start: 462, + end: 466, + }, + }, + ), + arguments: None, + is_live: None, + }, +) diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.js b/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.js new file mode 100644 index 0000000000000..465d83bf44cd9 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/plural-optional.js @@ -0,0 +1,19 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + + +//extract +function plural_string(user: User): Array {} + +//extract +function plural_optional_string(user: User): Array {} + +//extract +function optional_plural_string(user: User): ?Array {} + +//extract +function optional_plural_optional_string(user: User): ?Array {} \ No newline at end of file diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.expected b/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.expected new file mode 100644 index 0000000000000..b70dc34fe036b --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.expected @@ -0,0 +1,155 @@ +==================================== INPUT ==================================== +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +//extract +function id(user: User): ID {} + +//extract +function number(user: User): number {} + +//extract +function string(user: User): string {} +==================================== OUTPUT =================================== +Strong( + FieldData { + field_name: WithLocation { + location: :211:213, + item: "id", + }, + return_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "ID", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 227, + end: 229, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 227, + end: 229, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 220, + end: 224, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 220, + end: 224, + }, + }, + ), + arguments: None, + is_live: None, + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :253:259, + item: "number", + }, + return_type: NumberTypeAnnotation( + NumberTypeAnnotation { + loc: None, + range: SourceRange { + start: 273, + end: 279, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: "User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 266, + end: 270, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 266, + end: 270, + }, + }, + ), + arguments: None, + is_live: None, + }, +) + +Strong( + FieldData { + field_name: WithLocation { + location: :303:309, + item: "string", + }, + return_type: StringTypeAnnotation( + StringTypeAnnotation { + loc: None, + range: SourceRange { + start: 323, + end: 329, + }, + }, + ), + entity_type: GenericTypeAnnotation( + GenericTypeAnnotation { + id: Identifier( + Identifier { + name: 
"User", + binding: None, + type_annotation: None, + loc: None, + range: SourceRange { + start: 316, + end: 320, + }, + }, + ), + type_parameters: None, + loc: None, + range: SourceRange { + start: 316, + end: 320, + }, + }, + ), + arguments: None, + is_live: None, + }, +) diff --git a/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.js b/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.js new file mode 100644 index 0000000000000..d3fb2fcec6bb2 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract/fixtures/primitives.js @@ -0,0 +1,16 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +//extract +function id(user: User): ID {} + +//extract +function number(user: User): number {} + +//extract +function string(user: User): string {} + diff --git a/compiler/crates/relay-schema-generation/tests/extract_test.rs b/compiler/crates/relay-schema-generation/tests/extract_test.rs new file mode 100644 index 0000000000000..80138eb198579 --- /dev/null +++ b/compiler/crates/relay-schema-generation/tests/extract_test.rs @@ -0,0 +1,48 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<> + */ + +mod extract; + +use extract::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn arguments() { + let input = include_str!("extract/fixtures/arguments.js"); + let expected = include_str!("extract/fixtures/arguments.expected"); + test_fixture(transform_fixture, file!(), "arguments.js", "extract/fixtures/arguments.expected", input, expected).await; +} + +#[tokio::test] +async fn functions_unsupported() { + let input = include_str!("extract/fixtures/functions.unsupported.js"); + let expected = include_str!("extract/fixtures/functions.unsupported.expected"); + test_fixture(transform_fixture, file!(), "functions.unsupported.js", "extract/fixtures/functions.unsupported.expected", input, expected).await; +} + +#[tokio::test] +async fn generics() { + let input = include_str!("extract/fixtures/generics.js"); + let expected = include_str!("extract/fixtures/generics.expected"); + test_fixture(transform_fixture, file!(), "generics.js", "extract/fixtures/generics.expected", input, expected).await; +} + +#[tokio::test] +async fn plural_optional() { + let input = include_str!("extract/fixtures/plural-optional.js"); + let expected = include_str!("extract/fixtures/plural-optional.expected"); + test_fixture(transform_fixture, file!(), "plural-optional.js", "extract/fixtures/plural-optional.expected", input, expected).await; +} + +#[tokio::test] +async fn primitives() { + let input = include_str!("extract/fixtures/primitives.js"); + let expected = include_str!("extract/fixtures/primitives.expected"); + test_fixture(transform_fixture, file!(), "primitives.js", "extract/fixtures/primitives.expected", input, expected).await; +} diff --git a/compiler/crates/relay-schema/Cargo.toml b/compiler/crates/relay-schema/Cargo.toml index 2038a4ac75158..9cf972fb34e59 100644 --- a/compiler/crates/relay-schema/Cargo.toml +++ b/compiler/crates/relay-schema/Cargo.toml @@ -1,13 +1,17 @@ # @generated by autocargo from 
//relay/oss/crates/relay-schema:relay-schema + [package] name = "relay-schema" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] common = { path = "../common" } +docblock-shared = { path = "../docblock-shared" } +graphql-syntax = { path = "../graphql-syntax" } intern = { path = "../intern" } lazy_static = "1.4" schema = { path = "../schema" } diff --git a/compiler/crates/relay-schema/src/definitions.rs b/compiler/crates/relay-schema/src/definitions.rs new file mode 100644 index 0000000000000..0ff33634c7d78 --- /dev/null +++ b/compiler/crates/relay-schema/src/definitions.rs @@ -0,0 +1,56 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::NamedItem; +use docblock_shared::RELAY_RESOLVER_MODEL_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; +use docblock_shared::RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE; +use schema::Schema; +use schema::Type; + +pub trait ResolverType { + fn is_resolver_object(&self, schema: &S) -> bool; + fn is_weak_resolver_object(&self, schema: &S) -> bool; + fn is_terse_resolver_object(&self, schema: &S) -> bool; +} + +impl ResolverType for Type { + fn is_resolver_object(&self, schema: &S) -> bool { + if let Type::Object(object_id) = self { + let object = schema.object(*object_id); + object + .directives + .named(*RELAY_RESOLVER_MODEL_DIRECTIVE_NAME) + .is_some() + } else { + false + } + } + + fn is_weak_resolver_object(&self, schema: &S) -> bool { + if let Type::Object(object_id) = self { + let object = schema.object(*object_id); + object + .directives + .named(*RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE) + .is_some() + } else { + false + } + } + + fn is_terse_resolver_object(&self, schema: &S) -> bool { + if let Type::Object(object_id) = self { + let object = 
schema.object(*object_id); + object.fields.iter().any(|field_id| { + schema.field(*field_id).name.item == *RELAY_RESOLVER_MODEL_INSTANCE_FIELD + }) + } else { + false + } + } +} diff --git a/compiler/crates/relay-schema/src/lib.rs b/compiler/crates/relay-schema/src/lib.rs index 881e177eab539..7f835de05ea3b 100644 --- a/compiler/crates/relay-schema/src/lib.rs +++ b/compiler/crates/relay-schema/src/lib.rs @@ -11,6 +11,7 @@ #![deny(rust_2018_idioms)] #![deny(clippy::all)] +pub mod definitions; use std::iter::once; use ::intern::string_key::StringKey; @@ -18,6 +19,7 @@ use common::ArgumentName; use common::DiagnosticsResult; use common::DirectiveName; use common::SourceLocationKey; +use graphql_syntax::SchemaDocument; use intern::intern; use lazy_static::lazy_static; use schema::ArgumentDefinitions; @@ -35,7 +37,10 @@ lazy_static! { pub static ref EXPORT_NAME_CUSTOM_SCALAR_ARGUMENT_NAME: StringKey = intern!("export_name"); } -pub fn build_schema_with_extensions, U: AsRef>( +pub fn build_schema_with_extensions< + T: AsRef + std::marker::Sync, + U: AsRef + std::marker::Sync, +>( server_sdls: &[(T, SourceLocationKey)], extension_sdls: &[(U, SourceLocationKey)], ) -> DiagnosticsResult { @@ -49,14 +54,32 @@ pub fn build_schema_with_extensions, U: AsRef>( .collect(); let mut schema = schema::build_schema_with_extensions(server_sdls, &extensions)?; + remove_defer_stream_label(&mut schema); + Ok(schema) +} + +pub fn build_schema_with_extensions_from_asts( + server_sdls: Vec, + mut extension_sdls: Vec, +) -> DiagnosticsResult { + let relay_extensions_ast = + graphql_syntax::parse_schema_document(RELAY_EXTENSIONS, SourceLocationKey::generated())?; + + extension_sdls.push(relay_extensions_ast); - // Remove label arg from @defer and @stream directives since the compiler - // adds these arguments. 
+ let mut schema = SDLSchema::build(&server_sdls, &extension_sdls)?; + remove_defer_stream_label(&mut schema); + Ok(schema) +} + +/// Remove label arg from @defer and @stream directives since the compiler +/// adds these arguments. +fn remove_defer_stream_label(schema: &mut SDLSchema) { for directive_name in &[*DEFER, *STREAM] { if let Some(directive) = schema.get_directive_mut(*directive_name) { let mut next_args: Vec<_> = directive.arguments.iter().cloned().collect(); for arg in next_args.iter_mut() { - if arg.name == *LABEL { + if arg.name.item == *LABEL { if let TypeReference::NonNull(of) = &arg.type_ { arg.type_ = *of.clone() }; @@ -65,5 +88,4 @@ pub fn build_schema_with_extensions, U: AsRef>( directive.arguments = ArgumentDefinitions::new(next_args); } } - Ok(schema) } diff --git a/compiler/crates/relay-schema/src/relay-extensions.graphql b/compiler/crates/relay-schema/src/relay-extensions.graphql index 819977bb79674..1de631974fa0c 100644 --- a/compiler/crates/relay-schema/src/relay-extensions.graphql +++ b/compiler/crates/relay-schema/src/relay-extensions.graphql @@ -22,6 +22,19 @@ directive @no_inline(raw_response_type: Boolean) on FRAGMENT_DEFINITION """ (Relay only) +A directive added to queries and fragments which causes the Relay client to throw +if reading a field that has an error. Relay will also honor the @semanticNonNull +directive on fields read from that query or fragment. Emitted types for such +fields will be non-null. Requires the `experimental_emit_semantic_nullability_types` +typegen configuration to be enabled. + +[Read More](https://relay.dev/docs/api-reference/graphql-and-directives/) +""" +directive @throwOnFieldError on QUERY | FRAGMENT_DEFINITION + +""" +(Relay only) + +A directive added to queries which tells Relay to generate types that cover the `optimisticResponse` parameter to `commitMutation`. @@ -45,6 +58,7 @@ types, or on a type that implements `Node` (i.e. a type that has an id). directive @refetchable( queryName: String!
directives: [String!] + preferFetchable: Boolean ) on FRAGMENT_DEFINITION """ @@ -141,6 +155,17 @@ enum RequiredFieldAction { THROW } +# CatchTransform +""" +(Relay Only) + +NULL and RESULT are the `to` values you can use in the @catch directive to tell relay how to treat field-level errors +""" +enum CatchFieldTo { + NULL + RESULT +} + """ (Relay Only) @@ -153,6 +178,14 @@ null". """ directive @required(action: RequiredFieldAction! @static) on FIELD +""" +(Relay Only) + +`@catch` is a directive you can add to fields in your Relay queries to +declare how errors are handled. +""" +directive @catch(to: CatchFieldTo! = RESULT @static) on FIELD + # DeclarativeConnection """ (Relay Only) @@ -214,9 +247,6 @@ will be prepended to its parent connection. """ directive @prependNode(connections: [ID!]!, edgeTypeName: String!) on FIELD -# RelayClientComponentTransform -directive @relay_client_component on FRAGMENT_SPREAD - # RelayResolver directive @relay_resolver( fragment_name: String! @@ -260,7 +290,7 @@ directive @RelayOutputType on OBJECT Marks a given query or fragment as updatable. -[Read More](https://fb.quip.com/4FZaADvkQPPl) +[Read More](https://relay.dev/docs/next/guided-tour/updating-data/imperatively-modifying-linked-fields/) """ directive @updatable on QUERY | FRAGMENT_DEFINITION @@ -269,7 +299,7 @@ directive @updatable on QUERY | FRAGMENT_DEFINITION Marks a given fragment as assignable. -[Read More](https://fb.quip.com/4FZaADvkQPPl) +[Read More](https://relay.dev/docs/next/guided-tour/updating-data/imperatively-modifying-linked-fields/) """ directive @assignable on FRAGMENT_DEFINITION @@ -278,12 +308,27 @@ directive @assignable on FRAGMENT_DEFINITION Exposes a fragment's data as a new field which can be null checked to ensure it matches the parent selection. 
+ +[Read More](https://relay.dev/docs/next/guides/alias-directive/) """ directive @alias(as: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT """ (Relay Only) +This directive allows users to opt out of validation which enforces that @alias +be used on all fragment spreads which might not match. It is intended as an +escape hatch for incremental adoption of enforcing `@alias`. + +DO NOT ADD NEW USES OF THIS DIRECTIVE. + +[Read More](https://relay.dev/docs/next/guides/alias-directive/) +""" +directive @dangerously_unaliased_fixme on FRAGMENT_SPREAD + +""" +(Relay Only) + Indicates that a given directive argument is expected to be provided statically. If a non-static value is provided, it will result in a validation error. diff --git a/compiler/crates/relay-test-schema/Cargo.toml b/compiler/crates/relay-test-schema/Cargo.toml index 0719eafe3ff08..53279f59bd1f2 100644 --- a/compiler/crates/relay-test-schema/Cargo.toml +++ b/compiler/crates/relay-test-schema/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/relay-test-schema:relay-test-schema + [package] name = "relay-test-schema" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] diff --git a/compiler/crates/relay-test-schema/src/testschema.graphql b/compiler/crates/relay-test-schema/src/testschema.graphql index bb46328a8ccfe..c356122384f95 100644 --- a/compiler/crates/relay-test-schema/src/testschema.graphql +++ b/compiler/crates/relay-test-schema/src/testschema.graphql @@ -6,6 +6,12 @@ schema { subscription: Subscription } +directive @credentials( + use_case: String! + vc_transform: String! + token: String +) on FIELD + directive @live_query(polling_interval: Int, config_id: String) on QUERY directive @customDirective(level: Int!) on FIELD | FRAGMENT_SPREAD @@ -25,9 +31,9 @@ directive @defer( directive @stream( label: String! - initial_count: Int! + initialCount: Int! 
if: Boolean = true - use_customized_batch: Boolean = false + useCustomizedBatch: Boolean = false ) on FIELD directive @fetchable(field_name: String!) on OBJECT @@ -54,6 +60,7 @@ type Query { _mutation: Mutation fetch__User(id: ID!): User fetch__NonNodeStory(input_fetch_id: ID!): NonNodeStory + fetch__FetchableType(fetch_id: ID!): FetchableType nonNodeStory(id: ID!): NonNodeStory wrongViewer: WithWrongViewer } @@ -64,16 +71,6 @@ interface MaybeNodeInterface { scalar JSON -directive @react_flight( - components: [String!]! -) on QUERY | MUTATION | SUBSCRIPTION - -scalar ReactFlightComponent - -scalar ReactFlightProps - -directive @relay_client_component_server(module_id: String!) on FRAGMENT_SPREAD - union MaybeNode = Story | FakeNode | NonNode type FakeNode { @@ -124,6 +121,7 @@ type Mutation { commentDelete(input: CommentDeleteInput): CommentDeleteResponsePayload commentsDelete(input: CommentsDeleteInput): CommentsDeleteResponsePayload feedbackLike(input: FeedbackLikeInput): FeedbackLikeResponsePayload + feedbackUnLike(input: FeedbackUnLikeInput): FeedbackUnLikeResponsePayload feedbackLikeStrict( input: FeedbackLikeInputStrict! ): FeedbackLikeResponsePayload @@ -192,6 +190,11 @@ input FeedbackLikeInput { feedbackId: ID } +input FeedbackUnLikeInput { + feedbackId: ID + silent: Boolean! = false +} + input FeedbackLikeInputStrict { userID: ID! feedbackId: ID! 
@@ -456,6 +459,10 @@ type FeedbackLikeResponsePayload { feedback: Feedback } +type FeedbackUnLikeResponsePayload { + feedback: Feedback +} + interface FeedUnit { actor: Actor actorCount: Int @@ -796,7 +803,6 @@ type PhotoStory implements FeedUnit & Node { type Story implements FeedUnit & Node & MaybeNodeInterface { attachments: [StoryAttachment] - flight(component: String!, props: ReactFlightProps): ReactFlightComponent # FeedUnit canViewerDelete: Boolean @@ -1186,3 +1192,22 @@ type Settings { type WithWrongViewer { actor_key: Viewer } + +extend type Query { + opera: Opera +} + +type Opera { + composer: User @semanticNonNull + cast: [Portrayal] @semanticNonNull(levels: [0, 1]) +} + +type Portrayal { + singer: User @semanticNonNull + character: String @semanticNonNull +} + +type FetchableType implements Node @fetchable(field_name: "fetch_id") { + id: ID! + fetch_id: ID! +} diff --git a/compiler/crates/relay-test-schema/src/testschema_with_custom_id.graphql b/compiler/crates/relay-test-schema/src/testschema_with_custom_id.graphql index 643e61cca2a64..5c30aad299b27 100644 --- a/compiler/crates/relay-test-schema/src/testschema_with_custom_id.graphql +++ b/compiler/crates/relay-test-schema/src/testschema_with_custom_id.graphql @@ -25,9 +25,9 @@ directive @defer( directive @stream( label: String! - initial_count: Int! + initialCount: Int! if: Boolean = true - use_customized_batch: Boolean = false + useCustomizedBatch: Boolean = false ) on FIELD directive @fetchable(field_name: String!) on OBJECT @@ -64,16 +64,6 @@ interface MaybeNodeInterface { scalar JSON -directive @react_flight( - components: [String!]! -) on QUERY | MUTATION | SUBSCRIPTION - -scalar ReactFlightComponent - -scalar ReactFlightProps - -directive @relay_client_component_server(module_id: String!) 
on FRAGMENT_SPREAD - union MaybeNode = Story | FakeNode | NonNode type FakeNode { @@ -790,7 +780,6 @@ type PhotoStory implements FeedUnit & Node { type Story implements FeedUnit & Node & MaybeNodeInterface { attachments: [StoryAttachment] - flight(component: String!, props: ReactFlightProps): ReactFlightComponent # FeedUnit canViewerDelete: Boolean diff --git a/compiler/crates/relay-transforms/Cargo.toml b/compiler/crates/relay-transforms/Cargo.toml index 9d61fa9abc2fb..d8f584b97b43f 100644 --- a/compiler/crates/relay-transforms/Cargo.toml +++ b/compiler/crates/relay-transforms/Cargo.toml @@ -1,9 +1,11 @@ -# @generated by autocargo from //relay/oss/crates/relay-transforms:[apply_fragment_arguments_test,assignable_directive_test,assignable_fragment_spread_test,client_edges_test,client_extensions_test,declarative_connection_test,disallow_typename_on_root_test,fragment_alias_directive_test,generate_data_driven_dependency_metadata_test,generate_live_query_metadata_test,generate_relay_resolvers_operations_for_nested_objects_test,graphql-client_extensions_abstract_types-test,graphql-defer_stream-test,graphql-disallow_non_node_id_fields-test,graphql-disallow_reserved_aliases-test,graphql-flatten-test,graphql-generate_id_field-test,graphql-generate_typename-test,graphql-inline_fragments-test,graphql-mask-test,graphql-match-local-test,graphql-match-test,graphql-node_identifier-test,graphql-refetchable_fragment_test,graphql-skip_client_extensions-test,graphql-skip_redundant_nodes-test,graphql-skip_unreachable_nodes-test,graphql-sort_selections-test,graphql-subscription_transform-test,graphql-validate_deprecated_fields_test,graphql-validate_module_names-test,graphql-validate_relay_directives-test,graphql-validate_required_arguments_test,graphql-validate_server_only_directives-test,graphql-validate_unused_variables-test,inline_data_fragment_test,provided-variable-fragment-transform-test,relay-actor-change-test,relay-client-component-test,relay-transforms,relay_resolvers_test,re
lay_test_operation_test,required_directive_test,skip_unused_variables_test,transform_connections_test,updatable_directive_test,updatable_fragment_spread_test,validate_connections_schema_test,validate_connections_test,validate_global_variable_names_test,validate_global_variables-test,validate_no_double_underscore_alias_test,validate_no_unselectable_selections_test,validate_static_args] +# @generated by autocargo from //relay/oss/crates/relay-transforms:[apply_fragment_arguments_test,assignable_directive_test,assignable_fragment_spread_test,catch_directive_test,client_edges_test,client_extensions_test,declarative_connection_test,disallow_typename_on_root_test,fragment_alias_directive_test,generate_data_driven_dependency_metadata_test,generate_live_query_metadata_test,generate_relay_resolvers_operations_for_nested_objects_test,graphql-client_extensions_abstract_types-test,graphql-defer_stream-test,graphql-disallow_non_node_id_fields-test,graphql-disallow_required_on_non_null_field-test,graphql-disallow_reserved_aliases-test,graphql-disallowreadtime_features_in_mutations-test,graphql-flatten-test,graphql-generate_id_field-test,graphql-generate_typename-test,graphql-inline_fragments-test,graphql-mask-test,graphql-match-local-test,graphql-match-test,graphql-node_identifier-test,graphql-refetchable_fragment_test,graphql-skip_client_extensions-test,graphql-skip_redundant_nodes-test,graphql-skip_unreachable_nodes-test,graphql-sort_selections-test,graphql-subscription_transform-test,graphql-validate_deprecated_fields_test,graphql-validate_module_names-test,graphql-validate_relay_directives-test,graphql-validate_required_arguments_test,graphql-validate_server_only_directives-test,graphql-validate_unused_variables-test,inline_data_fragment_test,provided-variable-fragment-transform-test,relay-actor-change-test,relay-transforms,relay_resolvers_abstract_types_test,relay_resolvers_test,relay_test_operation_test,required_directive_test,skip_unused_variables_test,transform_connection
s_test,updatable_directive_test,updatable_fragment_spread_test,validate_connections_schema_test,validate_connections_test,validate_global_variable_names_test,validate_global_variables-test,validate_no_double_underscore_alias_test,validate_no_unselectable_selections_test,validate_static_args] + [package] name = "relay-transforms" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -18,10 +20,18 @@ path = "tests/defer_stream_test.rs" name = "graphql_disallow_non_node_id_fields_test" path = "tests/disallow_non_node_id_fields_test.rs" +[[test]] +name = "graphql_disallow_required_on_non_null_field_test" +path = "tests/disallow_required_on_non_null_field_test.rs" + [[test]] name = "graphql_disallow_reserved_aliases_test" path = "tests/disallow_reserved_aliases_test.rs" +[[test]] +name = "graphql_disallowreadtime_features_in_mutations_test" +path = "tests/disallow_readtime_features_in_mutations_test.rs" + [[test]] name = "graphql_flatten_test" path = "tests/flatten_test.rs" @@ -108,7 +118,7 @@ path = "tests/validate_static_args_test.rs" [dependencies] common = { path = "../common" } -dashmap = { version = "5.4", features = ["raw-api", "rayon", "serde"] } +dashmap = { version = "5.5.3", features = ["rayon", "serde"] } docblock-shared = { path = "../docblock-shared" } errors = { path = "../errors" } fnv = "1.0" @@ -116,20 +126,22 @@ graphql-ir = { path = "../graphql-ir" } graphql-ir-validations = { path = "../graphql-ir-validations" } graphql-syntax = { path = "../graphql-syntax" } graphql-text-printer = { path = "../graphql-text-printer" } -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } -itertools = "0.10.3" +itertools = "0.11.0" lazy_static = "1.4" -once_cell = "1.12" -parking_lot = { version = "0.11.2", features = ["send_guard"] } -regex = "1.6.0" 
+parking_lot = { version = "0.12.1", features = ["send_guard"] } +regex = "1.9.2" relay-config = { path = "../relay-config" } +relay-schema = { path = "../relay-schema" } rustc-hash = "1.1.0" schema = { path = "../schema" } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } graphql-cli = { path = "../graphql-cli" } graphql-test-helpers = { path = "../graphql-test-helpers" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/relay-transforms/src/apply_fragment_arguments.rs b/compiler/crates/relay-transforms/src/apply_fragment_arguments.rs new file mode 100644 index 0000000000000..8daa00b4442db --- /dev/null +++ b/compiler/crates/relay-transforms/src/apply_fragment_arguments.rs @@ -0,0 +1,730 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod scope; + +use std::collections::HashMap; +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::FeatureFlag; +use common::Location; +use common::NamedItem; +use common::SourceLocationKey; +use common::WithLocation; +use graphql_ir::associated_data_impl; +use graphql_ir::transform_list; +use graphql_ir::transform_list_multi; +use graphql_ir::Condition; +use graphql_ir::ConditionValue; +use graphql_ir::ConstantValue; +use graphql_ir::Directive; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentDefinitionNameMap; +use graphql_ir::FragmentDefinitionNameSet; +use graphql_ir::FragmentSpread; +use graphql_ir::InlineFragment; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::ProvidedVariableMetadata; +use graphql_ir::Selection; +use graphql_ir::Transformed; +use graphql_ir::TransformedMulti; +use graphql_ir::TransformedValue; +use graphql_ir::Transformer; +use graphql_ir::Value; +use graphql_ir::Variable; +use graphql_ir::VariableDefinition; +use graphql_ir::VariableName; +use graphql_syntax::OperationKind; +use intern::string_key::Intern; +use intern::string_key::StringKeyIndexMap; +use intern::string_key::StringKeyMap; +use itertools::Itertools; +use scope::format_local_variable; +use scope::Scope; +use thiserror::Error; + +use super::get_applied_fragment_name; +use crate::match_::SplitOperationMetadata; +use crate::match_::DIRECTIVE_SPLIT_OPERATION; +use crate::no_inline::is_raw_response_type_enabled; +use crate::no_inline::NO_INLINE_DIRECTIVE_NAME; +use crate::no_inline::PARENT_DOCUMENTS_ARG; +use crate::util::get_normalization_operation_name; +use crate::RawResponseGenerationMode; +use crate::RelayResolverMetadata; + +/// A transform that converts a set of documents containing fragments/fragment +/// spreads *with* arguments to one where all arguments have been 
inlined. This +/// is effectively static currying of functions. Nodes are changed as follows: +/// - Fragment spreads with arguments are replaced with references to an inlined +/// version of the referenced fragment. +/// - Fragments with argument definitions are cloned once per unique set of +/// arguments, with the name changed to original name + hash and all nested +/// variable references changed to the value of that variable given its +/// arguments. +/// - Field & directive argument variables are replaced with the value of those +/// variables in context. +/// - Definitions of provided variables are added to root operations. +/// - All nodes are cloned with updated children. +/// +/// The transform also handles statically passing/failing Condition nodes: +/// - Literal Conditions with a passing value are elided and their selections +/// inlined in their parent. +/// - Literal Conditions with a failing value are removed. +/// - Nodes that would become empty as a result of the above are removed. +/// +/// Note that unreferenced fragments are not added to the output. +pub fn apply_fragment_arguments( + program: &Program, + is_normalization: bool, + no_inline_feature: &FeatureFlag, + base_fragment_names: &FragmentDefinitionNameSet, +) -> DiagnosticsResult { + let mut transform = ApplyFragmentArgumentsTransform { + base_fragment_names, + errors: Vec::new(), + fragments: Default::default(), + is_normalization, + no_inline_feature, + program, + provided_variables: Default::default(), + scope: Default::default(), + split_operations: Default::default(), + }; + + let mut next_program = transform + .transform_program(program) + .replace_or_else(|| program.clone()); + + for (fragment_name, used_fragment) in transform.fragments { + match used_fragment { + PendingFragment::Resolved { + fragment_definition: Some(fragment), + .. + } => next_program.insert_fragment(fragment), + PendingFragment::Resolved { + fragment_definition: None, + .. 
+ } => { + // The fragment ended up empty, do not add to result Program. + } + PendingFragment::Pending => panic!("Unexpected case, {}", fragment_name), + } + } + + for (_, (operation, _)) in transform.split_operations { + if let Some(operation) = operation { + next_program.insert_operation(Arc::new(operation)); + } + } + + if transform.errors.is_empty() { + Ok(next_program) + } else { + Err(transform.errors) + } +} + +type ProvidedVariablesMap = StringKeyIndexMap; + +#[derive(Debug)] +enum PendingFragment { + Pending, + Resolved { + fragment_definition: Option>, + provided_variables: ProvidedVariablesMap, + }, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct NoInlineFragmentSpreadMetadata { + pub location: SourceLocationKey, +} + +associated_data_impl!(NoInlineFragmentSpreadMetadata); + +struct ApplyFragmentArgumentsTransform<'flags, 'program, 'base_fragments> { + base_fragment_names: &'base_fragments FragmentDefinitionNameSet, + errors: Vec, + fragments: FragmentDefinitionNameMap, + is_normalization: bool, + no_inline_feature: &'flags FeatureFlag, + program: &'program Program, + // used to keep track of the provided variables used by the current + // operation / fragment / no-inline fragment and its transitively + // included fragments + provided_variables: ProvidedVariablesMap, + scope: Scope, + split_operations: StringKeyMap<(Option, ProvidedVariablesMap)>, +} + +impl Transformer for ApplyFragmentArgumentsTransform<'_, '_, '_> { + const NAME: &'static str = "ApplyFragmentArgumentsTransform"; + const VISIT_ARGUMENTS: bool = true; + const VISIT_DIRECTIVES: bool = true; + + fn transform_operation( + &mut self, + operation: &OperationDefinition, + ) -> Transformed { + self.scope = Scope::root_scope(); + self.provided_variables = Default::default(); + let transform_result = self.default_transform_operation(operation); + if self.provided_variables.is_empty() + || operation + .directives + .named(*DIRECTIVE_SPLIT_OPERATION) + .is_some() + { + // this 
transform does not add the SplitOperation directive, so this + // should be equal to checking whether the result is a split operation + self.provided_variables.clear(); + transform_result + } else { + let mut add_provided_variables = |new_operation: &mut OperationDefinition| { + new_operation.variable_definitions.append( + &mut self + .provided_variables + .drain(..) + .map(|(_, definition)| definition) + .collect_vec(), + ); + }; + match transform_result { + Transformed::Keep => { + let mut new_operation = operation.clone(); + add_provided_variables(&mut new_operation); + Transformed::Replace(new_operation) + } + Transformed::Replace(mut new_operation) => { + add_provided_variables(&mut new_operation); + Transformed::Replace(new_operation) + } + Transformed::Delete => Transformed::Delete, + } + } + } + + fn transform_fragment( + &mut self, + fragment: &FragmentDefinition, + ) -> Transformed { + if self.is_normalization { + let no_inline_directive = fragment.directives.named(*NO_INLINE_DIRECTIVE_NAME); + if let Some(no_inline_directive) = no_inline_directive { + self.transform_no_inline_fragment(fragment, no_inline_directive); + } + } + // Non-inlined fragments are promoted to operations; other fragments are deleted + // unless they are referenced + Transformed::Delete + } + + fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { + let fragment = self + .program + .fragment(spread.fragment.item) + .unwrap_or_else(|| { + panic!( + "Tried to spread missing fragment: `{}`.", + spread.fragment.item + ); + }); + + // Validate that the fragment spread does not try to pass in provided variables + for (original_definition_name, definition_location) in + fragment.used_global_variables.iter().filter_map(|def| { + Some(( + ProvidedVariableMetadata::find(&def.directives)?.original_variable_name, + def.name.location, + )) + }) + { + if let Some(invalid_argument) = spread + .arguments + .named(ArgumentName(original_definition_name.0)) + { + 
self.errors.push( + Diagnostic::error( + ValidationMessage::ProvidedVariableIncompatibleWithArguments { + original_definition_name, + }, + invalid_argument.name.location, + ) + .annotate("Provided variable defined here", definition_location), + ); + } + } + + if self.is_normalization { + if let Some(directive) = fragment.directives.named(*NO_INLINE_DIRECTIVE_NAME) { + self.transform_no_inline_fragment(fragment, directive); + let transformed_arguments = spread + .arguments + .iter() + .map(|arg| { + let mut arg = self.transform_argument(arg).unwrap_or_else(|| arg.clone()); + arg.name.item.0 = + format_local_variable(fragment.name.item, arg.name.item.0); + arg + }) + .collect(); + let mut directives = Vec::with_capacity(spread.directives.len() + 1); + directives.extend(spread.directives.iter().cloned()); + + directives.push( + NoInlineFragmentSpreadMetadata { + location: fragment.name.location.source_location(), + } + .into(), + ); + + let normalization_name = + get_normalization_operation_name(fragment.name.item.0).intern(); + let next_spread = Selection::FragmentSpread(Arc::new(FragmentSpread { + arguments: transformed_arguments, + directives, + fragment: WithLocation::new( + fragment.name.location, + FragmentDefinitionName(normalization_name), + ), + })); + // If the fragment type is abstract, we need to ensure that it's only evaluated at runtime if the + // type of the object matches the fragment's type condition. Rather than reimplement type refinement + // for fragment spreads, we wrap the fragment spread in an inline fragment (which may be inlined away) + // that ensures it will go through type-refinement at runtime. 
+ return if fragment.type_condition.is_abstract_type() { + Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { + directives: Default::default(), + selections: vec![next_spread], + type_condition: Some(fragment.type_condition), + spread_location: Location::generated(), + }))) + } else { + Transformed::Replace(next_spread) + }; + } + } + + if let Some(applied_fragment) = self.apply_fragment(spread, fragment) { + let directives = self + .transform_directives(&spread.directives) + .replace_or_else(|| spread.directives.clone()); + Transformed::Replace(Selection::FragmentSpread(Arc::new(FragmentSpread { + fragment: applied_fragment.name, + arguments: Vec::new(), + directives, + }))) + } else { + Transformed::Delete + } + } + + fn transform_selections( + &mut self, + selections: &[Selection], + ) -> TransformedValue> { + transform_list_multi(selections, |selection| { + self.transform_selection_multi(selection) + }) + } + + fn transform_directive(&mut self, directive: &Directive) -> Transformed { + if directive.name.item == RelayResolverMetadata::directive_name() { + if let Some(resolver_metadata) = RelayResolverMetadata::from(directive) { + return self + .transform_arguments(&resolver_metadata.field_arguments) + .map(|new_args| { + RelayResolverMetadata { + field_arguments: new_args, + ..resolver_metadata.clone() + } + .into() + }) + .into(); + } + } + self.default_transform_directive(directive) + } + + fn transform_value(&mut self, value: &Value) -> TransformedValue { + match value { + Value::Variable(prev_variable) => { + if let Some(scope_value) = self.scope.get(prev_variable.name.item) { + match scope_value { + Value::Variable(replacement_variable) => { + TransformedValue::Replace(Value::Variable(Variable { + // Update the name/location to the applied variable name + name: replacement_variable.name, + // But keep the type of the previous variable, which reflects the type + // expected at this location + type_: prev_variable.type_.clone(), + })) + 
} + _ => TransformedValue::Replace(scope_value.clone()), + } + } else { + // Assume a global variable if the variable has no local + // bindings. + TransformedValue::Keep + } + } + Value::Constant(_) => TransformedValue::Keep, + Value::List(items) => { + transform_list(items, |value| self.transform_value(value)).map(Value::List) + } + Value::Object(arguments) => self.transform_arguments(arguments).map(Value::Object), + } + } + + fn transform_condition_value( + &mut self, + condition_value: &ConditionValue, + ) -> TransformedValue { + match condition_value { + ConditionValue::Variable(prev_variable) => { + match self.scope.get(prev_variable.name.item) { + Some(Value::Variable(replacement_variable)) => { + TransformedValue::Replace(ConditionValue::Variable(Variable { + // Update the name/location to the applied variable name + name: replacement_variable.name, + // But keep the type of the previous variable, which reflects the type + // expected at this location + type_: prev_variable.type_.clone(), + })) + } + Some(Value::Constant(ConstantValue::Boolean(constant_value))) => { + TransformedValue::Replace(ConditionValue::Constant(*constant_value)) + } + None => { + // Assume a global variable if the variable has no local + // bindings. 
+                        TransformedValue::Keep
+                    }
+                    Some(other_binding) => {
+                        panic!("Invalid variable value for condition: {:?}", other_binding);
+                    }
+                }
+            }
+            ConditionValue::Constant(_) => TransformedValue::Keep,
+        }
+    }
+}
+
+impl ApplyFragmentArgumentsTransform<'_, '_, '_> {
+    fn transform_no_inline_fragment(
+        &mut self,
+        fragment: &FragmentDefinition,
+        directive: &Directive,
+    ) {
+        // If we have already computed, we can return early
+        if let Some((_, provided_variables)) = self.split_operations.get(&fragment.name.item.0) {
+            for (name, def) in provided_variables {
+                self.provided_variables.insert(*name, def.clone());
+            }
+            return;
+        }
+
+        // We do not need to write normalization files for base fragments
+        let is_base = self.base_fragment_names.contains(&fragment.name.item);
+        if !is_base && !self.no_inline_feature.is_enabled_for(fragment.name.item.0) {
+            self.errors.push(Diagnostic::error(
+                format!(
+                    "Invalid usage of @no_inline on fragment '{}': this feature is gated and currently set to: {}",
+                    fragment.name.item, self.no_inline_feature
+                ),
+                directive.name.location,
+            ));
+        }
+
+        // save the context used by the enclosing operation / fragment
+        let mut saved_provided_vars = std::mem::take(&mut self.provided_variables);
+        let saved_scope = std::mem::replace(&mut self.scope, no_inline_fragment_scope(fragment));
+
+        self.extract_provided_variables(fragment);
+        let fragment = self
+            .default_transform_fragment(fragment)
+            .unwrap_or_else(|| fragment.clone());
+        let FragmentDefinition {
+            name,
+            mut directives,
+            mut variable_definitions,
+            selections,
+            type_condition,
+            ..
+ } = fragment; + + for variable in &mut variable_definitions { + variable.name.item = VariableName(format_local_variable( + fragment.name.item, + variable.name.item.0, + )); + } + let mut metadata = SplitOperationMetadata { + derived_from: Some(fragment.name.item), + location: fragment.name.location, + parent_documents: Default::default(), + raw_response_type_generation_mode: is_raw_response_type_enabled(directive) + .then_some(RawResponseGenerationMode::AllFieldsOptional), + }; + // - A fragment with user defined @no_inline always produces a $normalization file. The `parent_document` of + // that file is the fragment itself as it gets deleted iff that fragment is deleted or no longer + // has the @no_inline directive. + // - A fragment with @no_inline generated by @module, `parent_documents` also include fragments that + // spread the current fragment with @module + metadata.parent_documents.insert(fragment.name.item.into()); + let parent_documents_arg = directive.arguments.named(*PARENT_DOCUMENTS_ARG); + if let Some(Value::Constant(ConstantValue::List(parent_documents))) = + parent_documents_arg.map(|arg| &arg.value.item) + { + for val in parent_documents { + if let ConstantValue::String(name) = val { + metadata.parent_documents.insert( + graphql_ir::ExecutableDefinitionName::FragmentDefinitionName( + FragmentDefinitionName(*name), + ), + ); + } else { + panic!("Expected item in the parent_documents to be a StringKey.") + } + } + } + directives.push(metadata.into()); + let normalization_name = get_normalization_operation_name(name.item.0).intern(); + let operation = if is_base { + None + } else { + Some(OperationDefinition { + name: WithLocation::new(name.location, OperationDefinitionName(normalization_name)), + type_: type_condition, + variable_definitions, + directives, + selections, + kind: OperationKind::Query, + }) + }; + + if self + .program + .operation(OperationDefinitionName(normalization_name)) + .is_some() + { + self.errors.push(Diagnostic::error( + 
format!( + "Invalid usage of @no_inline on fragment '{}' - @no_inline is only allowed on allowlisted fragments loaded with @module", + fragment.name.item, + ), + directive.name.location, + )); + } + self.split_operations.insert( + fragment.name.item.0, + (operation, self.provided_variables.clone()), + ); + + // add this fragment's provided variables to that of the enclosing operation / fragment + saved_provided_vars.extend(self.provided_variables.drain(..)); + self.provided_variables = saved_provided_vars; + self.scope = saved_scope; + } + + fn extract_provided_variables(&mut self, fragment: &FragmentDefinition) { + let provided_arguments = + fragment + .used_global_variables + .iter() + .filter(|variable_definition| { + variable_definition + .directives + .named(ProvidedVariableMetadata::directive_name()) + .is_some() + }); + for definition in provided_arguments { + self.provided_variables + .entry(definition.name.item.0) + .or_insert_with(|| definition.clone()); + } + } + + fn apply_fragment( + &mut self, + spread: &FragmentSpread, + fragment: &FragmentDefinition, + ) -> Option> { + let transformed_arguments = self + .transform_arguments(&spread.arguments) + .replace_or_else(|| spread.arguments.clone()); + + let applied_fragment_name = + get_applied_fragment_name(spread.fragment.item, &transformed_arguments); + if let Some(applied_fragment) = self.fragments.get(&applied_fragment_name) { + return match applied_fragment { + PendingFragment::Resolved { + fragment_definition, + provided_variables, + } => { + // add this fragment's provided variables to that of the enclosing + // operation / fragment + for (name, def) in provided_variables.iter() { + self.provided_variables.insert(*name, def.clone()); + } + fragment_definition.clone() + } + PendingFragment::Pending => { + let mut error = Diagnostic::error( + ValidationMessage::CircularFragmentReference { + fragment_name: spread.fragment.item, + }, + spread.fragment.location, + ); + for location in 
self.scope.locations() { + error = error.annotate("other member of the cycle", location); + } + self.errors.push(error); + None + } + }; + } + + self.fragments + .insert(applied_fragment_name, PendingFragment::Pending); + + self.scope + .push(spread.fragment.location, &transformed_arguments, fragment); + // save the context used by the enclosing operation / fragment + let mut saved_provided_vars = std::mem::take(&mut self.provided_variables); + self.extract_provided_variables(fragment); + + let selections = self + .transform_selections(&fragment.selections) + .replace_or_else(|| fragment.selections.clone()); + + let transformed_fragment = if selections.is_empty() { + None + } else { + Some(Arc::new(FragmentDefinition { + name: WithLocation::new(fragment.name.location, applied_fragment_name), + variable_definitions: Vec::new(), + type_condition: fragment.type_condition, + // TODO update globals + used_global_variables: Vec::new(), + directives: fragment.directives.clone(), + selections, + })) + }; + + self.fragments.insert( + applied_fragment_name, + PendingFragment::Resolved { + fragment_definition: transformed_fragment.clone(), + provided_variables: self.provided_variables.clone(), + }, + ); + + // add this fragment's provided variables to that of the enclosing operation / fragment + saved_provided_vars.extend(self.provided_variables.drain(..)); + self.provided_variables = saved_provided_vars; + self.scope.pop(); + + transformed_fragment + } + + fn transform_selection_multi(&mut self, selection: &Selection) -> TransformedMulti { + match selection { + Selection::FragmentSpread(selection) => { + self.transform_fragment_spread(selection).into() + } + Selection::InlineFragment(selection) => { + self.transform_inline_fragment(selection).into() + } + Selection::LinkedField(selection) => self.transform_linked_field(selection).into(), + Selection::ScalarField(selection) => self.transform_scalar_field(selection).into(), + Selection::Condition(selection) => 
self.transform_condition_multi(selection), + } + } + + fn transform_condition_multi(&mut self, condition: &Condition) -> TransformedMulti { + let condition_value = self.transform_condition_value(&condition.value); + + // If we replace with a constant condition, remove the condition node. + if let TransformedValue::Replace(ConditionValue::Constant(const_condition_value)) = + condition_value + { + return if const_condition_value == condition.passing_value { + let selections = self + .transform_selections(&condition.selections) + .replace_or_else(|| condition.selections.clone()); + TransformedMulti::ReplaceMultiple(selections) + } else { + TransformedMulti::Delete + }; + } + + // If selections are empty, delete + let selections = self.transform_selections(&condition.selections); + if let TransformedValue::Replace(selections) = &selections { + if selections.is_empty() { + return TransformedMulti::Delete; + } + } + + if selections.should_keep() && condition_value.should_keep() { + TransformedMulti::Keep + } else { + TransformedMulti::Replace(Selection::Condition(Arc::new(Condition { + value: condition_value.replace_or_else(|| condition.value.clone()), + selections: selections.replace_or_else(|| condition.selections.clone()), + ..condition.clone() + }))) + } + } +} + +fn no_inline_fragment_scope(fragment: &FragmentDefinition) -> Scope { + let mut bindings = HashMap::::with_capacity_and_hasher( + fragment.variable_definitions.len(), + Default::default(), + ); + for variable_definition in &fragment.variable_definitions { + let variable_name = variable_definition.name.item; + let scoped_variable_name = format_local_variable(fragment.name.item, variable_name.0); + bindings.insert( + variable_name, + Value::Variable(Variable { + name: WithLocation::new( + variable_definition.name.location, + VariableName(scoped_variable_name), + ), + type_: variable_definition.type_.clone(), + }), + ); + } + let mut scope = Scope::root_scope(); + scope.push_bindings(fragment.name.location, 
bindings); + scope +} + +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] +enum ValidationMessage { + #[error("Found a circular reference from fragment '{fragment_name}'.")] + CircularFragmentReference { + fragment_name: FragmentDefinitionName, + }, + #[error( + "Passing a value to '{original_definition_name}' (a provided variable) through @arguments is not supported." + )] + ProvidedVariableIncompatibleWithArguments { + original_definition_name: VariableName, + }, +} diff --git a/compiler/crates/relay-transforms/src/apply_fragment_arguments/mod.rs b/compiler/crates/relay-transforms/src/apply_fragment_arguments/mod.rs deleted file mode 100644 index 1aca03890ed7d..0000000000000 --- a/compiler/crates/relay-transforms/src/apply_fragment_arguments/mod.rs +++ /dev/null @@ -1,710 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod scope; - -use std::collections::HashMap; -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::FeatureFlag; -use common::Location; -use common::NamedItem; -use common::SourceLocationKey; -use common::WithLocation; -use graphql_ir::associated_data_impl; -use graphql_ir::transform_list; -use graphql_ir::transform_list_multi; -use graphql_ir::Condition; -use graphql_ir::ConditionValue; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentDefinitionNameMap; -use graphql_ir::FragmentDefinitionNameSet; -use graphql_ir::FragmentSpread; -use graphql_ir::InlineFragment; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::ProvidedVariableMetadata; -use graphql_ir::Selection; -use graphql_ir::Transformed; -use graphql_ir::TransformedMulti; 
-use graphql_ir::TransformedValue; -use graphql_ir::Transformer; -use graphql_ir::Value; -use graphql_ir::Variable; -use graphql_ir::VariableDefinition; -use graphql_ir::VariableName; -use graphql_syntax::OperationKind; -use intern::string_key::Intern; -use intern::string_key::StringKeyIndexMap; -use intern::string_key::StringKeyMap; -use itertools::Itertools; -use scope::format_local_variable; -use scope::Scope; -use thiserror::Error; - -use super::get_applied_fragment_name; -use crate::match_::SplitOperationMetadata; -use crate::match_::DIRECTIVE_SPLIT_OPERATION; -use crate::no_inline::is_raw_response_type_enabled; -use crate::no_inline::NO_INLINE_DIRECTIVE_NAME; -use crate::no_inline::PARENT_DOCUMENTS_ARG; -use crate::util::get_normalization_operation_name; -use crate::RawResponseGenerationMode; - -/// A transform that converts a set of documents containing fragments/fragment -/// spreads *with* arguments to one where all arguments have been inlined. This -/// is effectively static currying of functions. Nodes are changed as follows: -/// - Fragment spreads with arguments are replaced with references to an inlined -/// version of the referenced fragment. -/// - Fragments with argument definitions are cloned once per unique set of -/// arguments, with the name changed to original name + hash and all nested -/// variable references changed to the value of that variable given its -/// arguments. -/// - Field & directive argument variables are replaced with the value of those -/// variables in context. -/// - Definitions of provided variables are added to root operations. -/// - All nodes are cloned with updated children. -/// -/// The transform also handles statically passing/failing Condition nodes: -/// - Literal Conditions with a passing value are elided and their selections -/// inlined in their parent. -/// - Literal Conditions with a failing value are removed. -/// - Nodes that would become empty as a result of the above are removed. 
-/// -/// Note that unreferenced fragments are not added to the output. -pub fn apply_fragment_arguments( - program: &Program, - is_normalization: bool, - no_inline_feature: &FeatureFlag, - base_fragment_names: &FragmentDefinitionNameSet, -) -> DiagnosticsResult { - let mut transform = ApplyFragmentArgumentsTransform { - base_fragment_names, - errors: Vec::new(), - fragments: Default::default(), - is_normalization, - no_inline_feature, - program, - provided_variables: Default::default(), - scope: Default::default(), - split_operations: Default::default(), - }; - - let mut next_program = transform - .transform_program(program) - .replace_or_else(|| program.clone()); - - for (fragment_name, used_fragment) in transform.fragments { - match used_fragment { - PendingFragment::Resolved { - fragment_definition: Some(fragment), - .. - } => next_program.insert_fragment(fragment), - PendingFragment::Resolved { - fragment_definition: None, - .. - } => { - // The fragment ended up empty, do not add to result Program. 
- } - PendingFragment::Pending => panic!("Unexpected case, {}", fragment_name), - } - } - - for (_, (operation, _)) in transform.split_operations { - if let Some(operation) = operation { - next_program.insert_operation(Arc::new(operation)); - } - } - - if transform.errors.is_empty() { - Ok(next_program) - } else { - Err(transform.errors) - } -} - -type ProvidedVariablesMap = StringKeyIndexMap; - -#[derive(Debug)] -enum PendingFragment { - Pending, - Resolved { - fragment_definition: Option>, - provided_variables: ProvidedVariablesMap, - }, -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct NoInlineFragmentSpreadMetadata { - pub location: SourceLocationKey, -} - -associated_data_impl!(NoInlineFragmentSpreadMetadata); - -struct ApplyFragmentArgumentsTransform<'flags, 'program, 'base_fragments> { - base_fragment_names: &'base_fragments FragmentDefinitionNameSet, - errors: Vec, - fragments: FragmentDefinitionNameMap, - is_normalization: bool, - no_inline_feature: &'flags FeatureFlag, - program: &'program Program, - // used to keep track of the provided variables used by the current - // operation / fragment / no-inline fragment and its transitively - // included fragments - provided_variables: ProvidedVariablesMap, - scope: Scope, - split_operations: StringKeyMap<(Option, ProvidedVariablesMap)>, -} - -impl Transformer for ApplyFragmentArgumentsTransform<'_, '_, '_> { - const NAME: &'static str = "ApplyFragmentArgumentsTransform"; - const VISIT_ARGUMENTS: bool = true; - const VISIT_DIRECTIVES: bool = true; - - fn transform_operation( - &mut self, - operation: &OperationDefinition, - ) -> Transformed { - self.scope = Scope::root_scope(); - self.provided_variables = Default::default(); - let transform_result = self.default_transform_operation(operation); - if self.provided_variables.is_empty() - || operation - .directives - .named(*DIRECTIVE_SPLIT_OPERATION) - .is_some() - { - // this transform does not add the SplitOperation directive, so this - // should be 
equal to checking whether the result is a split operation - self.provided_variables.clear(); - transform_result - } else { - let mut add_provided_variables = |new_operation: &mut OperationDefinition| { - new_operation.variable_definitions.append( - &mut self - .provided_variables - .drain(..) - .map(|(_, definition)| definition) - .collect_vec(), - ); - }; - match transform_result { - Transformed::Keep => { - let mut new_operation = operation.clone(); - add_provided_variables(&mut new_operation); - Transformed::Replace(new_operation) - } - Transformed::Replace(mut new_operation) => { - add_provided_variables(&mut new_operation); - Transformed::Replace(new_operation) - } - Transformed::Delete => Transformed::Delete, - } - } - } - - fn transform_fragment( - &mut self, - fragment: &FragmentDefinition, - ) -> Transformed { - if self.is_normalization { - let no_inline_directive = fragment.directives.named(*NO_INLINE_DIRECTIVE_NAME); - if let Some(no_inline_directive) = no_inline_directive { - self.transform_no_inline_fragment(fragment, no_inline_directive); - } - } - // Non-inlined fragments are promoted to operations; other fragments are deleted - // unless they are referenced - Transformed::Delete - } - - fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { - let fragment = self - .program - .fragment(spread.fragment.item) - .unwrap_or_else(|| { - panic!( - "Tried to spread missing fragment: `{}`.", - spread.fragment.item - ); - }); - - // Validate that the fragment spread does not try to pass in provided variables - for (original_definition_name, definition_location) in - fragment.used_global_variables.iter().filter_map(|def| { - Some(( - ProvidedVariableMetadata::find(&def.directives)?.original_variable_name, - def.name.location, - )) - }) - { - if let Some(invalid_argument) = spread - .arguments - .named(ArgumentName(original_definition_name.0)) - { - self.errors.push( - Diagnostic::error( - 
ValidationMessage::ProvidedVariableIncompatibleWithArguments { - original_definition_name, - }, - invalid_argument.name.location, - ) - .annotate("Provided variable defined here", definition_location), - ); - } - } - - if self.is_normalization { - if let Some(directive) = fragment.directives.named(*NO_INLINE_DIRECTIVE_NAME) { - self.transform_no_inline_fragment(fragment, directive); - let transformed_arguments = spread - .arguments - .iter() - .map(|arg| { - let mut arg = self.transform_argument(arg).unwrap_or_else(|| arg.clone()); - arg.name.item.0 = - format_local_variable(fragment.name.item, arg.name.item.0); - arg - }) - .collect(); - let mut directives = Vec::with_capacity(spread.directives.len() + 1); - directives.extend(spread.directives.iter().cloned()); - - directives.push( - NoInlineFragmentSpreadMetadata { - location: fragment.name.location.source_location(), - } - .into(), - ); - - let normalization_name = - get_normalization_operation_name(fragment.name.item.0).intern(); - let next_spread = Selection::FragmentSpread(Arc::new(FragmentSpread { - arguments: transformed_arguments, - directives, - fragment: WithLocation::new( - fragment.name.location, - FragmentDefinitionName(normalization_name), - ), - })); - // If the fragment type is abstract, we need to ensure that it's only evaluated at runtime if the - // type of the object matches the fragment's type condition. Rather than reimplement type refinement - // for fragment spreads, we wrap the fragment spread in an inlinefragment (which may be inlined away) - // that ensures it will go through type-refinement at runtime. 
- return if fragment.type_condition.is_abstract_type() { - Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { - directives: Default::default(), - selections: vec![next_spread], - type_condition: Some(fragment.type_condition), - spread_location: Location::generated(), - }))) - } else { - Transformed::Replace(next_spread) - }; - } - } - - if let Some(applied_fragment) = self.apply_fragment(spread, fragment) { - let directives = self - .transform_directives(&spread.directives) - .replace_or_else(|| spread.directives.clone()); - Transformed::Replace(Selection::FragmentSpread(Arc::new(FragmentSpread { - fragment: applied_fragment.name, - arguments: Vec::new(), - directives, - }))) - } else { - Transformed::Delete - } - } - - fn transform_selections( - &mut self, - selections: &[Selection], - ) -> TransformedValue> { - transform_list_multi(selections, |selection| { - self.transform_selection_multi(selection) - }) - } - - fn transform_value(&mut self, value: &Value) -> TransformedValue { - match value { - Value::Variable(prev_variable) => { - if let Some(scope_value) = self.scope.get(prev_variable.name.item) { - match scope_value { - Value::Variable(replacement_variable) => { - TransformedValue::Replace(Value::Variable(Variable { - // Update the name/location to the applied variable name - name: replacement_variable.name, - // But keep the type of the previous variable, which reflects the type - // expected at this location - type_: prev_variable.type_.clone(), - })) - } - _ => TransformedValue::Replace(scope_value.clone()), - } - } else { - // Assume a global variable if the variable has no local - // bindings. 
- TransformedValue::Keep - } - } - Value::Constant(_) => TransformedValue::Keep, - Value::List(items) => { - transform_list(items, |value| self.transform_value(value)).map(Value::List) - } - Value::Object(arguments) => self.transform_arguments(arguments).map(Value::Object), - } - } - - fn transform_condition_value( - &mut self, - condition_value: &ConditionValue, - ) -> TransformedValue { - match condition_value { - ConditionValue::Variable(prev_variable) => { - match self.scope.get(prev_variable.name.item) { - Some(Value::Variable(replacement_variable)) => { - TransformedValue::Replace(ConditionValue::Variable(Variable { - // Update the name/location to the applied variable name - name: replacement_variable.name, - // But keep the type of the previous variable, which reflects the type - // expected at this location - type_: prev_variable.type_.clone(), - })) - } - Some(Value::Constant(ConstantValue::Boolean(constant_value))) => { - TransformedValue::Replace(ConditionValue::Constant(*constant_value)) - } - None => { - // Assume a global variable if the variable has no local - // bindings. 
- TransformedValue::Keep - } - Some(other_binding) => { - panic!("Invalid variable value for condition: {:?}", other_binding); - } - } - } - ConditionValue::Constant(_) => TransformedValue::Keep, - } - } -} - -impl ApplyFragmentArgumentsTransform<'_, '_, '_> { - fn transform_no_inline_fragment( - &mut self, - fragment: &FragmentDefinition, - directive: &Directive, - ) { - // If we have already computed, we can return early - if let Some((_, provided_variables)) = self.split_operations.get(&fragment.name.item.0) { - for (name, def) in provided_variables { - self.provided_variables.insert(*name, def.clone()); - } - return; - } - - // We do not need to to write normalization files for base fragments - let is_base = self.base_fragment_names.contains(&fragment.name.item); - if !is_base && !self.no_inline_feature.is_enabled_for(fragment.name.item.0) { - self.errors.push(Diagnostic::error( - format!( - "Invalid usage of @no_inline on fragment '{}': this feature is gated and currently set to: {}", - fragment.name.item, self.no_inline_feature - ), - directive.name.location, - )); - } - - // save the context used by the enclosing operation / fragment - let mut saved_provided_vars = std::mem::take(&mut self.provided_variables); - let saved_scope = std::mem::replace(&mut self.scope, no_inline_fragment_scope(fragment)); - - self.extract_provided_variables(fragment); - let fragment = self - .default_transform_fragment(fragment) - .unwrap_or_else(|| fragment.clone()); - let FragmentDefinition { - name, - mut directives, - mut variable_definitions, - selections, - type_condition, - .. 
- } = fragment; - - for variable in &mut variable_definitions { - variable.name.item = VariableName(format_local_variable( - fragment.name.item, - variable.name.item.0, - )); - } - let mut metadata = SplitOperationMetadata { - derived_from: Some(fragment.name.item), - location: fragment.name.location, - parent_documents: Default::default(), - raw_response_type_generation_mode: is_raw_response_type_enabled(directive) - .then_some(RawResponseGenerationMode::AllFieldsOptional), - }; - // - A fragment with user defined @no_inline always produces a $normalization file. The `parent_document` of - // that file is the fragment itself as it gets deleted iff that fragment is deleted or no longer - // has the @no_inline directive. - // - A fragment with @no_inline generated by @module, `parent_documents` also include fragments that - // spread the current fragment with @module - metadata.parent_documents.insert(fragment.name.item.into()); - let parent_documents_arg = directive.arguments.named(*PARENT_DOCUMENTS_ARG); - if let Some(Value::Constant(ConstantValue::List(parent_documents))) = - parent_documents_arg.map(|arg| &arg.value.item) - { - for val in parent_documents { - if let ConstantValue::String(name) = val { - metadata.parent_documents.insert( - graphql_ir::ExecutableDefinitionName::FragmentDefinitionName( - FragmentDefinitionName(*name), - ), - ); - } else { - panic!("Expected item in the parent_documents to be a StringKey.") - } - } - } - directives.push(metadata.into()); - let normalization_name = get_normalization_operation_name(name.item.0).intern(); - let operation = if is_base { - None - } else { - Some(OperationDefinition { - name: WithLocation::new(name.location, OperationDefinitionName(normalization_name)), - type_: type_condition, - variable_definitions, - directives, - selections, - kind: OperationKind::Query, - }) - }; - - if self - .program - .operation(OperationDefinitionName(normalization_name)) - .is_some() - { - self.errors.push(Diagnostic::error( - 
format!( - "Invalid usage of @no_inline on fragment '{}' - @no_inline is only allowed on allowlisted fragments loaded with @module", - fragment.name.item, - ), - directive.name.location, - )); - } - self.split_operations.insert( - fragment.name.item.0, - (operation, self.provided_variables.clone()), - ); - - // add this fragment's provided variables to that of the enclosing operation / fragment - saved_provided_vars.extend(self.provided_variables.drain(..).into_iter()); - self.provided_variables = saved_provided_vars; - self.scope = saved_scope; - } - - fn extract_provided_variables(&mut self, fragment: &FragmentDefinition) { - let provided_arguments = - fragment - .used_global_variables - .iter() - .filter(|variable_definition| { - variable_definition - .directives - .named(ProvidedVariableMetadata::directive_name()) - .is_some() - }); - for definition in provided_arguments { - self.provided_variables - .entry(definition.name.item.0) - .or_insert_with(|| definition.clone()); - } - } - - fn apply_fragment( - &mut self, - spread: &FragmentSpread, - fragment: &FragmentDefinition, - ) -> Option> { - let transformed_arguments = self - .transform_arguments(&spread.arguments) - .replace_or_else(|| spread.arguments.clone()); - - let applied_fragment_name = - get_applied_fragment_name(spread.fragment.item, &transformed_arguments); - if let Some(applied_fragment) = self.fragments.get(&applied_fragment_name) { - return match applied_fragment { - PendingFragment::Resolved { - fragment_definition, - provided_variables, - } => { - // add this fragment's provided variables to that of the enclosing - // operation / fragment - for (name, def) in provided_variables.iter() { - self.provided_variables.insert(*name, def.clone()); - } - fragment_definition.clone() - } - PendingFragment::Pending => { - let mut error = Diagnostic::error( - ValidationMessage::CircularFragmentReference { - fragment_name: spread.fragment.item, - }, - spread.fragment.location, - ); - for location in 
self.scope.locations() { - error = error.annotate("other member of the cycle", location); - } - self.errors.push(error); - None - } - }; - } - - self.fragments - .insert(applied_fragment_name, PendingFragment::Pending); - - self.scope - .push(spread.fragment.location, &transformed_arguments, fragment); - // save the context used by the enclosing operation / fragment - let mut saved_provided_vars = std::mem::take(&mut self.provided_variables); - self.extract_provided_variables(fragment); - - let selections = self - .transform_selections(&fragment.selections) - .replace_or_else(|| fragment.selections.clone()); - - let transformed_fragment = if selections.is_empty() { - None - } else { - Some(Arc::new(FragmentDefinition { - name: WithLocation::new(fragment.name.location, applied_fragment_name), - variable_definitions: Vec::new(), - type_condition: fragment.type_condition, - // TODO update globals - used_global_variables: Vec::new(), - directives: fragment.directives.clone(), - selections, - })) - }; - - self.fragments.insert( - applied_fragment_name, - PendingFragment::Resolved { - fragment_definition: transformed_fragment.clone(), - provided_variables: self.provided_variables.clone(), - }, - ); - - // add this fragment's provided variables to that of the enclosing operation / fragment - saved_provided_vars.extend(self.provided_variables.drain(..).into_iter()); - self.provided_variables = saved_provided_vars; - self.scope.pop(); - - transformed_fragment - } - - fn transform_selection_multi(&mut self, selection: &Selection) -> TransformedMulti { - match selection { - Selection::FragmentSpread(selection) => { - self.transform_fragment_spread(selection).into() - } - Selection::InlineFragment(selection) => { - self.transform_inline_fragment(selection).into() - } - Selection::LinkedField(selection) => self.transform_linked_field(selection).into(), - Selection::ScalarField(selection) => self.transform_scalar_field(selection).into(), - Selection::Condition(selection) => 
self.transform_condition_multi(selection), - } - } - - fn transform_condition_multi(&mut self, condition: &Condition) -> TransformedMulti { - let condition_value = self.transform_condition_value(&condition.value); - - // If we replace with a constant condition, remove the condition node. - if let TransformedValue::Replace(ConditionValue::Constant(const_condition_value)) = - condition_value - { - return if const_condition_value == condition.passing_value { - let selections = self - .transform_selections(&condition.selections) - .replace_or_else(|| condition.selections.clone()); - TransformedMulti::ReplaceMultiple(selections) - } else { - TransformedMulti::Delete - }; - } - - // If selections are empty, delete - let selections = self.transform_selections(&condition.selections); - if let TransformedValue::Replace(selections) = &selections { - if selections.is_empty() { - return TransformedMulti::Delete; - } - } - - if selections.should_keep() && condition_value.should_keep() { - TransformedMulti::Keep - } else { - TransformedMulti::Replace(Selection::Condition(Arc::new(Condition { - value: condition_value.replace_or_else(|| condition.value.clone()), - selections: selections.replace_or_else(|| condition.selections.clone()), - ..condition.clone() - }))) - } - } -} - -fn no_inline_fragment_scope(fragment: &FragmentDefinition) -> Scope { - let mut bindings = HashMap::::with_capacity_and_hasher( - fragment.variable_definitions.len(), - Default::default(), - ); - for variable_definition in &fragment.variable_definitions { - let variable_name = variable_definition.name.item; - let scoped_variable_name = format_local_variable(fragment.name.item, variable_name.0); - bindings.insert( - variable_name, - Value::Variable(Variable { - name: WithLocation::new( - variable_definition.name.location, - VariableName(scoped_variable_name), - ), - type_: variable_definition.type_.clone(), - }), - ); - } - let mut scope = Scope::root_scope(); - scope.push_bindings(fragment.name.location, 
bindings); - scope -} - -#[derive(Debug, Error)] -enum ValidationMessage { - #[error("Found a circular reference from fragment '{fragment_name}'.")] - CircularFragmentReference { - fragment_name: FragmentDefinitionName, - }, - #[error( - "Passing a value to '{original_definition_name}' (a provided variable) through @arguments is not supported." - )] - ProvidedVariableIncompatibleWithArguments { - original_definition_name: VariableName, - }, -} diff --git a/compiler/crates/relay-transforms/src/apply_transforms.rs b/compiler/crates/relay-transforms/src/apply_transforms.rs index 6c27fabf28514..624b4ad043242 100644 --- a/compiler/crates/relay-transforms/src/apply_transforms.rs +++ b/compiler/crates/relay-transforms/src/apply_transforms.rs @@ -25,7 +25,9 @@ use crate::client_extensions_abstract_types::client_extensions_abstract_types; use crate::disallow_non_node_id_fields; use crate::generate_relay_resolvers_model_fragments::generate_relay_resolvers_model_fragments; use crate::generate_relay_resolvers_operations_for_nested_objects::generate_relay_resolvers_operations_for_nested_objects; +use crate::generate_relay_resolvers_root_fragment_split_operation::generate_relay_resolvers_root_fragment_split_operation; use crate::match_::hash_supported_argument; +use crate::relay_resolvers_abstract_types::relay_resolvers_abstract_types; use crate::skip_updatable_queries::skip_updatable_queries; #[derive(Debug)] @@ -153,40 +155,38 @@ fn apply_common_transforms( &log_event, None, )?; - + program = log_event.time("relay_resolvers_abstract_types", || { + relay_resolvers_abstract_types(&program, &project_config.feature_flags) + })?; program = log_event.time("transform_connections", || { - transform_connections(&program, &project_config.schema_config.connection_interface) + transform_connections( + &program, + &project_config.schema_config.connection_interface, + &project_config.schema_config.defer_stream_interface, + ) }); program = log_event.time("mask", || mask(&program)); program = 
log_event.time("transform_defer_stream", || { - transform_defer_stream(&program) + transform_defer_stream( + &program, + &project_config.schema_config.defer_stream_interface, + ) })?; program = log_event.time("transform_match", || { transform_match( &program, &project_config.feature_flags, project_config.module_import_config, + project_config.schema_config.defer_stream_interface, ) })?; program = log_event.time("transform_subscriptions", || { transform_subscriptions(&program) })?; program = log_event.time("transform_refetchable_fragment", || { - transform_refetchable_fragment( - &program, - &project_config.schema_config, - &base_fragment_names, - false, - ) + transform_refetchable_fragment(&program, project_config, &base_fragment_names, false) })?; - if project_config.feature_flags.enable_flight_transform { - program = log_event.time("react_flight", || react_flight(&program))?; - program = log_event.time("relay_client_component", || { - relay_client_component(&program, &project_config.feature_flags) - })?; - } - program = log_event.time("relay_actor_change_transform", || { relay_actor_change_transform(&program, &project_config.feature_flags.actor_change_support) })?; @@ -196,13 +196,18 @@ fn apply_common_transforms( })?; program = log_event.time("generate_relay_resolvers_model_fragments", || { - generate_relay_resolvers_model_fragments(&program, &project_config.schema_config) + generate_relay_resolvers_model_fragments( + project_config.name, + &program, + &project_config.schema_config, + ) }); program = log_event.time( "generate_relay_resolvers_operations_for_nested_objects", || { generate_relay_resolvers_operations_for_nested_objects( + project_config.name, &program, &project_config.schema_config, ) @@ -248,17 +253,38 @@ fn apply_reader_transforms( program = log_event.time("fragment_alias_directive", || { fragment_alias_directive( &program, - &project_config.feature_flags.enable_fragment_aliases, + project_config + .feature_flags + .enable_fragment_aliases + 
.is_fully_enabled(), + // NOTE: We purposefully don't run validation in this arm of the + // transform pipeline, and instead we expect it to run in the + // typegen arm. In this arm we've already run refetchable fragment + // transform which creates some synthentic fragment spreads that we + // don't want to report. + false, ) })?; program = log_event.time("required_directive", || required_directive(&program))?; + + program = log_event.time("catch_directive", || { + catch_directive( + &program, + project_config + .feature_flags + .enable_catch_directive_transform + .is_fully_enabled(), + ) + })?; + program = log_event.time("client_edges", || { - client_edges(&program, &project_config.schema_config) + client_edges(&program, project_config, &base_fragment_names) })?; program = log_event.time("relay_resolvers", || { relay_resolvers( + project_config.name, &program, project_config.feature_flags.enable_relay_resolver_transform, ) @@ -276,19 +302,27 @@ fn apply_reader_transforms( program = log_event.time("inline_data_fragment", || inline_data_fragment(&program))?; program = log_event.time("skip_unreachable_node", || { - skip_unreachable_node_strict(&program) + skip_unreachable_node_strict( + &program, + project_config.schema_config.defer_stream_interface, + ) })?; program = log_event.time("remove_base_fragments", || { remove_base_fragments(&program, &base_fragment_names) }); log_event.time("flatten", || flatten(&mut program, true, false))?; - program = log_event.time("skip_redundant_nodes", || skip_redundant_nodes(&program)); + program = log_event.time("skip_redundant_nodes", || { + skip_redundant_nodes( + &program, + project_config.schema_config.defer_stream_interface, + ) + }); program = log_event.time("generate_data_driven_dependency_metadata", || { generate_data_driven_dependency_metadata(&program) }); program = log_event.time("hash_supported_argument", || { - hash_supported_argument(&program, &project_config.feature_flags) + hash_supported_argument(&program) })?; 
program = apply_after_custom_transforms( @@ -332,14 +366,21 @@ fn apply_operation_transforms( }); program = log_event.time("client_edges", || { - client_edges(&program, &project_config.schema_config) + client_edges(&program, project_config, &base_fragment_names) })?; program = log_event.time("relay_resolvers", || { relay_resolvers( + project_config.name, &program, project_config.feature_flags.enable_relay_resolver_transform, ) })?; + if project_config.resolvers_schema_module.is_some() { + program = log_event.time( + "generate_relay_resolvers_root_fragment_split_operation", + || generate_relay_resolvers_root_fragment_split_operation(&program), + )?; + } program = log_event.time("split_module_import", || { split_module_import(&program, &base_fragment_names) @@ -351,6 +392,7 @@ fn apply_operation_transforms( transform_declarative_connection( &program, &project_config.schema_config.connection_interface, + &project_config.feature_flags, ) })?; @@ -432,14 +474,17 @@ fn apply_normalization_transforms( }); program = log_event.time("hash_supported_argument", || { - hash_supported_argument(&program, &project_config.feature_flags) + hash_supported_argument(&program) })?; if let Some(print_stats) = maybe_print_stats { print_stats("hash_supported_argument", &program); } program = log_event.time("skip_unreachable_node", || { - skip_unreachable_node_strict(&program) + skip_unreachable_node_strict( + &program, + project_config.schema_config.defer_stream_interface, + ) })?; if let Some(print_stats) = maybe_print_stats { print_stats("skip_unreachable_node", &program); @@ -465,7 +510,12 @@ fn apply_normalization_transforms( print_stats("flatten", &program); } - program = log_event.time("skip_redundant_nodes", || skip_redundant_nodes(&program)); + program = log_event.time("skip_redundant_nodes", || { + skip_redundant_nodes( + &program, + project_config.schema_config.defer_stream_interface, + ) + }); if let Some(print_stats) = maybe_print_stats { print_stats("skip_redundant_nodes", 
&program); } @@ -525,35 +575,43 @@ fn apply_operation_text_transforms( ) })?; - program = log_event.time("remove_client_edge_selections", || { - remove_client_edge_selections(&program) - })?; - log_event.time("validate_global_variables", || { validate_global_variables(&program) })?; + program = log_event.time("remove_client_edge_selections", || { + remove_client_edge_selections(&program) + })?; + program = log_event.time("replace_updatable_fragment_spreads", || { replace_updatable_fragment_spreads(&program) }); program = log_event.time("skip_split_operation", || skip_split_operation(&program)); program = log_event.time("skip_unreachable_node_strict", || { - skip_unreachable_node_strict(&program) + skip_unreachable_node_strict( + &program, + project_config.schema_config.defer_stream_interface, + ) })?; program = log_event.time("skip_null_arguments_transform", || { skip_null_arguments_transform(&program) }); log_event.time("validate_selection_conflict", || { graphql_ir_validations::validate_selection_conflict::( - &program, true, + &program, + project_config, + true, ) })?; program = log_event.time("skip_client_extensions", || { skip_client_extensions(&program) }); program = log_event.time("skip_unreachable_node_loose", || { - skip_unreachable_node_loose(&program) + skip_unreachable_node_loose( + &program, + project_config.schema_config.defer_stream_interface, + ) }); program = log_event.time("generate_typename", || generate_typename(&program, false)); @@ -568,7 +626,10 @@ fn apply_operation_text_transforms( validate_required_arguments(&program) })?; program = log_event.time("unwrap_custom_directive_selection", || { - unwrap_custom_directive_selection(&program) + unwrap_custom_directive_selection( + &program, + project_config.schema_config.defer_stream_interface, + ) }); program = apply_after_custom_transforms( @@ -608,7 +669,14 @@ fn apply_typegen_transforms( program = log_event.time("fragment_alias_directive", || { fragment_alias_directive( &program, - 
&project_config.feature_flags.enable_fragment_aliases, + project_config + .feature_flags + .enable_fragment_aliases + .is_fully_enabled(), + project_config + .feature_flags + .enforce_fragment_alias_where_ambiguous + .is_fully_enabled(), ) })?; @@ -618,19 +686,35 @@ fn apply_typegen_transforms( &program, &project_config.feature_flags, project_config.module_import_config, + project_config.schema_config.defer_stream_interface, ) })?; program = log_event.time("transform_subscriptions", || { transform_subscriptions(&program) })?; program = log_event.time("required_directive", || required_directive(&program))?; + program = log_event.time("catch_directive", || { + catch_directive( + &program, + project_config + .feature_flags + .enable_catch_directive_transform + .is_fully_enabled(), + ) + })?; program = log_event.time("generate_relay_resolvers_model_fragments", || { - generate_relay_resolvers_model_fragments(&program, &project_config.schema_config) + generate_relay_resolvers_model_fragments( + project_config.name, + &program, + &project_config.schema_config, + ) }); + program = log_event.time( "generate_relay_resolvers_operations_for_nested_objects", || { generate_relay_resolvers_operations_for_nested_objects( + project_config.name, &program, &project_config.schema_config, ) @@ -638,7 +722,7 @@ fn apply_typegen_transforms( )?; program = log_event.time("client_edges", || { - client_edges(&program, &project_config.schema_config) + client_edges(&program, project_config, &base_fragment_names) })?; program = log_event.time( @@ -655,18 +739,14 @@ fn apply_typegen_transforms( program = log_event.time("relay_resolvers", || { relay_resolvers( + project_config.name, &program, project_config.feature_flags.enable_relay_resolver_transform, ) })?; log_event.time("flatten", || flatten(&mut program, false, false))?; program = log_event.time("transform_refetchable_fragment", || { - transform_refetchable_fragment( - &program, - &project_config.schema_config, - &base_fragment_names, - 
true, - ) + transform_refetchable_fragment(&program, project_config, &base_fragment_names, true) })?; program = log_event.time("remove_base_fragments", || { remove_base_fragments(&program, &base_fragment_names) diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/mod.rs b/compiler/crates/relay-transforms/src/assignable_fragment_spread.rs similarity index 100% rename from compiler/crates/relay-transforms/src/assignable_fragment_spread/mod.rs rename to compiler/crates/relay-transforms/src/assignable_fragment_spread.rs diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/errors.rs b/compiler/crates/relay-transforms/src/assignable_fragment_spread/errors.rs index 8900b69140231..faf9b173805d4 100644 --- a/compiler/crates/relay-transforms/src/assignable_fragment_spread/errors.rs +++ b/compiler/crates/relay-transforms/src/assignable_fragment_spread/errors.rs @@ -9,7 +9,8 @@ use graphql_ir::FragmentDefinitionName; use intern::string_key::StringKey; use thiserror::Error; -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] pub enum ValidationMessage { #[error( "The @{disallowed_directive_name} directive is not allowed on assignable fragment spreads." @@ -59,10 +60,8 @@ pub enum ValidationMessage { outer_type_plural: &'static str, }, - #[error( - "Fields defined using Relay Resolvers are not not allowed within @updatable operations." 
- )] - UpdatableDisallowRealyResolvers, + #[error("Fields defined using Relay Resolvers are not allowed within @updatable operations.")] + UpdatableDisallowRelayResolvers, #[error("The directives @include and @skip are not allowed within {outer_type_plural}.")] UpdatableNoConditions { outer_type_plural: &'static str }, diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/transform_assignable_fragment_spreads_in_regular_queries.rs b/compiler/crates/relay-transforms/src/assignable_fragment_spread/transform_assignable_fragment_spreads_in_regular_queries.rs index d10eb3491476d..bbc381568b208 100644 --- a/compiler/crates/relay-transforms/src/assignable_fragment_spread/transform_assignable_fragment_spreads_in_regular_queries.rs +++ b/compiler/crates/relay-transforms/src/assignable_fragment_spread/transform_assignable_fragment_spreads_in_regular_queries.rs @@ -287,7 +287,7 @@ impl<'s> Transformer for AssignableFragmentSpread<'s> { linked_field, "an assignable fragment was spread in this linked field", ) { - self.errors.extend(e.into_iter()); + self.errors.extend(e); } } diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_assignable_directive.rs b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_assignable_directive.rs index a5ceac991c261..5fde17f4ed0b6 100644 --- a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_assignable_directive.rs +++ b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_assignable_directive.rs @@ -53,7 +53,7 @@ impl<'a> Validator for AssignableDirective<'a> { if fragment.selections.len() == 1 { let first = fragment .selections - .get(0) + .first() .expect("Just checked selection length"); if let Selection::ScalarField(scalar_field) = first { if scalar_field.definition.item != self.program.schema.typename_field() diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_directive.rs 
b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_directive.rs index 529b115cd9160..040042a534e43 100644 --- a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_directive.rs +++ b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_directive.rs @@ -320,7 +320,7 @@ impl<'a> Validator for UpdatableDirective<'a> { { return Err(vec![ Diagnostic::error( - ValidationMessage::UpdatableDisallowRealyResolvers, + ValidationMessage::UpdatableDisallowRelayResolvers, field.definition.location, ) .annotate("The field is defined here:", field_def.name.location), @@ -341,7 +341,7 @@ impl<'a> Validator for UpdatableDirective<'a> { { return Err(vec![ Diagnostic::error( - ValidationMessage::UpdatableDisallowRealyResolvers, + ValidationMessage::UpdatableDisallowRelayResolvers, linked_field.definition.location, ) .annotate("The field is defined here:", field_def.name.location), diff --git a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_fragment_spread.rs b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_fragment_spread.rs index 034331d37607c..fc9c1372068f8 100644 --- a/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_fragment_spread.rs +++ b/compiler/crates/relay-transforms/src/assignable_fragment_spread/validate_updatable_fragment_spread.rs @@ -201,7 +201,7 @@ impl<'a> Validator for UpdatableFragmentSpread<'a> { })); match self.default_validate_linked_field(linked_field) { Ok(_) => {} - Err(e) => errors.extend(e.into_iter()), + Err(e) => errors.extend(e), } let linked_field_item = match self.path.pop().expect("path should not be empty") { PathItem::LinkedField(l) => l, diff --git a/compiler/crates/relay-transforms/src/catch_directive.rs b/compiler/crates/relay-transforms/src/catch_directive.rs new file mode 100644 index 0000000000000..37c17d5393994 --- /dev/null +++ 
b/compiler/crates/relay-transforms/src/catch_directive.rs @@ -0,0 +1,229 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::NamedItem; +use graphql_ir::associated_data_impl; +use graphql_ir::Directive; +use graphql_ir::LinkedField; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::Selection; +use graphql_ir::Transformed; +use graphql_ir::Transformer; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use lazy_static::lazy_static; +mod catchable_field; +mod validation_message; +use graphql_ir::Field; +use intern::Lookup; + +use self::catchable_field::CatchMetadata; +use self::catchable_field::CatchableField; +use crate::catch_directive::validation_message::ValidationMessage; +use crate::REQUIRED_DIRECTIVE_NAME; + +lazy_static! { + pub static ref CATCH_DIRECTIVE_NAME: DirectiveName = DirectiveName("catch".intern()); + pub static ref NULL_TO: StringKey = "NULL".intern(); + pub static ref RESULT_TO: StringKey = "RESULT".intern(); + pub static ref TO_ARGUMENT: ArgumentName = ArgumentName("to".intern()); +} + +// Possible @catch `to` enum values ordered by severity. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Debug, Hash)] +pub enum CatchTo { + Null, + Result, +} + +impl From for CatchTo { + fn from(to: StringKey) -> Self { + match to { + _ if to == *RESULT_TO => Self::Result, + _ if to == *NULL_TO => Self::Null, + _ => panic!("unknown @catch `to` value. 
Use `NULL` or `RESULT` (default) instead."), + } + } +} + +impl From for StringKey { + fn from(val: CatchTo) -> Self { + match val { + CatchTo::Null => *NULL_TO, + CatchTo::Result => *RESULT_TO, + } + } +} +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct CatchMetadataDirective { + pub to: CatchTo, + pub path: StringKey, +} +associated_data_impl!(CatchMetadataDirective); + +pub fn catch_directive(program: &Program, enabled: bool) -> DiagnosticsResult { + let mut transform = CatchDirective::new(program, enabled); + + let next_program = transform + .transform_program(program) + .replace_or_else(|| program.clone()); + + if transform.errors.is_empty() { + Ok(next_program) + } else { + Err(transform.errors) + } +} + +struct CatchDirective<'s> { + #[allow(dead_code)] + program: &'s Program, + errors: Vec, + enabled: bool, + path: Vec<&'s str>, +} + +impl<'program> CatchDirective<'program> { + fn new(program: &'program Program, enabled: bool) -> Self { + Self { + program, + errors: Default::default(), + enabled, + path: vec![], + } + } + + fn report_unimplemented(&mut self, directives: &[Directive]) { + if let Some(directive) = directives.named(*CATCH_DIRECTIVE_NAME) { + self.errors.push(Diagnostic::error( + ValidationMessage::CatchDirectiveNotImplemented, + directive.name.location, + )); + } + } + + fn get_catch_metadata(&mut self, field: &T) -> Option { + self.assert_not_with_required(field); + + match field.catch_metadata() { + Err(err) => { + self.errors.push(err); + None + } + Ok(catch) => catch, + } + } + + fn assert_not_with_required(&mut self, field: &T) { + let catchable_field = field.directives().named(*CATCH_DIRECTIVE_NAME); + let required_field = field.directives().named(*REQUIRED_DIRECTIVE_NAME); + + if catchable_field.is_some() && required_field.is_some() { + let required_location = required_field.unwrap().name.location; + self.errors.push(Diagnostic::error( + ValidationMessage::CatchDirectiveWithRequiredDirective, + required_location, + )); + } + } 
+} + +impl<'s> Transformer for CatchDirective<'s> { + const NAME: &'static str = "CatchDirectiveTransform"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed { + if !self.enabled { + self.report_unimplemented(&field.directives); + } + + let name = field.alias_or_name(&self.program.schema).lookup(); + self.path.push(name); + let path_name: StringKey = self.path.join(".").intern(); + self.path.pop(); + + match self.get_catch_metadata(field) { + None => Transformed::Keep, + Some(catch_metadata) => { + Transformed::Replace(Selection::ScalarField(Arc::new(ScalarField { + directives: add_metadata_directive( + &field.directives, + path_name, + catch_metadata.to, + ), + ..field.clone() + }))) + } + } + } + + fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { + if !self.enabled { + self.report_unimplemented(&field.directives); + } + + let name = field.alias_or_name(&self.program.schema).lookup(); + self.path.push(name); + + let maybe_catch_metadata = self.get_catch_metadata(field); + + match maybe_catch_metadata { + None => { + let selections = self.transform_selections(&field.selections); + self.path.pop(); + if selections.should_keep() { + Transformed::Keep + } else { + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + selections: selections.replace_or_else(|| field.selections.clone()), + ..field.clone() + }))) + } + } + Some(catch_metadata) => { + let path_name = self.path.join(".").intern(); + let next_directives = + add_metadata_directive(&field.directives, path_name, catch_metadata.to); + + let selections = self.transform_selections(&field.selections); + + self.path.pop(); + + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + directives: next_directives, + selections: selections.replace_or_else(|| field.selections.clone()), + ..field.clone() + }))) + } + } + } +} + +fn add_metadata_directive( + 
directives: &[Directive], + path_name: StringKey, + to: CatchTo, +) -> Vec { + let mut next_directives: Vec = Vec::with_capacity(directives.len() + 1); + next_directives.extend(directives.iter().cloned()); + next_directives.push( + CatchMetadataDirective { + to, + path: path_name, + } + .into(), + ); + next_directives +} diff --git a/compiler/crates/relay-transforms/src/catch_directive/catchable_field.rs b/compiler/crates/relay-transforms/src/catch_directive/catchable_field.rs new file mode 100644 index 0000000000000..04441c705b4de --- /dev/null +++ b/compiler/crates/relay-transforms/src/catch_directive/catchable_field.rs @@ -0,0 +1,72 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::Diagnostic; +use common::Location; +use common::NamedItem; +use common::WithLocation; +use graphql_ir::Directive; +use graphql_ir::Field; +use graphql_ir::LinkedField; +use graphql_ir::ScalarField; +use intern::string_key::StringKey; +use schema::SDLSchema; + +use super::CATCH_DIRECTIVE_NAME; +use super::TO_ARGUMENT; +use crate::CatchTo; + +#[derive(Clone, Copy)] +pub struct CatchMetadata { + pub to: CatchTo, + pub directive_location: Location, + pub to_location: Location, +} + +#[allow(dead_code)] +pub trait CatchableField { + fn directives(&self) -> &Vec; + fn name_with_location(&self, schema: &SDLSchema) -> WithLocation; + fn catch_metadata(&self) -> Result, Diagnostic> { + if let Some(catch_directive) = self.directives().named(*CATCH_DIRECTIVE_NAME) { + let maybe_to_arg = catch_directive.arguments.named(*TO_ARGUMENT); + let to_arg = match maybe_to_arg { + Some(arg) => WithLocation::new( + catch_directive.name.location, + CatchTo::from(arg.value.item.expect_constant().unwrap_enum()), + ), + None => WithLocation::new(catch_directive.name.location, CatchTo::Result), + }; + + Ok(Some(CatchMetadata { + to: to_arg.item, + 
to_location: to_arg.location, + directive_location: catch_directive.name.location, + })) + } else { + Ok(None) + } + } +} + +impl CatchableField for ScalarField { + fn directives(&self) -> &Vec { + &self.directives + } + fn name_with_location(&self, schema: &SDLSchema) -> WithLocation { + WithLocation::new(self.alias_or_name_location(), self.alias_or_name(schema)) + } +} + +impl CatchableField for LinkedField { + fn directives(&self) -> &Vec { + &self.directives + } + fn name_with_location(&self, schema: &SDLSchema) -> WithLocation { + WithLocation::new(self.alias_or_name_location(), self.alias_or_name(schema)) + } +} diff --git a/compiler/crates/relay-transforms/src/catch_directive/validation_message.rs b/compiler/crates/relay-transforms/src/catch_directive/validation_message.rs new file mode 100644 index 0000000000000..68b165c74c4d8 --- /dev/null +++ b/compiler/crates/relay-transforms/src/catch_directive/validation_message.rs @@ -0,0 +1,18 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use thiserror::Error; + +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] +pub(super) enum ValidationMessage { + #[error("Catch directive is not yet implemented in Relay")] + CatchDirectiveNotImplemented, + + #[error("@catch and @required directives cannot be on the same field")] + CatchDirectiveWithRequiredDirective, +} diff --git a/compiler/crates/relay-transforms/src/client_edges.rs b/compiler/crates/relay-transforms/src/client_edges.rs index bb3086129651e..a702cddde71eb 100644 --- a/compiler/crates/relay-transforms/src/client_edges.rs +++ b/compiler/crates/relay-transforms/src/client_edges.rs @@ -16,7 +16,9 @@ use common::NamedItem; use common::ObjectName; use common::WithLocation; use docblock_shared::HAS_OUTPUT_TYPE_ARGUMENT_NAME; +use docblock_shared::LIVE_ARGUMENT_NAME; use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; use graphql_ir::associated_data_impl; use graphql_ir::Argument; use graphql_ir::ConstantValue; @@ -25,6 +27,7 @@ use graphql_ir::ExecutableDefinitionName; use graphql_ir::Field; use graphql_ir::FragmentDefinition; use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentDefinitionNameSet; use graphql_ir::InlineFragment; use graphql_ir::LinkedField; use graphql_ir::OperationDefinition; @@ -40,8 +43,10 @@ use intern::string_key::StringKey; use intern::string_key::StringKeyMap; use intern::Lookup; use lazy_static::lazy_static; -use relay_config::SchemaConfig; +use relay_config::ProjectConfig; +use relay_schema::definitions::ResolverType; use schema::DirectiveValue; +use schema::ObjectID; use schema::Schema; use schema::Type; @@ -51,6 +56,7 @@ use crate::refetchable_fragment::REFETCHABLE_NAME; use crate::relay_resolvers::get_bool_argument_is_true; use crate::RequiredMetadataDirective; use crate::ValidationMessage; +use crate::CHILDREN_CAN_BUBBLE_METADATA_KEY; use crate::REQUIRED_DIRECTIVE_NAME; lazy_static! 
{ @@ -77,10 +83,17 @@ pub enum ClientEdgeMetadataDirective { ClientObject { type_name: Option, unique_id: u32, + model_resolvers: Vec, }, } associated_data_impl!(ClientEdgeMetadataDirective); +#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct ClientEdgeModelResolver { + pub type_name: WithLocation, + pub is_live: bool, +} + /// Metadata directive attached to generated queries #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct ClientEdgeGeneratedQueryMetadataDirective { @@ -120,8 +133,7 @@ impl<'a> ClientEdgeMetadata<'a> { "Expected Client Edge inline fragment to have exactly two selections. This is a bug in the Relay compiler." ); let mut backing_field = fragment - .selections - .get(0) + .selections.first() .expect("Client Edge inline fragments have exactly two selections").clone(); let backing_field_directives = backing_field.directives().iter().filter(|directive| @@ -142,8 +154,12 @@ impl<'a> ClientEdgeMetadata<'a> { }) } } -pub fn client_edges(program: &Program, schema_config: &SchemaConfig) -> DiagnosticsResult { - let mut transform = ClientEdgesTransform::new(program, schema_config); +pub fn client_edges( + program: &Program, + project_config: &ProjectConfig, + base_fragment_names: &FragmentDefinitionNameSet, +) -> DiagnosticsResult { + let mut transform = ClientEdgesTransform::new(program, project_config, base_fragment_names); let mut next_program = transform .transform_program(program) .replace_or_else(|| program.clone()); @@ -161,7 +177,7 @@ pub fn client_edges(program: &Program, schema_config: &SchemaConfig) -> Diagnost } } -struct ClientEdgesTransform<'program, 'sc> { +struct ClientEdgesTransform<'program, 'pc> { path: Vec<&'program str>, document_name: Option>, query_names: StringKeyMap, @@ -169,15 +185,19 @@ struct ClientEdgesTransform<'program, 'sc> { new_fragments: Vec>, new_operations: Vec, errors: Vec, - schema_config: &'sc SchemaConfig, + project_config: &'pc ProjectConfig, next_key: u32, + base_fragment_names: 
&'program FragmentDefinitionNameSet, } -impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { - fn new(program: &'program Program, schema_config: &'sc SchemaConfig) -> Self { +impl<'program, 'pc> ClientEdgesTransform<'program, 'pc> { + fn new( + program: &'program Program, + project_config: &'pc ProjectConfig, + base_fragment_names: &'program FragmentDefinitionNameSet, + ) -> Self { Self { program, - schema_config, path: Default::default(), query_names: Default::default(), document_name: Default::default(), @@ -185,17 +205,17 @@ impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { new_operations: Default::default(), errors: Default::default(), next_key: 0, + project_config, + base_fragment_names, } } - fn generate_query_name(&mut self) -> OperationDefinitionName { - let document_name = self.document_name.expect("We are within a document"); - let name_root = format!( - "ClientEdgeQuery_{}_{}", - document_name.item, - self.path.join("__") - ) - .intern(); + fn generate_query_name( + &mut self, + document_name: ExecutableDefinitionName, + ) -> OperationDefinitionName { + let name_root = + format!("ClientEdgeQuery_{}_{}", document_name, self.path.join("__")).intern(); // Due to duplicate inline fragments, or inline fragments without type // conditions, it's possible that multiple fields will have the same @@ -248,7 +268,7 @@ impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { selections, }; - let mut transformer = RefetchableFragment::new(self.program, self.schema_config, false); + let mut transformer = RefetchableFragment::new(self.program, self.project_config, false); let refetchable_fragment = transformer .transform_refetch_fragment_with_refetchable_directive( @@ -290,40 +310,15 @@ impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { }; } - fn transform_linked_field_impl(&mut self, field: &LinkedField) -> Transformed { - let schema = &self.program.schema; - let field_type = schema.field(field.definition.item); - - // Eventually we will want to 
enable client edges on non-resolver client - // schema extensions, but we'll start with limiting them to resolvers. - let resolver_directive = field_type.directives.named(*RELAY_RESOLVER_DIRECTIVE_NAME); - - let is_client_edge = field_type.is_extension && resolver_directive.is_some(); - - let waterfall_directive = field - .directives() - .named(*CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME); - - if !is_client_edge { - // Non-Client-Edge fields do not incur a waterfall, and thus should - // not be annotated with @waterfall. - if let Some(directive) = waterfall_directive { - self.errors.push(Diagnostic::error_with_data( - ValidationMessageWithData::RelayResolversUnexpectedWaterfall, - directive.name.location, - )); - } - return self.default_transform_linked_field(field); - } - + fn verify_directives_or_push_errors(&mut self, directives: &[Directive]) { let allowed_directive_names = [ *CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME, *REQUIRED_DIRECTIVE_NAME, + *CHILDREN_CAN_BUBBLE_METADATA_KEY, RequiredMetadataDirective::directive_name(), ]; - let other_directives = field - .directives + let other_directives = directives .iter() .filter(|directive| { !allowed_directive_names @@ -340,102 +335,251 @@ impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { directive.name.location, )); } + } - let edge_to_type = field_type.type_.inner(); - - let is_edge_to_client_object = schema.is_extension_type(edge_to_type); - - let new_selections = self - .transform_selections(&field.selections) - .replace_or_else(|| field.selections.clone()); - - let metadata_directive = if is_edge_to_client_object { - // We assume edges to client objects will be resolved on the client - // and thus not incur a waterfall. This will change in the future - // for @live Resolvers that can trigger suspense. 
- if let Some(directive) = waterfall_directive { - self.errors.push(Diagnostic::error_with_data( - ValidationMessageWithData::RelayResolversUnexpectedWaterfall, - directive.name.location, - )); - } + fn get_edge_to_client_object_metadata_directive( + &mut self, + field: &LinkedField, + edge_to_type: Type, + waterfall_directive: Option<&Directive>, + resolver_directive: Option<&DirectiveValue>, + ) -> Option { + // We assume edges to client objects will be resolved on the client + // and thus not incur a waterfall. This will change in the future + // for @live Resolvers that can trigger suspense. + if let Some(directive) = waterfall_directive { + self.errors.push(Diagnostic::error_with_data( + ValidationMessageWithData::RelayResolversUnexpectedWaterfall, + directive.name.location, + )); + } - match edge_to_type { - Type::Interface(_) => { - if !has_output_type(resolver_directive) { - self.errors.push(Diagnostic::error( - ValidationMessage::ClientEdgeToClientInterface, - field.alias_or_name_location(), - )); - } - ClientEdgeMetadataDirective::ClientObject { - type_name: None, - unique_id: self.get_key(), - } + match edge_to_type { + Type::Interface(interface_id) => { + let interface = self.program.schema.interface(interface_id); + let implementing_objects = + interface.recursively_implementing_objects(Arc::as_ref(&self.program.schema)); + if implementing_objects.is_empty() { + self.errors.push(Diagnostic::error( + ValidationMessage::RelayResolverClientInterfaceMustBeImplemented { + interface_name: interface.name.item, + }, + interface.name.location, + )); } - Type::Union(_) => { + if !self + .project_config + .feature_flags + .relay_resolver_enable_interface_output_type + .is_fully_enabled() + && !has_output_type(resolver_directive) + { self.errors.push(Diagnostic::error( - ValidationMessage::ClientEdgeToClientUnion, + ValidationMessage::ClientEdgeToClientInterface, field.alias_or_name_location(), )); - return Transformed::Keep; } - Type::Object(object_id) => 
ClientEdgeMetadataDirective::ClientObject { - type_name: Some(schema.object(object_id).name.item), + self.get_client_object_for_abstract_type( + implementing_objects.iter(), + interface.name.item.0, + ) + } + Type::Union(union) => { + let union = self.program.schema.union(union); + self.get_client_object_for_abstract_type(union.members.iter(), union.name.item.0) + } + Type::Object(object_id) => { + let type_name = self.program.schema.object(object_id).name.item; + let model_resolvers = self + .get_client_edge_model_resolver_for_object(object_id) + .map_or(vec![], |model_resolver| vec![model_resolver]); + Some(ClientEdgeMetadataDirective::ClientObject { + type_name: Some(type_name), + model_resolvers, unique_id: self.get_key(), - }, - _ => { - panic!( - "Expected a linked field to reference either an Object, Interface, or Union" - ) - } + }) } - } else { - // Client Edges to server objects must be annotated with @waterfall - if waterfall_directive.is_none() { - self.errors.push(Diagnostic::error_with_data( - ValidationMessageWithData::RelayResolversMissingWaterfall { - field_name: field_type.name.item, - }, - field.definition.location, - )); + _ => { + panic!("Expected a linked field to reference either an Object, Interface, or Union") } - let client_edge_query_name = self.generate_query_name(); + } + } + fn get_client_object_for_abstract_type<'a>( + &mut self, + members: impl Iterator, + abstract_type_name: StringKey, + ) -> Option { + let mut model_resolvers: Vec = members + .filter_map(|object_id| { + let model_resolver = self.get_client_edge_model_resolver_for_object(*object_id); + model_resolver.or_else(|| { + self.maybe_report_error_for_missing_model_resolver( + object_id, + abstract_type_name, + ); + None + }) + }) + .collect(); + model_resolvers.sort(); + Some(ClientEdgeMetadataDirective::ClientObject { + type_name: None, + model_resolvers, + unique_id: self.get_key(), + }) + } + + fn maybe_report_error_for_missing_model_resolver( + &mut self, + object_id: 
&ObjectID, + abstract_type_name: StringKey, + ) { + let object = Type::Object(*object_id); + let schema = self.program.schema.as_ref(); + if !object.is_weak_resolver_object(schema) && object.is_resolver_object(schema) { + let model_name = self.program.schema.object(*object_id).name; + self.errors.push(Diagnostic::error( + ValidationMessage::ClientEdgeImplementingObjectMissingModelResolver { + name: abstract_type_name, + type_name: model_name.item, + }, + model_name.location, + )); + } + } + + fn get_client_edge_model_resolver_for_object( + &mut self, + object_id: ObjectID, + ) -> Option { + let model = Type::Object(object_id); + let schema = self.program.schema.as_ref(); + if !model.is_resolver_object(schema) + || model.is_weak_resolver_object(schema) + || !model.is_terse_resolver_object(schema) + { + return None; + } + let object = self.program.schema.object(object_id); + let model_field_id = self + .program + .schema + .named_field(model, *RELAY_RESOLVER_MODEL_INSTANCE_FIELD)?; + let model_field = self.program.schema.field(model_field_id); + let resolver_directive = model_field.directives.named(*RELAY_RESOLVER_DIRECTIVE_NAME); + let is_live = resolver_directive.map_or(false, |resolver_directive| { + resolver_directive + .arguments + .iter() + .any(|arg| arg.name.0 == LIVE_ARGUMENT_NAME.0) + }); + Some(ClientEdgeModelResolver { + type_name: object.name, + is_live, + }) + } + + fn get_edge_to_server_object_metadata_directive( + &mut self, + field_type: &schema::Field, + field_location: Location, + waterfall_directive: Option<&Directive>, + selections: Vec, + ) -> ClientEdgeMetadataDirective { + // Client Edges to server objects must be annotated with @waterfall + if waterfall_directive.is_none() { + self.errors.push(Diagnostic::error_with_data( + ValidationMessageWithData::RelayResolversMissingWaterfall { + field_name: field_type.name.item, + }, + field_location, + )); + } + let document_name = self.document_name.expect("We are within a document"); + let 
client_edge_query_name = self.generate_query_name(document_name.item); + + let should_generate_query = + if let ExecutableDefinitionName::FragmentDefinitionName(fragment_name) = + document_name.item + { + // For base fragments we don't need to generate refetch queries + !self.base_fragment_names.contains(&fragment_name) + } else { + true + }; + if should_generate_query { self.generate_client_edge_query( client_edge_query_name, field_type.type_.inner(), - new_selections.clone(), + selections, ); - ClientEdgeMetadataDirective::ServerObject { - query_name: client_edge_query_name, - unique_id: self.get_key(), + } + + ClientEdgeMetadataDirective::ServerObject { + query_name: client_edge_query_name, + unique_id: self.get_key(), + } + } + + fn transform_linked_field_impl(&mut self, field: &LinkedField) -> Transformed { + let schema = &self.program.schema; + let field_type = schema.field(field.definition.item); + + // Eventually we will want to enable client edges on non-resolver client + // schema extensions, but we'll start with limiting them to resolvers. + let resolver_directive = field_type.directives.named(*RELAY_RESOLVER_DIRECTIVE_NAME); + + let is_client_edge = field_type.is_extension && resolver_directive.is_some(); + + let waterfall_directive = field + .directives() + .named(*CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME); + + if !is_client_edge { + // Non-Client-Edge fields do not incur a waterfall, and thus should + // not be annotated with @waterfall. 
+ if let Some(directive) = waterfall_directive { + self.errors.push(Diagnostic::error_with_data( + ValidationMessageWithData::RelayResolversUnexpectedWaterfall, + directive.name.location, + )); } - }; - let mut inline_fragment_directives: Vec = vec![metadata_directive.into()]; - if let Some(required_directive_metadata) = field - .directives - .named(RequiredMetadataDirective::directive_name()) - .cloned() - { - inline_fragment_directives.push(required_directive_metadata); + return self.default_transform_linked_field(field); } - let transformed_field = Arc::new(LinkedField { - selections: new_selections, - ..field.clone() - }); + self.verify_directives_or_push_errors(&field.directives); - let inline_fragment = InlineFragment { - type_condition: None, - directives: inline_fragment_directives, - selections: vec![ - Selection::LinkedField(transformed_field.clone()), - Selection::LinkedField(transformed_field), - ], - spread_location: Location::generated(), + let edge_to_type = field_type.type_.inner(); + + let is_edge_to_client_object = schema.is_extension_type(edge_to_type); + + let new_selections = self + .transform_selections(&field.selections) + .replace_or_else(|| field.selections.clone()); + + let metadata_directive = if is_edge_to_client_object { + match self.get_edge_to_client_object_metadata_directive( + field, + edge_to_type, + waterfall_directive, + resolver_directive, + ) { + Some(directive) => directive, + None => return Transformed::Keep, + } + } else { + self.get_edge_to_server_object_metadata_directive( + field_type, + field.definition.location, + waterfall_directive, + new_selections.clone(), + ) }; + let inline_fragment = + create_inline_fragment_for_client_edge(field, new_selections, metadata_directive); + Transformed::Replace(Selection::InlineFragment(Arc::new(inline_fragment))) } @@ -446,6 +590,36 @@ impl<'program, 'sc> ClientEdgesTransform<'program, 'sc> { } } +fn create_inline_fragment_for_client_edge( + field: &LinkedField, + selections: Vec, + 
metadata_directive: ClientEdgeMetadataDirective, +) -> InlineFragment { + let mut inline_fragment_directives: Vec = vec![metadata_directive.into()]; + if let Some(required_directive_metadata) = field + .directives + .named(RequiredMetadataDirective::directive_name()) + .cloned() + { + inline_fragment_directives.push(required_directive_metadata); + } + + let transformed_field = Arc::new(LinkedField { + selections, + ..field.clone() + }); + + InlineFragment { + type_condition: None, + directives: inline_fragment_directives, + selections: vec![ + Selection::LinkedField(Arc::clone(&transformed_field)), + Selection::LinkedField(transformed_field), + ], + spread_location: Location::generated(), + } +} + impl Transformer for ClientEdgesTransform<'_, '_> { const NAME: &'static str = "ClientEdgesTransform"; const VISIT_ARGUMENTS: bool = false; @@ -528,7 +702,7 @@ fn make_refetchable_directive(query_name: OperationDefinitionName) -> Directive } pub fn remove_client_edge_selections(program: &Program) -> DiagnosticsResult { - let mut transform = ClientEdgesCleanupTransform::default(); + let mut transform = ClientEdgesCleanupTransform; let next_program = transform .transform_program(program) .replace_or_else(|| program.clone()); diff --git a/compiler/crates/relay-transforms/src/connections/mod.rs b/compiler/crates/relay-transforms/src/connections.rs similarity index 100% rename from compiler/crates/relay-transforms/src/connections/mod.rs rename to compiler/crates/relay-transforms/src/connections.rs diff --git a/compiler/crates/relay-transforms/src/connections/connection_constants.rs b/compiler/crates/relay-transforms/src/connections/connection_constants.rs index 87c10224625ab..7abd4a52a6d1f 100644 --- a/compiler/crates/relay-transforms/src/connections/connection_constants.rs +++ b/compiler/crates/relay-transforms/src/connections/connection_constants.rs @@ -14,6 +14,10 @@ use intern::string_key::StringKey; pub struct ConnectionConstants { pub connection_directive_name: 
DirectiveName, pub stream_connection_directive_name: DirectiveName, + pub stream_connection_if_arg: ArgumentName, + pub stream_connection_label_arg: ArgumentName, + pub stream_connection_initial_count_arg: ArgumentName, + pub stream_connection_use_customized_batch_arg: ArgumentName, pub direction_forward: StringKey, pub direction_backward: StringKey, @@ -43,6 +47,12 @@ impl Default for ConnectionConstants { Self { connection_directive_name: DirectiveName("connection".intern()), stream_connection_directive_name: DirectiveName("stream_connection".intern()), + stream_connection_if_arg: ArgumentName("if".intern()), + stream_connection_label_arg: ArgumentName("label".intern()), + stream_connection_initial_count_arg: ArgumentName("initial_count".intern()), + stream_connection_use_customized_batch_arg: ArgumentName( + "use_customized_batch".intern(), + ), direction_forward: "forward".intern(), direction_backward: "backward".intern(), diff --git a/compiler/crates/relay-transforms/src/declarative_connection.rs b/compiler/crates/relay-transforms/src/declarative_connection.rs index a3b6f923b5c6c..117f32a9bea38 100644 --- a/compiler/crates/relay-transforms/src/declarative_connection.rs +++ b/compiler/crates/relay-transforms/src/declarative_connection.rs @@ -11,6 +11,7 @@ use common::ArgumentName; use common::Diagnostic; use common::DiagnosticsResult; use common::DirectiveName; +use common::FeatureFlags; use common::NamedItem; use graphql_ir::Field; use graphql_ir::FragmentDefinition; @@ -23,6 +24,8 @@ use graphql_ir::Transformer; use intern::string_key::Intern; use intern::string_key::StringKey; use lazy_static::lazy_static; +use schema::suggestion_list::did_you_mean; +use schema::suggestion_list::GraphQLSuggestions; use schema::SDLSchema; use schema::Schema; use schema::Type; @@ -36,8 +39,10 @@ use crate::handle_fields::HandleFieldDirectiveValues; pub fn transform_declarative_connection( program: &Program, connection_interface: &ConnectionInterface, + feature_flags: 
&FeatureFlags, ) -> DiagnosticsResult { - let mut transform = DeclarativeConnectionMutationTransform::new(program, connection_interface); + let mut transform = + DeclarativeConnectionMutationTransform::new(program, connection_interface, feature_flags); let next_program = transform .transform_program(program) .replace_or_else(|| program.clone()); @@ -65,13 +70,19 @@ struct DeclarativeConnectionMutationTransform<'a> { schema: &'a SDLSchema, errors: Vec, connection_interface: &'a ConnectionInterface, + feature_flags: &'a FeatureFlags, } impl<'a> DeclarativeConnectionMutationTransform<'a> { - fn new(program: &'a Program, connection_interface: &'a ConnectionInterface) -> Self { + fn new( + program: &'a Program, + connection_interface: &'a ConnectionInterface, + feature_flags: &'a FeatureFlags, + ) -> Self { Self { schema: &program.schema, connection_interface, + feature_flags, errors: vec![], } } @@ -273,6 +284,36 @@ impl Transformer for DeclarativeConnectionMutationTransform<'_> { Some(connections_arg) => { let edge_typename_arg = node_directive.arguments.named(*EDGE_TYPENAME_ARG); if let Some(edge_typename_arg) = edge_typename_arg { + if let Some(edge_typename_value) = edge_typename_arg + .value + .item + .get_constant() + .and_then(|c| c.get_string_literal()) + { + if !self.feature_flags.disable_edge_type_name_validation_on_declerative_connection_directives.is_enabled_for(edge_typename_value) { + let is_not_object_type = self + .schema + .get_type(edge_typename_value) + .map_or(true, |edge_type| !edge_type.is_object()); + + if is_not_object_type { + let suggestions = GraphQLSuggestions::new(self.schema); + + self.errors.push(Diagnostic::error( + ValidationMessage::InvalidEdgeTypeName { + directive_name: node_directive.name.item, + edge_typename: edge_typename_value, + suggestions: suggestions + .object_type_suggestions(edge_typename_value), + }, + edge_typename_arg.value.location, + )); + + return Transformed::Keep; + } + } + } + let field_definition = 
self.schema.field(field.definition.item); match field_definition.type_.inner() { Type::Object(_) | Type::Interface(_) | Type::Union(_) => { @@ -340,7 +381,8 @@ impl Transformer for DeclarativeConnectionMutationTransform<'_> { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] enum ValidationMessage { #[error( "Unsupported use of @{directive_name} on field '${field_name}', 'edgeTypeName' argument must be provided." @@ -399,4 +441,12 @@ enum ValidationMessage { field_name: StringKey, current_type: String, }, + #[error( + "Expected the 'edgeTypeName' argument value on @{directive_name} to be the name of an object type. '{edge_typename}' does not refer to a known object type.{suggestions}", suggestions = did_you_mean(suggestions) + )] + InvalidEdgeTypeName { + directive_name: DirectiveName, + edge_typename: StringKey, + suggestions: Vec, + }, } diff --git a/compiler/crates/relay-transforms/src/defer_stream.rs b/compiler/crates/relay-transforms/src/defer_stream.rs new file mode 100644 index 0000000000000..6f2158d2faf4d --- /dev/null +++ b/compiler/crates/relay-transforms/src/defer_stream.rs @@ -0,0 +1,448 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod directives; + +use std::collections::HashMap; +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::Location; +use common::NamedItem; +use common::WithLocation; +pub use directives::DeferDirective; +pub use directives::StreamDirective; +use graphql_ir::Argument; +use graphql_ir::ConstantValue; +use graphql_ir::Directive; +use graphql_ir::Field; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentSpread; +use graphql_ir::InlineFragment; +use graphql_ir::LinkedField; +use graphql_ir::OperationDefinition; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::Selection; +use graphql_ir::Transformed; +use graphql_ir::Transformer; +use graphql_ir::Value; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use intern::Lookup; +use relay_config::DeferStreamInterface; +use schema::Schema; +use thiserror::Error; + +use super::get_applied_fragment_name; +use crate::util::remove_directive; +use crate::util::replace_directive; + +pub fn transform_defer_stream( + program: &Program, + defer_stream_interface: &DeferStreamInterface, +) -> DiagnosticsResult { + let mut transformer = DeferStreamTransform { + program, + current_document_name: None, + labels: Default::default(), + errors: Default::default(), + defer_stream_interface, + }; + let next_program = transformer.transform_program(program); + + if transformer.errors.is_empty() { + Ok(next_program.replace_or_else(|| program.clone())) + } else { + Err(transformer.errors) + } +} + +struct DeferStreamTransform<'s> { + program: &'s Program, + current_document_name: Option, + labels: HashMap, + errors: Vec, + defer_stream_interface: &'s DeferStreamInterface, +} + +impl DeferStreamTransform<'_> { + fn set_current_document_name(&mut self, document_name: StringKey) { + self.current_document_name = Some(document_name) + } + + fn record_label( + 
&mut self, + label: StringKey, + directive: &Directive, + defer_stream_interface: &DeferStreamInterface, + ) { + let prev_directive = self.labels.get(&label); + match prev_directive { + Some(prev) => { + self.errors.push( + Diagnostic::error( + ValidationMessage::LabelNotUniqueForDeferStream { + directive_name: defer_stream_interface.defer_name, + }, + prev.name.location, + ) + .annotate("related location", directive.name.location), + ); + } + None => { + self.labels.insert(label, directive.to_owned()); + } + }; + } + + fn transform_defer( + &mut self, + spread: &FragmentSpread, + defer: &Directive, + defer_stream_interface: &DeferStreamInterface, + ) -> Result, Diagnostic> { + let DeferDirective { if_arg, label_arg } = + DeferDirective::from(defer, defer_stream_interface); + + if is_literal_false_arg(if_arg) { + return Ok(Transformed::Replace(Selection::FragmentSpread(Arc::new( + FragmentSpread { + directives: remove_directive(&spread.directives, defer.name.item), + ..spread.clone() + }, + )))); + } + + let label_value = get_literal_string_argument(defer, label_arg)?; + let label = label_value.unwrap_or_else(|| { + get_applied_fragment_name(spread.fragment.item, &spread.arguments).0 + }); + let transformed_label = transform_label( + self.current_document_name + .expect("We expect the parent name to be defined here."), + defer_stream_interface.defer_name, + label, + ); + self.record_label(transformed_label, defer, defer_stream_interface); + let next_label_value = Value::Constant(ConstantValue::String(transformed_label)); + let next_label_arg = Argument { + name: WithLocation { + item: defer_stream_interface.label_arg, + location: label_arg.map_or(defer.name.location, |arg| arg.name.location), + }, + value: WithLocation { + item: next_label_value, + location: label_arg.map_or(defer.name.location, |arg| arg.value.location), + }, + }; + + let mut next_arguments = Vec::with_capacity(2); + next_arguments.push(next_label_arg); + if let Some(if_arg) = if_arg { + 
next_arguments.push(if_arg.clone()); + } + + let next_defer = Directive { + name: defer.name, + arguments: next_arguments, + data: None, + }; + + Ok(Transformed::Replace(Selection::InlineFragment(Arc::new( + InlineFragment { + type_condition: None, + directives: vec![next_defer], + selections: vec![Selection::FragmentSpread(Arc::new(FragmentSpread { + directives: remove_directive(&spread.directives, defer.name.item), + ..spread.clone() + }))], + spread_location: Location::generated(), + }, + )))) + } + + fn transform_stream( + &mut self, + linked_field: &LinkedField, + stream: &Directive, + defer_stream_interface: &DeferStreamInterface, + ) -> Result, Diagnostic> { + let schema_field = self.program.schema.field(linked_field.definition.item); + if !schema_field.type_.is_list() { + return Err(Diagnostic::error( + ValidationMessage::StreamFieldIsNotAList { + field_name: schema_field.name.item, + }, + stream.name.location, + )); + } + + let StreamDirective { + if_arg, + label_arg, + initial_count_arg, + use_customized_batch_arg, + } = StreamDirective::from(stream, defer_stream_interface); + + let transformed_linked_field = self.default_transform_linked_field(linked_field); + let get_next_selection = |directives| match transformed_linked_field { + Transformed::Replace(mut selection) => { + selection.set_directives(directives); + Transformed::Replace(selection) + } + Transformed::Keep => { + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + directives, + ..linked_field.clone() + }))) + } + Transformed::Delete => Transformed::Delete, + }; + if is_literal_false_arg(if_arg) { + return Ok(get_next_selection(remove_directive( + &linked_field.directives, + stream.name.item, + ))); + } + + if initial_count_arg.is_none() { + return Err(Diagnostic::error( + ValidationMessage::StreamInitialCountRequired, + stream.name.location, + )); + } + + let label_value = get_literal_string_argument(stream, label_arg)?; + let label = label_value.unwrap_or_else(|| { + 
get_applied_fragment_name( + FragmentDefinitionName(linked_field.alias_or_name(&self.program.schema)), + &linked_field.arguments, + ) + .0 + }); + let transformed_label = transform_label( + self.current_document_name + .expect("We expect the parent name to be defined here."), + defer_stream_interface.stream_name, + label, + ); + self.record_label(transformed_label, stream, defer_stream_interface); + let next_label_value = Value::Constant(ConstantValue::String(transformed_label)); + let next_label_arg = Argument { + name: WithLocation { + item: defer_stream_interface.label_arg, + location: label_arg.map_or(stream.name.location, |arg| arg.name.location), + }, + value: WithLocation { + item: next_label_value, + location: label_arg.map_or(stream.name.location, |arg| arg.value.location), + }, + }; + + let mut next_arguments = Vec::with_capacity(4); + next_arguments.push(next_label_arg); + if let Some(if_arg) = if_arg { + next_arguments.push(if_arg.clone()); + } + if let Some(initial_count_arg) = initial_count_arg { + next_arguments.push(initial_count_arg.clone()); + } + if let Some(use_customized_batch_arg) = use_customized_batch_arg { + next_arguments.push(use_customized_batch_arg.clone()); + } + + let next_stream = Directive { + name: stream.name, + arguments: next_arguments, + data: None, + }; + + Ok(get_next_selection(replace_directive( + &linked_field.directives, + next_stream, + ))) + } +} + +impl<'s> Transformer for DeferStreamTransform<'s> { + const NAME: &'static str = "DeferStreamTransform"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn transform_operation( + &mut self, + operation: &OperationDefinition, + ) -> Transformed { + self.set_current_document_name(operation.name.item.0); + self.default_transform_operation(operation) + } + + fn transform_fragment( + &mut self, + fragment: &FragmentDefinition, + ) -> Transformed { + self.set_current_document_name(fragment.name.item.0); + self.default_transform_fragment(fragment) 
+ } + + /// Validates @defer is not allowed on inline fragments. + fn transform_inline_fragment( + &mut self, + inline_fragment: &InlineFragment, + ) -> Transformed { + let defer_directive = inline_fragment + .directives + .named(self.defer_stream_interface.defer_name); + if let Some(directive) = defer_directive { + // Special case for @defer generated by transform_connection + if let Some(label) = directive + .arguments + .named(self.defer_stream_interface.label_arg) + { + if let Some(label) = label.value.item.get_string_literal() { + if label.lookup().contains("$defer$") { + return self.default_transform_inline_fragment(inline_fragment); + } + } + } + self.errors.push(Diagnostic::error( + ValidationMessage::InvalidDeferOnInlineFragment, + directive.name.location, + )); + } + + self.default_transform_inline_fragment(inline_fragment) + } + + /// Transform of fragment spread with @defer is delegated to `transform_defer`. + fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { + let defer_directive = spread + .directives + .named(self.defer_stream_interface.defer_name); + if let Some(defer) = defer_directive { + match self.transform_defer(spread, defer, self.defer_stream_interface) { + Ok(transformed) => transformed, + Err(err) => { + self.errors.push(err); + self.default_transform_fragment_spread(spread) + } + } + } else { + self.default_transform_fragment_spread(spread) + } + } + + /// Validates @stream is not allowed on scalar fields. 
+ fn transform_scalar_field(&mut self, scalar_field: &ScalarField) -> Transformed { + let stream_directive = &scalar_field + .directives + .named(self.defer_stream_interface.stream_name); + if let Some(directive) = stream_directive { + self.errors.push(Diagnostic::error( + ValidationMessage::InvalidStreamOnScalarField { + field_name: scalar_field.alias_or_name(&self.program.schema), + }, + directive.name.location, + )); + } + self.default_transform_scalar_field(scalar_field) + } + + /// Transform of linked field with @stream is delegated to `transform_stream`. + fn transform_linked_field(&mut self, linked_field: &LinkedField) -> Transformed { + let stream_directive = linked_field + .directives + .named(self.defer_stream_interface.stream_name); + if let Some(stream) = stream_directive { + match self.transform_stream(linked_field, stream, self.defer_stream_interface) { + Ok(transformed) => transformed, + Err(err) => { + self.errors.push(err); + self.default_transform_linked_field(linked_field) + } + } + } else { + self.default_transform_linked_field(linked_field) + } + } +} + +fn is_literal_false_arg(arg: Option<&Argument>) -> bool { + if let Some(arg) = arg { + matches!( + arg.value.item, + Value::Constant(ConstantValue::Boolean(false)) + ) + } else { + false + } +} + +fn transform_label( + parent_name: StringKey, + directive_name: DirectiveName, + label: StringKey, +) -> StringKey { + format!("{}${}${}", parent_name, directive_name, label).intern() +} + +fn get_literal_string_argument( + directive: &Directive, + argument: Option<&Argument>, +) -> Result, Diagnostic> { + if let Some(arg) = argument { + if let Some(val) = arg.value.item.get_string_literal() { + Ok(Some(val)) + } else { + Err(Diagnostic::error( + ValidationMessage::LiteralStringArgumentExpectedForDirective { + arg_name: arg.name.item, + directive_name: directive.name.item, + }, + directive.name.location, + )) + } + } else { + Ok(None) + } +} + +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = 
"type")] +enum ValidationMessage { + #[error( + "Invalid use of @{directive_name}, the provided label is not unique. Specify a unique 'label' as a literal string." + )] + LabelNotUniqueForDeferStream { directive_name: DirectiveName }, + + #[error("Field '{field_name}' is not of list type, therefore cannot use @stream directive.")] + StreamFieldIsNotAList { field_name: StringKey }, + + #[error("Invalid use of @stream, the 'initial_count' argument is required.")] + StreamInitialCountRequired, + + #[error( + "Invalid use of @defer on an inline fragment. Relay only supports @defer on fragment spreads." + )] + InvalidDeferOnInlineFragment, + + #[error("Invalid use of @stream on scalar field '{field_name}'")] + InvalidStreamOnScalarField { field_name: StringKey }, + + #[error( + "Expected the '{arg_name}' value to @{directive_name} to be a string literal if provided." + )] + LiteralStringArgumentExpectedForDirective { + arg_name: ArgumentName, + directive_name: DirectiveName, + }, +} diff --git a/compiler/crates/relay-transforms/src/defer_stream/directives.rs b/compiler/crates/relay-transforms/src/defer_stream/directives.rs index 9c68debed3543..d177f14dff377 100644 --- a/compiler/crates/relay-transforms/src/defer_stream/directives.rs +++ b/compiler/crates/relay-transforms/src/defer_stream/directives.rs @@ -7,8 +7,7 @@ use graphql_ir::Argument; use graphql_ir::Directive; - -use super::DEFER_STREAM_CONSTANTS; +use relay_config::DeferStreamInterface; /// Utility to access the arguments of the @defer directive. pub struct DeferDirective<'a> { @@ -20,13 +19,13 @@ impl<'a> DeferDirective<'a> { /// Extracts the arguments from the given directive assumed to be a @defer /// directive. /// Panics on any unexpected arguments. 
- pub fn from(directive: &'a Directive) -> Self { + pub fn from(directive: &'a Directive, defer_stream_interface: &DeferStreamInterface) -> Self { let mut if_arg = None; let mut label_arg = None; for arg in &directive.arguments { - if arg.name.item == DEFER_STREAM_CONSTANTS.if_arg { + if arg.name.item == defer_stream_interface.if_arg { if_arg = Some(arg); - } else if arg.name.item == DEFER_STREAM_CONSTANTS.label_arg { + } else if arg.name.item == defer_stream_interface.label_arg { label_arg = Some(arg); } else { panic!("Unexpected argument to @defer: {}", arg.name.item); @@ -48,19 +47,19 @@ impl<'a> StreamDirective<'a> { /// Extracts the arguments from the given directive assumed to be a @stream /// directive. /// Panics on any unexpected arguments. - pub fn from(directive: &'a Directive) -> Self { + pub fn from(directive: &'a Directive, defer_stream_interface: &DeferStreamInterface) -> Self { let mut if_arg = None; let mut label_arg = None; let mut initial_count_arg = None; let mut use_customized_batch_arg = None; for arg in &directive.arguments { - if arg.name.item == DEFER_STREAM_CONSTANTS.if_arg { + if arg.name.item == defer_stream_interface.if_arg { if_arg = Some(arg); - } else if arg.name.item == DEFER_STREAM_CONSTANTS.label_arg { + } else if arg.name.item == defer_stream_interface.label_arg { label_arg = Some(arg); - } else if arg.name.item == DEFER_STREAM_CONSTANTS.initial_count_arg { + } else if arg.name.item == defer_stream_interface.initial_count_arg { initial_count_arg = Some(arg); - } else if arg.name.item == DEFER_STREAM_CONSTANTS.use_customized_batch_arg { + } else if arg.name.item == defer_stream_interface.use_customized_batch_arg { use_customized_batch_arg = Some(arg); } else { panic!("Unexpected argument to @stream: {}", arg.name.item); diff --git a/compiler/crates/relay-transforms/src/defer_stream/mod.rs b/compiler/crates/relay-transforms/src/defer_stream/mod.rs deleted file mode 100644 index cf7759ebb3a64..0000000000000 --- 
a/compiler/crates/relay-transforms/src/defer_stream/mod.rs +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod directives; - -use std::collections::HashMap; -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::Location; -use common::NamedItem; -use common::WithLocation; -pub use directives::DeferDirective; -pub use directives::StreamDirective; -use graphql_ir::Argument; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::Field; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentSpread; -use graphql_ir::InlineFragment; -use graphql_ir::LinkedField; -use graphql_ir::OperationDefinition; -use graphql_ir::Program; -use graphql_ir::ScalarField; -use graphql_ir::Selection; -use graphql_ir::Transformed; -use graphql_ir::Transformer; -use graphql_ir::Value; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::Lookup; -use lazy_static::lazy_static; -use schema::Schema; -use thiserror::Error; - -use super::get_applied_fragment_name; -use crate::util::remove_directive; -use crate::util::replace_directive; - -pub struct DeferStreamConstants { - pub defer_name: DirectiveName, - pub stream_name: DirectiveName, - pub if_arg: ArgumentName, - pub label_arg: ArgumentName, - pub initial_count_arg: ArgumentName, - pub use_customized_batch_arg: ArgumentName, -} - -impl Default for DeferStreamConstants { - fn default() -> Self { - Self { - defer_name: DirectiveName("defer".intern()), - stream_name: DirectiveName("stream".intern()), - if_arg: ArgumentName("if".intern()), - label_arg: ArgumentName("label".intern()), - initial_count_arg: ArgumentName("initial_count".intern()), - use_customized_batch_arg: 
ArgumentName("use_customized_batch".intern()), - } - } -} - -lazy_static! { - pub static ref DEFER_STREAM_CONSTANTS: DeferStreamConstants = Default::default(); -} - -pub fn transform_defer_stream(program: &Program) -> DiagnosticsResult { - let mut transformer = DeferStreamTransform { - program, - current_document_name: None, - labels: Default::default(), - errors: Default::default(), - }; - let next_program = transformer.transform_program(program); - - if transformer.errors.is_empty() { - Ok(next_program.replace_or_else(|| program.clone())) - } else { - Err(transformer.errors) - } -} - -struct DeferStreamTransform<'s> { - program: &'s Program, - current_document_name: Option, - labels: HashMap, - errors: Vec, -} - -impl DeferStreamTransform<'_> { - fn set_current_document_name(&mut self, document_name: StringKey) { - self.current_document_name = Some(document_name) - } - - fn record_label(&mut self, label: StringKey, directive: &Directive) { - let prev_directive = self.labels.get(&label); - match prev_directive { - Some(prev) => { - self.errors.push( - Diagnostic::error( - ValidationMessage::LabelNotUniqueForDeferStream { - directive_name: DEFER_STREAM_CONSTANTS.defer_name, - }, - prev.name.location, - ) - .annotate("related location", directive.name.location), - ); - } - None => { - self.labels.insert(label, directive.to_owned()); - } - }; - } - - fn transform_defer( - &mut self, - spread: &FragmentSpread, - defer: &Directive, - ) -> Result, Diagnostic> { - let DeferDirective { if_arg, label_arg } = DeferDirective::from(defer); - - if is_literal_false_arg(if_arg) { - return Ok(Transformed::Replace(Selection::FragmentSpread(Arc::new( - FragmentSpread { - directives: remove_directive(&spread.directives, defer.name.item), - ..spread.clone() - }, - )))); - } - - let label_value = get_literal_string_argument(defer, label_arg)?; - let label = label_value.unwrap_or_else(|| { - get_applied_fragment_name(spread.fragment.item, &spread.arguments).0 - }); - let 
transformed_label = transform_label( - self.current_document_name - .expect("We expect the parent name to be defined here."), - DEFER_STREAM_CONSTANTS.defer_name, - label, - ); - self.record_label(transformed_label, defer); - let next_label_value = Value::Constant(ConstantValue::String(transformed_label)); - let next_label_arg = Argument { - name: WithLocation { - item: DEFER_STREAM_CONSTANTS.label_arg, - location: label_arg.map_or(defer.name.location, |arg| arg.name.location), - }, - value: WithLocation { - item: next_label_value, - location: label_arg.map_or(defer.name.location, |arg| arg.value.location), - }, - }; - - let mut next_arguments = Vec::with_capacity(2); - next_arguments.push(next_label_arg); - if let Some(if_arg) = if_arg { - next_arguments.push(if_arg.clone()); - } - - let next_defer = Directive { - name: defer.name, - arguments: next_arguments, - data: None, - }; - - Ok(Transformed::Replace(Selection::InlineFragment(Arc::new( - InlineFragment { - type_condition: None, - directives: vec![next_defer], - selections: vec![Selection::FragmentSpread(Arc::new(FragmentSpread { - directives: remove_directive(&spread.directives, defer.name.item), - ..spread.clone() - }))], - spread_location: Location::generated(), - }, - )))) - } - - fn transform_stream( - &mut self, - linked_field: &LinkedField, - stream: &Directive, - ) -> Result, Diagnostic> { - let schema_field = self.program.schema.field(linked_field.definition.item); - if !schema_field.type_.is_list() { - return Err(Diagnostic::error( - ValidationMessage::StreamFieldIsNotAList { - field_name: schema_field.name.item, - }, - stream.name.location, - )); - } - - let StreamDirective { - if_arg, - label_arg, - initial_count_arg, - use_customized_batch_arg, - } = StreamDirective::from(stream); - - let transformed_linked_field = self.default_transform_linked_field(linked_field); - let get_next_selection = |directives| match transformed_linked_field { - Transformed::Replace(mut selection) => { - 
selection.set_directives(directives); - Transformed::Replace(selection) - } - Transformed::Keep => { - Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { - directives, - ..linked_field.clone() - }))) - } - Transformed::Delete => Transformed::Delete, - }; - if is_literal_false_arg(if_arg) { - return Ok(get_next_selection(remove_directive( - &linked_field.directives, - stream.name.item, - ))); - } - - if initial_count_arg.is_none() { - return Err(Diagnostic::error( - ValidationMessage::StreamInitialCountRequired, - stream.name.location, - )); - } - - let label_value = get_literal_string_argument(stream, label_arg)?; - let label = label_value.unwrap_or_else(|| { - get_applied_fragment_name( - FragmentDefinitionName(linked_field.alias_or_name(&self.program.schema)), - &linked_field.arguments, - ) - .0 - }); - let transformed_label = transform_label( - self.current_document_name - .expect("We expect the parent name to be defined here."), - DEFER_STREAM_CONSTANTS.stream_name, - label, - ); - self.record_label(transformed_label, stream); - let next_label_value = Value::Constant(ConstantValue::String(transformed_label)); - let next_label_arg = Argument { - name: WithLocation { - item: DEFER_STREAM_CONSTANTS.label_arg, - location: label_arg.map_or(stream.name.location, |arg| arg.name.location), - }, - value: WithLocation { - item: next_label_value, - location: label_arg.map_or(stream.name.location, |arg| arg.value.location), - }, - }; - - let mut next_arguments = Vec::with_capacity(4); - next_arguments.push(next_label_arg); - if let Some(if_arg) = if_arg { - next_arguments.push(if_arg.clone()); - } - if let Some(initial_count_arg) = initial_count_arg { - next_arguments.push(initial_count_arg.clone()); - } - if let Some(use_customized_batch_arg) = use_customized_batch_arg { - next_arguments.push(use_customized_batch_arg.clone()); - } - - let next_stream = Directive { - name: stream.name, - arguments: next_arguments, - data: None, - }; - - 
Ok(get_next_selection(replace_directive( - &linked_field.directives, - next_stream, - ))) - } -} - -impl<'s> Transformer for DeferStreamTransform<'s> { - const NAME: &'static str = "DeferStreamTransform"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn transform_operation( - &mut self, - operation: &OperationDefinition, - ) -> Transformed { - self.set_current_document_name(operation.name.item.0); - self.default_transform_operation(operation) - } - - fn transform_fragment( - &mut self, - fragment: &FragmentDefinition, - ) -> Transformed { - self.set_current_document_name(fragment.name.item.0); - self.default_transform_fragment(fragment) - } - - /// Validates @defer is not allowed on inline fragments. - fn transform_inline_fragment( - &mut self, - inline_fragment: &InlineFragment, - ) -> Transformed { - let defer_directive = inline_fragment - .directives - .named(DEFER_STREAM_CONSTANTS.defer_name); - if let Some(directive) = defer_directive { - // Special case for @defer generated by transform_connection - if let Some(label) = directive.arguments.named(DEFER_STREAM_CONSTANTS.label_arg) { - if let Some(label) = label.value.item.get_string_literal() { - if label.lookup().contains("$defer$") { - return self.default_transform_inline_fragment(inline_fragment); - } - } - } - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidDeferOnInlineFragment, - directive.name.location, - )); - } - - self.default_transform_inline_fragment(inline_fragment) - } - - /// Transform of fragment spread with @defer is delegated to `transform_defer`. 
- fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { - let defer_directive = spread.directives.named(DEFER_STREAM_CONSTANTS.defer_name); - if let Some(defer) = defer_directive { - match self.transform_defer(spread, defer) { - Ok(transformed) => transformed, - Err(err) => { - self.errors.push(err); - self.default_transform_fragment_spread(spread) - } - } - } else { - self.default_transform_fragment_spread(spread) - } - } - - /// Validates @stream is not allowed on scalar fields. - fn transform_scalar_field(&mut self, scalar_field: &ScalarField) -> Transformed { - let stream_directive = &scalar_field - .directives - .named(DEFER_STREAM_CONSTANTS.stream_name); - if let Some(directive) = stream_directive { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidStreamOnScalarField { - field_name: scalar_field.alias_or_name(&self.program.schema), - }, - directive.name.location, - )); - } - self.default_transform_scalar_field(scalar_field) - } - - /// Transform of linked field with @stream is delegated to `transform_stream`. 
- fn transform_linked_field(&mut self, linked_field: &LinkedField) -> Transformed { - let stream_directive = linked_field - .directives - .named(DEFER_STREAM_CONSTANTS.stream_name); - if let Some(stream) = stream_directive { - match self.transform_stream(linked_field, stream) { - Ok(transformed) => transformed, - Err(err) => { - self.errors.push(err); - self.default_transform_linked_field(linked_field) - } - } - } else { - self.default_transform_linked_field(linked_field) - } - } -} - -fn is_literal_false_arg(arg: Option<&Argument>) -> bool { - if let Some(arg) = arg { - matches!( - arg.value.item, - Value::Constant(ConstantValue::Boolean(false)) - ) - } else { - false - } -} - -fn transform_label( - parent_name: StringKey, - directive_name: DirectiveName, - label: StringKey, -) -> StringKey { - format!("{}${}${}", parent_name, directive_name, label).intern() -} - -fn get_literal_string_argument( - directive: &Directive, - argument: Option<&Argument>, -) -> Result, Diagnostic> { - if let Some(arg) = argument { - if let Some(val) = arg.value.item.get_string_literal() { - Ok(Some(val)) - } else { - Err(Diagnostic::error( - ValidationMessage::LiteralStringArgumentExpectedForDirective { - arg_name: arg.name.item, - directive_name: directive.name.item, - }, - directive.name.location, - )) - } - } else { - Ok(None) - } -} - -#[derive(Debug, Error)] -enum ValidationMessage { - #[error( - "Invalid use of @{directive_name}, the provided label is not unique. Specify a unique 'label' as a literal string." - )] - LabelNotUniqueForDeferStream { directive_name: DirectiveName }, - - #[error("Field '{field_name}' is not of list type, therefore cannot use @stream directive.")] - StreamFieldIsNotAList { field_name: StringKey }, - - #[error("Invalid use of @stream, the 'initial_count' argument is required.")] - StreamInitialCountRequired, - - #[error( - "Invalid use of @defer on an inline fragment. Relay only supports @defer on fragment spreads." 
- )] - InvalidDeferOnInlineFragment, - - #[error("Invalid use of @stream on scalar field '{field_name}'")] - InvalidStreamOnScalarField { field_name: StringKey }, - - #[error( - "Expected the '{arg_name}' value to @{directive_name} to be a string literal if provided." - )] - LiteralStringArgumentExpectedForDirective { - arg_name: ArgumentName, - directive_name: DirectiveName, - }, -} diff --git a/compiler/crates/relay-transforms/src/errors.rs b/compiler/crates/relay-transforms/src/errors.rs index d20fec1ba46de..ed0802fe08986 100644 --- a/compiler/crates/relay-transforms/src/errors.rs +++ b/compiler/crates/relay-transforms/src/errors.rs @@ -14,34 +14,24 @@ use common::WithDiagnosticData; use graphql_ir::FragmentDefinitionName; use graphql_ir::VariableName; use intern::string_key::StringKey; -use intern::Lookup; use thiserror::Error; -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum ValidationMessage { - #[error("@relay_client_component is not compatible with these {}: `{}`", - if incompatible_directives.len() > 1 { "directives" } else { "directive" }, - incompatible_directives - .iter() - .map(|name| name.0.lookup()) - .collect::>() - .join("`, `")) - ] - IncompatibleRelayClientComponentDirectives { - incompatible_directives: Vec, - }, - - #[error("@relay_client_component is not compatible with @arguments.")] - InvalidRelayClientComponentWithArguments, - #[error("This fragment spread already has a split normalization file generated.")] DuplicateRelayClientComponentSplitOperation, - #[error( - "@relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. If the fragment's type is a union type, all members of that union must implement Node." 
- )] - InvalidRelayClientComponentNonNodeFragment, - #[error( "The Relay Resolver backing this field has an `@relay_resolver` directive with an invalid '{key}' argument. Expected a literal string value." )] @@ -113,7 +103,7 @@ pub enum ValidationMessage { }, #[error( - "The '{fragment_name}' is transformed to use @no_inline implictly by `@module` or `@relay_client_component`, but it's also used in a regular fragment spread. It's required to explicitly add `@no_inline` to the definition of '{fragment_name}'." + "The '{fragment_name}' is transformed to use @no_inline implicitly by `@module`, but it's also used in a regular fragment spread. It's required to explicitly add `@no_inline` to the definition of '{fragment_name}'." )] RequiredExplicitNoInlineDirective { fragment_name: FragmentDefinitionName, @@ -140,23 +130,25 @@ pub enum ValidationMessage { ClientEdgeToClientInterface, #[error( - "Client Edges that reference client-defined union types are not currently supported in Relay." + "The client edge pointing to `{name}` with implementing object, `{type_name}`, is missing its corresponding model resolver. The concrete type `{type_name}` and its resolver fields should be defined with the newer dot notation resolver syntax. See https://relay.dev/docs/guides/relay-resolvers/." )] - ClientEdgeToClientUnion, - + ClientEdgeImplementingObjectMissingModelResolver { + name: StringKey, + type_name: ObjectName, + }, #[error("Invalid directive combination. @alias may not be combined with other directives.")] FragmentAliasIncompatibleDirective, - #[error("Unexpected directive @alias. @alias is not currently enabled in this location.")] - FragmentAliasDirectiveDisabled, + #[error("Unexpected directive @catch. @catch is not yet implemented.")] + CatchDirectiveNotImplemented, - #[error("Expected the `as` argument of the @alias directive to be a static string.")] - FragmentAliasDirectiveDynamicNameArg, + #[error("Unexpected directive `@alias`. 
`@alias` is not currently enabled in this location.")] + FragmentAliasDirectiveDisabled, #[error( - "Missing required argument `as`. The `as` argument of the @alias directive is required on inline fragments without a type condition." + "Unexpected `@alias` on spread of plural fragment. @alias may not be used on fragments marked as `@relay(plural: true)`." )] - FragmentAliasDirectiveMissingAs, + PluralFragmentAliasNotSupported, #[error( "Unexpected dynamic argument. {field_name}'s '{argument_name}' argument must be a constant value because it is read by the Relay compiler." @@ -189,7 +181,7 @@ pub enum ValidationMessage { }, #[error( - "No types implement the client interface {interface_name}. For a client interface to be used as a @RelayResolver @outputType, at least one Object type must implement the interface." + "No types implement the client interface {interface_name}. Interfaces returned by a @RelayResolver must have at least one concrete implementation." )] RelayResolverClientInterfaceMustBeImplemented { interface_name: InterfaceName }, @@ -236,7 +228,18 @@ pub enum ValidationMessage { }, } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] pub enum ValidationMessageWithData { #[error( "Expected a `@waterfall` directive on this field. Consuming a Client Edge field incurs a network roundtrip or \"waterfall\". To make this explicit, a `@waterfall` directive is required on this field." @@ -247,6 +250,39 @@ pub enum ValidationMessageWithData { "Unexpected `@waterfall` directive. Only fields that are backed by a Client Edge and point to a server object should be annotated with the `@waterfall` directive." )] RelayResolversUnexpectedWaterfall, + + #[error( + "Unexpected `@required` directive on a non-null field. This field is already non-null and does not need the `@required` directive." 
+ )] + RequiredOnNonNull, + + #[error( + "Unexpected `@required` directive on a `@semanticNonNull` field within a `@throwOnFieldError` fragment or operation. Such fields are already non-null and do not need the `@required` directive." + )] + RequiredOnSemanticNonNull, + + #[error( + "Expected `@alias` directive. `{fragment_name}` is defined on `{fragment_type_name}` which might not match this selection type of `{selection_type_name}`. Add `@alias` to this spread to expose the fragment reference as a nullable property." + )] + ExpectedAliasOnNonSubtypeSpread { + fragment_name: FragmentDefinitionName, + fragment_type_name: StringKey, + selection_type_name: StringKey, + }, + + #[error( + "Expected `@alias` directive. `{fragment_name}` is defined on `{fragment_type_name}` which might not match this selection type of `{selection_type_name}`. Add `@alias` to this spread to expose the fragment reference as a nullable property. NOTE: The selection type inferred here does not include inline fragments because Relay does not always model inline fragment type refinements in its generated types." + )] + ExpectedAliasOnNonSubtypeSpreadWithinTypedInlineFragment { + fragment_name: FragmentDefinitionName, + fragment_type_name: StringKey, + selection_type_name: StringKey, + }, + + #[error( + "Expected `@alias` directive. Fragment spreads with `@{condition_name}` are conditionally fetched. Add `@alias` to this spread to expose the fragment reference as a nullable property." 
+ )] + ExpectedAliasOnConditionalFragmentSpread { condition_name: String }, } impl WithDiagnosticData for ValidationMessageWithData { @@ -258,6 +294,37 @@ impl WithDiagnosticData for ValidationMessageWithData { ValidationMessageWithData::RelayResolversUnexpectedWaterfall => { vec![Box::new("")] } + ValidationMessageWithData::RequiredOnNonNull => { + vec![Box::new("")] + } + ValidationMessageWithData::RequiredOnSemanticNonNull => { + vec![Box::new("")] + } + ValidationMessageWithData::ExpectedAliasOnNonSubtypeSpread { + fragment_name, .. + } => { + vec![ + Box::new(format!("{fragment_name} @alias")), + Box::new(format!("{fragment_name} @dangerously_unaliased_fixme")), + ] + } + ValidationMessageWithData::ExpectedAliasOnNonSubtypeSpreadWithinTypedInlineFragment { + fragment_name, .. + } => { + vec![ + Box::new(format!("{fragment_name} @alias")), + Box::new(format!("{fragment_name} @dangerously_unaliased_fixme")), + ] + } + ValidationMessageWithData::ExpectedAliasOnConditionalFragmentSpread { + condition_name, + .. 
+ } => { + vec![ + Box::new(format!("@alias @{condition_name}")), + Box::new(format!("@dangerously_unaliased_fixme @{condition_name}")), + ] + } } } } diff --git a/compiler/crates/relay-transforms/src/flatten.rs b/compiler/crates/relay-transforms/src/flatten.rs index e38996dda1a16..e8c75118f27b2 100644 --- a/compiler/crates/relay-transforms/src/flatten.rs +++ b/compiler/crates/relay-transforms/src/flatten.rs @@ -78,12 +78,12 @@ pub fn flatten( par_iter(&mut program.operations).for_each(|operation| { if let Err(err) = transform.transform_operation(operation) { - errors.lock().extend(err.into_iter()); + errors.lock().extend(err); } }); par_iter(&mut program.fragments).for_each(|(_, fragment)| { if let Err(err) = transform.transform_fragment(fragment) { - errors.lock().extend(err.into_iter()); + errors.lock().extend(err); } }); let is_errors_empty = { errors.lock().is_empty() }; @@ -608,6 +608,6 @@ fn merge_handle_directives( } } } - directives.extend(handles.into_iter()); + directives.extend(handles); directives } diff --git a/compiler/crates/relay-transforms/src/fragment_alias_directive.rs b/compiler/crates/relay-transforms/src/fragment_alias_directive.rs index cc1c697802444..0bb4b0358fcce 100644 --- a/compiler/crates/relay-transforms/src/fragment_alias_directive.rs +++ b/compiler/crates/relay-transforms/src/fragment_alias_directive.rs @@ -11,13 +11,11 @@ use common::ArgumentName; use common::Diagnostic; use common::DiagnosticsResult; use common::DirectiveName; -use common::FeatureFlag; -use common::Location; use common::NamedItem; use common::WithLocation; use graphql_ir::associated_data_impl; use graphql_ir::transform_list; -use graphql_ir::Directive; +use graphql_ir::Condition; use graphql_ir::FragmentDefinition; use graphql_ir::FragmentSpread; use graphql_ir::InlineFragment; @@ -34,10 +32,15 @@ use lazy_static::lazy_static; use schema::Schema; use schema::Type; +use crate::RelayDirective; use crate::ValidationMessage; +use crate::ValidationMessageWithData; +use 
crate::MATCH_CONSTANTS; lazy_static! { pub static ref FRAGMENT_ALIAS_DIRECTIVE_NAME: DirectiveName = DirectiveName("alias".intern()); + pub static ref FRAGMENT_DANGEROUSLY_UNALIAS_DIRECTIVE_NAME: DirectiveName = + DirectiveName("dangerously_unaliased_fixme".intern()); pub static ref FRAGMENT_ALIAS_ARGUMENT_NAME: ArgumentName = ArgumentName("as".intern()); } @@ -45,15 +48,17 @@ lazy_static! { pub struct FragmentAliasMetadata { pub alias: WithLocation, pub type_condition: Option, + pub non_nullable: bool, pub selection_type: Type, } associated_data_impl!(FragmentAliasMetadata); pub fn fragment_alias_directive( program: &Program, - feature_flag: &FeatureFlag, + is_enabled: bool, + is_enforced: bool, ) -> DiagnosticsResult { - let mut transform = FragmentAliasTransform::new(program, feature_flag); + let mut transform = FragmentAliasTransform::new(program, is_enabled, is_enforced); let next_program = transform .transform_program(program) .replace_or_else(|| program.clone()); @@ -66,94 +71,114 @@ pub fn fragment_alias_directive( struct FragmentAliasTransform<'program> { program: &'program Program, - feature_flag: &'program FeatureFlag, + is_enabled: bool, + is_enforced: bool, document_name: Option, parent_type: Option, + within_inline_fragment_type_condition: bool, + maybe_condition: Option, errors: Vec, } impl<'program> FragmentAliasTransform<'program> { - fn new(program: &'program Program, feature_flag: &'program FeatureFlag) -> Self { + fn new(program: &'program Program, enabled: bool, enforced: bool) -> Self { Self { program, - feature_flag, + is_enabled: enabled, + is_enforced: enforced, document_name: None, parent_type: None, + within_inline_fragment_type_condition: false, + maybe_condition: None, errors: Vec::new(), } } - fn transform_alias_directives( + fn will_always_match(&self, type_condition: Option) -> bool { + if self.maybe_condition.is_some() { + return false; + } + match type_condition { + Some(type_condition) => { + let parent_type = self + .parent_type 
+ .expect("Selection should be within a parent type."); + + self.program + .schema + .is_named_type_subtype_of(parent_type, type_condition) + } + None => true, + } + } + + fn validate_unaliased_fragment_spread( &mut self, - directives: &[Directive], type_condition: Option, - get_default_name: N, - ) -> TransformedValue> - where - N: Fn() -> Option, - { - transform_list(directives, |directive| { - if directive.name.item != *FRAGMENT_ALIAS_DIRECTIVE_NAME { - return Transformed::Keep; - } + spread: &FragmentSpread, + ) { + if !self.is_enforced { + return; + } + if spread + .directives + .named(*FRAGMENT_DANGEROUSLY_UNALIAS_DIRECTIVE_NAME) + .is_some() + { + // We allow users to add `@dangerously_unaliaed_fixme` to suppress + // this error as a migration strategy. + return; + } + if spread + .directives + .named(MATCH_CONSTANTS.module_directive_name) + .is_some() + { + // Fragments that have `@module` are likely going to be accessed with a + // MatchContainer which should handle the possibility that this fragment + // will not match. 
+ return; + } + if let Some(condition) = &self.maybe_condition { + self.errors.push(Diagnostic::error_with_data( + ValidationMessageWithData::ExpectedAliasOnConditionalFragmentSpread { + condition_name: condition.directive_name().to_string(), + }, + condition.location, + )); + return; + } + if let Some(type_condition) = type_condition { + let parent_type = self + .parent_type + .expect("Selection should be within a parent type."); - let allowed = match self.document_name { - Some(name) => self.feature_flag.is_enabled_for(name), - None => false, - }; - - if !allowed { - self.errors.push(Diagnostic::error( - ValidationMessage::FragmentAliasDirectiveDisabled, - directive.name.location, - )); - return Transformed::Keep; - } - let alias = match directive.arguments.named(*FRAGMENT_ALIAS_ARGUMENT_NAME) { - Some(arg) => match arg.value.item.get_string_literal() { - Some(name) => WithLocation::new(arg.name.location, name), - None => { - self.errors.push(Diagnostic::error( - ValidationMessage::FragmentAliasDirectiveDynamicNameArg, - arg.value.location, - )); - return Transformed::Keep; + if !self + .program + .schema + .is_named_type_subtype_of(parent_type, type_condition) + { + let fragment_type_name = self.program.schema.get_type_name(type_condition); + let selection_type_name = self.program.schema.get_type_name(parent_type); + let diagnostic = if self.within_inline_fragment_type_condition { + ValidationMessageWithData::ExpectedAliasOnNonSubtypeSpreadWithinTypedInlineFragment { + fragment_name: spread.fragment.item, + fragment_type_name, + selection_type_name, } - }, - None => match get_default_name() { - None => { - self.errors.push(Diagnostic::error( - ValidationMessage::FragmentAliasDirectiveMissingAs, - directive.name.location, - )); - return Transformed::Keep; + } else { + ValidationMessageWithData::ExpectedAliasOnNonSubtypeSpread { + fragment_name: spread.fragment.item, + fragment_type_name, + selection_type_name, } - Some(as_) => 
WithLocation::new(directive.name.location, as_), - }, - }; - - // In the future we might want to relax this restriction, but for now this allows us - // to avoid having to consider how @alias would interact - // with all other directives like @defer. - if directives.len() > 1 { - self.errors.push(Diagnostic::error( - ValidationMessage::FragmentAliasIncompatibleDirective, - directive.name.location, - )); - return Transformed::Keep; + }; + self.errors.push(Diagnostic::error_with_data( + diagnostic, + spread.fragment.location, + )) } - Transformed::Replace( - FragmentAliasMetadata { - alias, - type_condition, - selection_type: type_condition.unwrap_or( - self.parent_type - .expect("Selection should be within a parent type."), - ), - } - .into(), - ) - }) + } } } @@ -186,60 +211,161 @@ impl Transformer for FragmentAliasTransform<'_> { transformed } + fn transform_condition(&mut self, condition: &Condition) -> Transformed { + self.maybe_condition = Some(condition.clone()); + let selections = transform_list(&condition.selections, |selection| { + self.transform_selection(selection) + }); + self.maybe_condition = None; + if let TransformedValue::Replace(selections) = &selections { + if !Self::RETAIN_EMPTY_SELECTION_SETS && selections.is_empty() { + return Transformed::Delete; + } + } + let condition_value = self.transform_condition_value(&condition.value); + if selections.should_keep() && condition_value.should_keep() { + Transformed::Keep + } else { + Transformed::Replace(Selection::Condition(Arc::new(Condition { + value: condition_value.replace_or_else(|| condition.value.clone()), + selections: selections.replace_or_else(|| condition.selections.clone()), + ..condition.clone() + }))) + } + } + + fn transform_selections( + &mut self, + selections: &[Selection], + ) -> TransformedValue> { + self.maybe_condition = None; + transform_list(selections, |selection| self.transform_selection(selection)) + } + fn transform_inline_fragment(&mut self, fragment: &InlineFragment) -> 
Transformed { - let get_default_name = || { - fragment - .type_condition - .map(|type_| self.program.schema.get_type_name(type_)) - }; let previous_parent_type = self.parent_type; + let previous_within_inline_fragment_type_condition = + self.within_inline_fragment_type_condition; - if let Some(type_condition) = fragment.type_condition { - self.parent_type = Some(type_condition); - } + self.within_inline_fragment_type_condition = fragment.type_condition.is_some(); + + let transformed = match fragment.alias(&self.program.schema) { + Ok(Some(alias)) => { + if !self.is_enabled { + self.errors.push(Diagnostic::error( + ValidationMessage::FragmentAliasDirectiveDisabled, + alias.location, + )); + self.default_transform_inline_fragment(fragment); + } + + // Note: This must be called before we set self.parent_type + let will_always_match = self.will_always_match(fragment.type_condition); + + if let Some(type_condition) = fragment.type_condition { + self.parent_type = Some(type_condition); + } + + let alias_metadata = FragmentAliasMetadata { + alias, + type_condition: fragment.type_condition, + non_nullable: will_always_match, + selection_type: self + .parent_type + .expect("Selection should be within a parent type."), + }; + + let mut directives = fragment.directives.clone(); + directives.push(alias_metadata.into()); - let transformed = match self.transform_alias_directives( - &fragment.directives, - fragment.type_condition, - get_default_name, - ) { - TransformedValue::Keep => self.default_transform_inline_fragment(fragment), - TransformedValue::Replace(next_directives) => { Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { - directives: next_directives, + directives, type_condition: fragment.type_condition, selections: self .transform_selections(&fragment.selections) .replace_or_else(|| fragment.selections.clone()), - spread_location: Location::generated(), + spread_location: fragment.spread_location, }))) } + Ok(None) => { + // Note: We 
intentionally don't set self.parent_type here, even if we + // have at type conditions. This is because Relay does not always accurately model + // inline fragment type refinements as discriminated unions in its + // Flow/TypeScript types. This means the inline fragment might not actually result + // in a spread that can only be accessed when the type condition has + // been set. + // + // By leaving the parent selection's parent type we will require + // `@alias` on any spread that could fail to match with its top level + // selection set type. + self.default_transform_inline_fragment(fragment) + } + Err(diagnostics) => { + self.errors.extend(diagnostics); + self.default_transform_inline_fragment(fragment) + } }; self.parent_type = previous_parent_type; - + self.within_inline_fragment_type_condition = previous_within_inline_fragment_type_condition; transformed } fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { - let type_condition = Some( - self.program - .fragment(spread.fragment.item) - .expect("I believe we have already validated that all fragments exist") - .type_condition, - ); - let get_default_name = || Some(spread.fragment.item.0); - self.transform_alias_directives(&spread.directives, type_condition, get_default_name) - .map(|directives| { - Selection::FragmentSpread(Arc::new(FragmentSpread { + let fragment = self + .program + .fragment(spread.fragment.item) + .expect("I believe we have already validated that all fragments exist"); + + let type_condition = Some(fragment.type_condition); + + match spread.alias() { + Ok(Some(alias)) => { + if !self.is_enabled { + self.errors.push(Diagnostic::error( + ValidationMessage::FragmentAliasDirectiveDisabled, + alias.location, + )); + } + + let is_plural = RelayDirective::find(&fragment.directives) + .map_or(false, |directive| directive.plural); + + if is_plural { + self.errors.push(Diagnostic::error( + ValidationMessage::PluralFragmentAliasNotSupported, + alias.location, + )); + } + 
+ let alias_metadata = FragmentAliasMetadata { + alias, + type_condition, + non_nullable: self.will_always_match(type_condition), + selection_type: self + .parent_type + .expect("Selection should be within a parent type."), + }; + + let mut directives = spread.directives.clone(); + directives.push(alias_metadata.into()); + + Transformed::Replace(Selection::FragmentSpread(Arc::new(FragmentSpread { fragment: spread.fragment, arguments: spread.arguments.clone(), directives, - })) - }) - .into() + }))) + } + Ok(None) => { + self.validate_unaliased_fragment_spread(type_condition, spread); + self.default_transform_fragment_spread(spread) + } + Err(diagnostics) => { + self.errors.extend(diagnostics); + self.default_transform_fragment_spread(spread) + } + } } - fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { let previous_parent_type = self.parent_type; diff --git a/compiler/crates/relay-transforms/src/generate_id_field.rs b/compiler/crates/relay-transforms/src/generate_id_field.rs index 2125e9a2c7f94..5d54604348ff7 100644 --- a/compiler/crates/relay-transforms/src/generate_id_field.rs +++ b/compiler/crates/relay-transforms/src/generate_id_field.rs @@ -250,7 +250,7 @@ impl<'s> GenerateIDFieldTransform<'s> { node_interface.id_field, ))); } - result.extend(next_selections.into_iter()); + result.extend(next_selections); TransformedValue::Replace(result) } diff --git a/compiler/crates/relay-transforms/src/generate_live_query_metadata.rs b/compiler/crates/relay-transforms/src/generate_live_query_metadata.rs index 923a0dd19f712..17f4e7625db10 100644 --- a/compiler/crates/relay-transforms/src/generate_live_query_metadata.rs +++ b/compiler/crates/relay-transforms/src/generate_live_query_metadata.rs @@ -141,7 +141,8 @@ impl Transformer for GenerateLiveQueryMetadata { } } -#[derive(Error, Debug)] +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] enum ValidationMessage { #[error( "Live query expects 'polling_interval' or 'config_id' as an 
argument to @live_query to for root field {query_name}" diff --git a/compiler/crates/relay-transforms/src/generate_relay_resolvers_model_fragments.rs b/compiler/crates/relay-transforms/src/generate_relay_resolvers_model_fragments.rs index a898aaee983db..0790fc3c63b88 100644 --- a/compiler/crates/relay-transforms/src/generate_relay_resolvers_model_fragments.rs +++ b/compiler/crates/relay-transforms/src/generate_relay_resolvers_model_fragments.rs @@ -9,26 +9,33 @@ use std::sync::Arc; use common::NamedItem; use common::WithLocation; +use docblock_shared::ResolverSourceHash; use docblock_shared::RELAY_RESOLVER_MODEL_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; +use docblock_shared::RELAY_RESOLVER_SOURCE_HASH; +use docblock_shared::RELAY_RESOLVER_SOURCE_HASH_VALUE; +use graphql_ir::associated_data_impl; +use graphql_ir::Directive; use graphql_ir::FragmentDefinition; use graphql_ir::FragmentDefinitionName; use graphql_ir::Program; use graphql_ir::ScalarField; use graphql_ir::Selection; use intern::string_key::Intern; -use intern::string_key::StringKey; -use lazy_static::lazy_static; +use relay_config::ProjectName; use relay_config::SchemaConfig; use schema::Schema; -lazy_static! { - // Using a longer name version for this "special" field - // help us avoid potential collision with product code (__self, __instance can be used for something else) - static ref RESOLVER_MODEL_INSTANCE_FIELD_NAME: StringKey = - "__relay_model_instance".intern(); -} +/// Currently, this is a wrapper of the hash of the resolver source code. +/// But we can change this `ArtifactSourceKeyData` to be an +/// enum and also represent the `fragment` or `operation` names. 
+#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct ArtifactSourceKeyData(pub ResolverSourceHash); + +associated_data_impl!(ArtifactSourceKeyData); pub fn generate_relay_resolvers_model_fragments( + project_name: ProjectName, program: &Program, schema_config: &SchemaConfig, ) -> Program { @@ -44,31 +51,33 @@ pub fn generate_relay_resolvers_model_fragments( .is_some() { let object_type = program.schema.get_type(object.name.item.0).unwrap(); - let model_instance_field_id = program .schema - .named_field(object_type, *RESOLVER_MODEL_INSTANCE_FIELD_NAME) + .named_field(object_type, *RELAY_RESOLVER_MODEL_INSTANCE_FIELD) .unwrap_or_else(|| { panic!( "Objects with directive @{} expected to have field `{}`.", - *RELAY_RESOLVER_MODEL_DIRECTIVE_NAME, *RESOLVER_MODEL_INSTANCE_FIELD_NAME + *RELAY_RESOLVER_MODEL_DIRECTIVE_NAME, *RELAY_RESOLVER_MODEL_INSTANCE_FIELD ) }); let model_fragment_name = FragmentDefinitionName( - format!( - "{}__{}", - object.name.item.0, *RESOLVER_MODEL_INSTANCE_FIELD_NAME - ) - .intern(), + project_name + .generate_name_for_object_and_field( + object.name.item.0, + *RELAY_RESOLVER_MODEL_INSTANCE_FIELD, + ) + .intern(), ); + let resolver_field = program.schema.field(model_instance_field_id); + let fragment_directives = directives_with_artifact_source(resolver_field); let fragment_definition = FragmentDefinition { name: WithLocation::new(object.name.location, model_fragment_name), variable_definitions: vec![], used_global_variables: vec![], type_condition: object_type, - directives: vec![], + directives: fragment_directives, selections: vec![Selection::ScalarField(Arc::new(ScalarField { alias: None, definition: WithLocation::generated(model_instance_field_id), @@ -87,11 +96,12 @@ pub fn generate_relay_resolvers_model_fragments( .named_field(object_type, schema_config.node_interface_id_field) { let id_fragment_name = FragmentDefinitionName( - format!( - "{}__{}", - object.name.item.0, schema_config.node_interface_id_field - ) - .intern(), + 
project_name + .generate_name_for_object_and_field( + object.name.item.0, + schema_config.node_interface_id_field, + ) + .intern(), ); let id_fragment = FragmentDefinition { @@ -99,7 +109,7 @@ pub fn generate_relay_resolvers_model_fragments( variable_definitions: vec![], used_global_variables: vec![], type_condition: object_type, - directives: vec![], + directives: directives_with_artifact_source(resolver_field), selections: vec![Selection::ScalarField(Arc::new(ScalarField { alias: None, definition: WithLocation::generated(id_field_id), @@ -115,3 +125,20 @@ pub fn generate_relay_resolvers_model_fragments( next_program } + +pub fn get_resolver_source_hash(field: &schema::Field) -> Option { + field + .directives + .named(*RELAY_RESOLVER_SOURCE_HASH) + .and_then(|directive| directive.arguments.named(*RELAY_RESOLVER_SOURCE_HASH_VALUE)) + .and_then(|source| source.value.get_string_literal()) + .map(ResolverSourceHash::from_raw) +} + +pub(crate) fn directives_with_artifact_source(field: &schema::Field) -> Vec { + if let Some(source_hash) = get_resolver_source_hash(field) { + vec![ArtifactSourceKeyData(source_hash).into()] + } else { + vec![] + } +} diff --git a/compiler/crates/relay-transforms/src/generate_relay_resolvers_operations_for_nested_objects.rs b/compiler/crates/relay-transforms/src/generate_relay_resolvers_operations_for_nested_objects.rs index e9fe8bcafa3f3..3c36b9cada4b4 100644 --- a/compiler/crates/relay-transforms/src/generate_relay_resolvers_operations_for_nested_objects.rs +++ b/compiler/crates/relay-transforms/src/generate_relay_resolvers_operations_for_nested_objects.rs @@ -15,6 +15,7 @@ use common::NamedItem; use common::WithLocation; use docblock_shared::HAS_OUTPUT_TYPE_ARGUMENT_NAME; use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE; use graphql_ir::InlineFragment; use graphql_ir::LinkedField; use graphql_ir::OperationDefinition; @@ -24,6 +25,7 @@ use graphql_ir::ScalarField; use 
graphql_ir::Selection; use graphql_syntax::OperationKind; use intern::string_key::Intern; +use relay_config::ProjectName; use relay_config::SchemaConfig; use schema::Field; use schema::FieldID; @@ -34,11 +36,13 @@ use schema::SDLSchema; use schema::Schema; use schema::Type; +use crate::generate_relay_resolvers_model_fragments::directives_with_artifact_source; use crate::get_normalization_operation_name; use crate::match_::RawResponseGenerationMode; use crate::relay_resolvers::get_bool_argument_is_true; use crate::SplitOperationMetadata; use crate::ValidationMessage; +use crate::RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE; fn generate_fat_selections_from_type( schema: &SDLSchema, @@ -439,6 +443,7 @@ fn generate_selections_from_interface_fields( } pub(crate) fn generate_name_for_nested_object_operation( + project_name: ProjectName, schema: &SDLSchema, field: &Field, ) -> WithLocation { @@ -446,10 +451,10 @@ pub(crate) fn generate_name_for_nested_object_operation( .parent_type .unwrap_or_else(|| panic!("Expected parent type for field {:?}.", field)); - let normalization_name = get_normalization_operation_name( - format!("{}__{}", schema.get_type_name(parent_type), field.name.item).intern(), - ) - .intern(); + let name = project_name + .generate_name_for_object_and_field(schema.get_type_name(parent_type), field.name.item); + + let normalization_name = get_normalization_operation_name(name.intern()).intern(); field .name @@ -457,6 +462,7 @@ pub(crate) fn generate_name_for_nested_object_operation( } pub fn generate_relay_resolvers_operations_for_nested_objects( + project_name: ProjectName, program: &Program, schema_config: &SchemaConfig, ) -> DiagnosticsResult { @@ -468,21 +474,47 @@ pub fn generate_relay_resolvers_operations_for_nested_objects( } if let Some(directive) = field.directives.named(*RELAY_RESOLVER_DIRECTIVE_NAME) { + // For resolvers that belong to the base schema, we don't need to generate fragments. 
+ // These fragments should be generated during compilcation of the base project. + if field + .directives + .named(*RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE) + .is_some() + { + continue; + } + let has_output_type = get_bool_argument_is_true(&directive.arguments, *HAS_OUTPUT_TYPE_ARGUMENT_NAME); if !has_output_type { continue; } + let inner_field_type = field.type_.inner(); + // Allow scalar/enums as @outputType - if field.type_.inner().is_scalar() || field.type_.inner().is_enum() { + if inner_field_type.is_scalar() || inner_field_type.is_enum() { + continue; + } + + let is_model = inner_field_type + .get_object_id() + .and_then(|object_id| { + let object = program.schema.object(object_id); + object + .directives + .named(*RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE) + }) + .is_some(); + + if is_model { continue; } let selections = match generate_fat_selections_from_type( &program.schema, schema_config, - field.type_.inner(), + inner_field_type, field, ) { Ok(selections) => selections, @@ -496,26 +528,21 @@ pub fn generate_relay_resolvers_operations_for_nested_objects( continue; } - let operation_name = generate_name_for_nested_object_operation(&program.schema, field); + let operation_name = + generate_name_for_nested_object_operation(project_name, &program.schema, field); - let parent_documents = { - let mut parent_documents = HashSet::default(); - parent_documents.insert(operation_name.item.into()); - parent_documents - }; - - let directives = vec![ + let mut directives = directives_with_artifact_source(field); + directives.push( SplitOperationMetadata { location: field.name.location, - parent_documents, + parent_documents: Default::default(), derived_from: None, raw_response_type_generation_mode: Some( RawResponseGenerationMode::AllFieldsRequired, ), } .into(), - ]; - + ); let operation = OperationDefinition { name: operation_name, type_: field.type_.inner(), diff --git a/compiler/crates/relay-transforms/src/generate_relay_resolvers_root_fragment_split_operation.rs 
b/compiler/crates/relay-transforms/src/generate_relay_resolvers_root_fragment_split_operation.rs new file mode 100644 index 0000000000000..8371b6c274957 --- /dev/null +++ b/compiler/crates/relay-transforms/src/generate_relay_resolvers_root_fragment_split_operation.rs @@ -0,0 +1,124 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::DiagnosticsResult; +use common::NamedItem; +use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use graphql_ir::associated_data_impl; +use graphql_ir::ExecutableDefinition; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_syntax::OperationKind; +use intern::string_key::Intern; +use rustc_hash::FxHashSet; +use schema::SDLSchema; + +use crate::get_normalization_operation_name; +use crate::get_resolver_fragment_dependency_name; +use crate::SplitOperationMetadata; +use crate::RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE; + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +struct IsResolverRootFragment(); +associated_data_impl!(IsResolverRootFragment); + +pub fn generate_relay_resolvers_root_fragment_split_operation( + program: &Program, +) -> DiagnosticsResult { + let mut operations = vec![]; + for fragment in program.fragments() { + if IsResolverRootFragment::find(&fragment.directives).is_some() { + operations.push(Arc::new(OperationDefinition { + name: fragment.name.map(|name| { + OperationDefinitionName(get_normalization_operation_name(name.0).intern()) + }), + type_: fragment.type_condition, + variable_definitions: fragment.variable_definitions.clone(), + directives: vec![ + SplitOperationMetadata { + location: fragment.name.location, + parent_documents: FxHashSet::from_iter([fragment.name.item.into()]), + 
derived_from: Some(fragment.name.item), + raw_response_type_generation_mode: None, + } + .into(), + ], + selections: fragment.selections.clone(), + kind: OperationKind::Query, + })); + } + } + + if operations.is_empty() { + Ok(program.clone()) + } else { + let mut next_program = program.clone(); + + for operation in operations { + next_program.insert_operation(operation) + } + + Ok(next_program) + } +} + +fn get_resolver_root_fragment_names(schema: &SDLSchema) -> FxHashSet { + let mut names = FxHashSet::default(); + for field in schema.get_fields() { + if !field.is_extension + || field + .directives + .named(*RELAY_RESOLVER_DIRECTIVE_NAME) + .is_none() + || field + .directives + .named(*RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE) + .is_some() + { + continue; + } + + if let Some(root_fragment_name) = get_resolver_fragment_dependency_name(field) { + names.insert(root_fragment_name); + } + } + names +} + +/// Adds a directive on all `FragmentDefinition`s in IR that are marked as a `@rootFragment` +/// for any resolver backed field in the schema (but not base schema) +pub fn annotate_resolver_root_fragments( + schema: &SDLSchema, + ir: Vec, +) -> Vec { + let resolver_root_fragment_names = get_resolver_root_fragment_names(schema); + ir.into_iter() + .map(|def| { + if let ExecutableDefinition::Fragment(ref fragment) = def { + return if resolver_root_fragment_names.contains(&fragment.name.item) { + ExecutableDefinition::Fragment(FragmentDefinition { + directives: fragment + .directives + .iter() + .cloned() + .chain(vec![IsResolverRootFragment().into()]) + .collect(), + ..fragment.clone() + }) + } else { + def + }; + } + def + }) + .collect() +} diff --git a/compiler/crates/relay-transforms/src/generate_typename.rs b/compiler/crates/relay-transforms/src/generate_typename.rs index 7b6c691bc1ab7..6b21adcf03494 100644 --- a/compiler/crates/relay-transforms/src/generate_typename.rs +++ b/compiler/crates/relay-transforms/src/generate_typename.rs @@ -100,7 +100,7 @@ impl<'s> 
Transformer for GenerateTypenameTransform<'s> { self.is_for_codegen, )); if let TransformedValue::Replace(selections) = selections { - next_selections.extend(selections.into_iter()) + next_selections.extend(selections) } else { next_selections.extend(fragment.selections.iter().cloned()) }; @@ -132,7 +132,7 @@ impl<'s> Transformer for GenerateTypenameTransform<'s> { directives: Default::default(), }))); if let TransformedValue::Replace(selections) = selections { - next_selections.extend(selections.into_iter()) + next_selections.extend(selections) } else { next_selections.extend(field.selections.iter().cloned()); } @@ -187,7 +187,7 @@ impl<'s> Transformer for GenerateTypenameTransform<'s> { self.is_for_codegen, )); if let TransformedValue::Replace(selections) = selections { - next_selections.extend(selections.into_iter()) + next_selections.extend(selections) } else { next_selections.extend(fragment.selections.iter().cloned()) }; diff --git a/compiler/crates/relay-transforms/src/handle_fields/mod.rs b/compiler/crates/relay-transforms/src/handle_fields.rs similarity index 100% rename from compiler/crates/relay-transforms/src/handle_fields/mod.rs rename to compiler/crates/relay-transforms/src/handle_fields.rs diff --git a/compiler/crates/relay-transforms/src/handle_fields/handle_field_transform.rs b/compiler/crates/relay-transforms/src/handle_fields/handle_field_transform.rs index 8f076d9c14023..27c997c5950a6 100644 --- a/compiler/crates/relay-transforms/src/handle_fields/handle_field_transform.rs +++ b/compiler/crates/relay-transforms/src/handle_fields/handle_field_transform.rs @@ -36,7 +36,7 @@ pub fn handle_field_transform(program: &Program) -> Program { struct HandleFieldTransform; -impl<'s> HandleFieldTransform { +impl HandleFieldTransform { fn update_arguments( &self, arguments: &mut Vec, @@ -58,7 +58,7 @@ impl<'s> HandleFieldTransform { } } -impl<'s> Transformer for HandleFieldTransform { +impl Transformer for HandleFieldTransform { const NAME: &'static str = 
"HandleFieldTransform"; const VISIT_ARGUMENTS: bool = false; const VISIT_DIRECTIVES: bool = false; diff --git a/compiler/crates/relay-transforms/src/hash_arguments.rs b/compiler/crates/relay-transforms/src/hash_arguments.rs index 4d5a8f73902f4..44bbebd61601b 100644 --- a/compiler/crates/relay-transforms/src/hash_arguments.rs +++ b/compiler/crates/relay-transforms/src/hash_arguments.rs @@ -107,7 +107,7 @@ fn build_constant_value_string(value: &ConstantValue) -> String { ConstantValue::List(val_list) => { let json_values = val_list .iter() - .map(|val| build_constant_value_string(val)) + .map(build_constant_value_string) .collect::>(); format!("[{}]", json_values.join(",")) diff --git a/compiler/crates/relay-transforms/src/inline_data_fragment.rs b/compiler/crates/relay-transforms/src/inline_data_fragment.rs index e75283f55b7c9..c71c4b4be1f14 100644 --- a/compiler/crates/relay-transforms/src/inline_data_fragment.rs +++ b/compiler/crates/relay-transforms/src/inline_data_fragment.rs @@ -24,9 +24,13 @@ use graphql_ir::Transformed; use graphql_ir::Transformer; use graphql_ir::VariableDefinition; use intern::string_key::Intern; -use once_cell::sync::Lazy; +use lazy_static::lazy_static; use thiserror::Error; +lazy_static! 
{ + pub static ref INLINE_DIRECTIVE_NAME: DirectiveName = DirectiveName("inline".intern()); +} + pub fn inline_data_fragment(program: &Program) -> DiagnosticsResult { let mut transform = InlineDataFragmentsTransform::new(program); let next_program = transform @@ -40,9 +44,6 @@ pub fn inline_data_fragment(program: &Program) -> DiagnosticsResult { } } -pub const INLINE_DIRECTIVE_NAME: Lazy = - Lazy::new(|| DirectiveName("inline".intern())); - struct InlineDataFragmentsTransform<'s> { program: &'s Program, errors: Vec, @@ -168,7 +169,8 @@ impl<'s> Transformer for InlineDataFragmentsTransform<'s> { } } -#[derive(Error, Debug)] +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] enum ValidationMessage { #[error("Found a circular reference from fragment '{fragment_name}'.")] CircularFragmentReference { diff --git a/compiler/crates/relay-transforms/src/inline_fragments.rs b/compiler/crates/relay-transforms/src/inline_fragments.rs index 936fb2141cae5..9a9382dac3094 100644 --- a/compiler/crates/relay-transforms/src/inline_fragments.rs +++ b/compiler/crates/relay-transforms/src/inline_fragments.rs @@ -21,7 +21,6 @@ use graphql_ir::Selection; use graphql_ir::Transformed; use graphql_ir::Transformer; -use crate::relay_client_component::RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME; use crate::NoInlineFragmentSpreadMetadata; use crate::RelayLocationAgnosticBehavior; @@ -29,7 +28,14 @@ use crate::RelayLocationAgnosticBehavior; /// fragment's directives and selections. Used for constructing a Normalization /// AST that contains all the selections that may be found in the query response. 
pub fn inline_fragments(program: &Program) -> Program { - let mut transform = InlineFragmentsTransform::new(program); + let mut transform = InlineFragmentsTransform::new(program, true); + transform + .transform_program(program) + .replace_or_else(|| program.clone()) +} + +pub fn inline_fragments_keep_fragments(program: &Program) -> Program { + let mut transform = InlineFragmentsTransform::new(program, false); transform .transform_program(program) .replace_or_else(|| program.clone()) @@ -61,13 +67,15 @@ impl Hash for FragmentSpreadKey { struct InlineFragmentsTransform<'s> { program: &'s Program, seen: Seen, + remove_fragments: bool, } impl<'s> InlineFragmentsTransform<'s> { - fn new(program: &'s Program) -> Self { + fn new(program: &'s Program, remove_fragments: bool) -> Self { Self { program, seen: Default::default(), + remove_fragments, } } @@ -118,9 +126,13 @@ impl<'s> Transformer for InlineFragmentsTransform<'s> { fn transform_fragment( &mut self, - _fragment: &FragmentDefinition, + fragment: &FragmentDefinition, ) -> Transformed { - Transformed::Delete + if self.remove_fragments { + Transformed::Delete + } else { + self.default_transform_fragment(fragment) + } } fn transform_selection(&mut self, selection: &Selection) -> Transformed { @@ -128,7 +140,6 @@ impl<'s> Transformer for InlineFragmentsTransform<'s> { Selection::FragmentSpread(selection) => { let should_skip_inline = selection.directives.iter().any(|directive| { directive.name.item == NoInlineFragmentSpreadMetadata::directive_name() - || directive.name.item == *RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME }); if should_skip_inline { Transformed::Keep diff --git a/compiler/crates/relay-transforms/src/lib.rs b/compiler/crates/relay-transforms/src/lib.rs index 80837f84750a8..422e638bfe492 100644 --- a/compiler/crates/relay-transforms/src/lib.rs +++ b/compiler/crates/relay-transforms/src/lib.rs @@ -16,6 +16,7 @@ mod apply_custom_transforms; mod apply_fragment_arguments; mod apply_transforms; mod 
assignable_fragment_spread; +mod catch_directive; mod client_edges; mod client_extensions; mod client_extensions_abstract_types; @@ -29,8 +30,9 @@ mod fragment_alias_directive; mod generate_data_driven_dependency_metadata; mod generate_id_field; mod generate_live_query_metadata; -mod generate_relay_resolvers_model_fragments; +pub mod generate_relay_resolvers_model_fragments; mod generate_relay_resolvers_operations_for_nested_objects; +mod generate_relay_resolvers_root_fragment_split_operation; mod generate_typename; mod handle_fields; mod hash_arguments; @@ -43,13 +45,12 @@ mod murmurhash; mod no_inline; mod preloadable_directive; mod provided_variable_fragment_transform; -mod react_flight; mod refetchable_fragment; mod relay_actor_change; -mod relay_client_component; mod relay_directive; mod relay_node_identifier; -mod relay_resolvers; +pub mod relay_resolvers; +mod relay_resolvers_abstract_types; mod remove_base_fragments; mod required_directive; mod root_variables; @@ -102,11 +103,19 @@ pub use assignable_fragment_spread::ASSIGNABLE_DIRECTIVE; pub use assignable_fragment_spread::ASSIGNABLE_DIRECTIVE_FOR_TYPEGEN; pub use assignable_fragment_spread::UPDATABLE_DIRECTIVE; pub use assignable_fragment_spread::UPDATABLE_DIRECTIVE_FOR_TYPEGEN; +pub use catch_directive::catch_directive; +pub use catch_directive::CatchMetadataDirective; +pub use catch_directive::CatchTo; +pub use catch_directive::CATCH_DIRECTIVE_NAME; +pub use catch_directive::NULL_TO; +pub use catch_directive::RESULT_TO; +pub use catch_directive::TO_ARGUMENT; pub use client_edges::client_edges; pub use client_edges::remove_client_edge_selections; pub use client_edges::ClientEdgeGeneratedQueryMetadataDirective; pub use client_edges::ClientEdgeMetadata; pub use client_edges::ClientEdgeMetadataDirective; +pub use client_edges::ClientEdgeModelResolver; pub use client_edges::CLIENT_EDGE_SOURCE_NAME; pub use client_edges::CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME; pub use client_extensions::client_extensions; @@ 
-121,7 +130,6 @@ pub use declarative_connection::transform_declarative_connection; pub use defer_stream::transform_defer_stream; pub use defer_stream::DeferDirective; pub use defer_stream::StreamDirective; -pub use defer_stream::DEFER_STREAM_CONSTANTS; pub use directive_finder::DirectiveFinder; pub use flatten::flatten; pub use fragment_alias_directive::fragment_alias_directive; @@ -130,7 +138,10 @@ pub use generate_data_driven_dependency_metadata::generate_data_driven_dependenc pub use generate_data_driven_dependency_metadata::RelayDataDrivenDependencyMetadata; pub use generate_id_field::generate_id_field; pub use generate_live_query_metadata::generate_live_query_metadata; +pub use generate_relay_resolvers_model_fragments::ArtifactSourceKeyData; pub use generate_relay_resolvers_operations_for_nested_objects::generate_relay_resolvers_operations_for_nested_objects; +pub use generate_relay_resolvers_root_fragment_split_operation::annotate_resolver_root_fragments; +pub use generate_relay_resolvers_root_fragment_split_operation::generate_relay_resolvers_root_fragment_split_operation; pub use generate_typename::generate_typename; pub use generate_typename::TYPE_DISCRIMINATOR_DIRECTIVE_NAME; pub use handle_fields::extract_handle_field_directives; @@ -141,6 +152,7 @@ pub use inline_data_fragment::inline_data_fragment; pub use inline_data_fragment::InlineDirectiveMetadata; pub use inline_data_fragment::INLINE_DIRECTIVE_NAME; pub use inline_fragments::inline_fragments; +pub use inline_fragments::inline_fragments_keep_fragments; pub use mask::mask; pub use match_::split_module_import; pub use match_::transform_match; @@ -154,9 +166,6 @@ pub use no_inline::NO_INLINE_DIRECTIVE_NAME; pub use preloadable_directive::is_operation_preloadable; pub use preloadable_directive::should_generate_hack_preloader; pub use provided_variable_fragment_transform::provided_variable_fragment_transform; -pub use react_flight::react_flight; -pub use react_flight::ReactFlightLocalComponentsMetadata; 
-pub use react_flight::REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY; pub use refetchable_fragment::transform_refetchable_fragment; pub use refetchable_fragment::RefetchableDerivedFromMetadata; pub use refetchable_fragment::RefetchableMetadata; @@ -164,25 +173,24 @@ pub use refetchable_fragment::CONSTANTS as REFETCHABLE_CONSTANTS; pub use refetchable_fragment::REFETCHABLE_NAME; pub use relay_actor_change::relay_actor_change_transform; pub use relay_actor_change::RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN; -pub use relay_client_component::relay_client_component; -pub use relay_client_component::RelayClientComponentMetadata; -pub use relay_client_component::RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME; -pub use relay_client_component::RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME; -pub use relay_client_component::RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME; pub use relay_directive::RelayDirective; pub use relay_node_identifier::RelayLocationAgnosticBehavior; pub use relay_resolvers::get_resolver_fragment_dependency_name; pub use relay_resolvers::relay_resolvers; +pub use relay_resolvers::resolver_type_import_alias; pub use relay_resolvers::FragmentDataInjectionMode; pub use relay_resolvers::RelayResolverMetadata; pub use relay_resolvers::ResolverOutputTypeInfo; +pub use relay_resolvers_abstract_types::relay_resolvers_abstract_types; pub use remove_base_fragments::remove_base_fragments; +pub use remove_base_fragments::RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE; pub use required_directive::required_directive; pub use required_directive::RequiredAction; pub use required_directive::RequiredMetadataDirective; pub use required_directive::ACTION_ARGUMENT; pub use required_directive::CHILDREN_CAN_BUBBLE_METADATA_KEY; pub use required_directive::REQUIRED_DIRECTIVE_NAME; +pub use required_directive::THROW_ACTION; pub use skip_client_directives::skip_client_directives; pub use skip_client_extensions::skip_client_extensions; pub use 
skip_null_arguments_transform::skip_null_arguments_transform; diff --git a/compiler/crates/relay-transforms/src/match_/mod.rs b/compiler/crates/relay-transforms/src/match_.rs similarity index 100% rename from compiler/crates/relay-transforms/src/match_/mod.rs rename to compiler/crates/relay-transforms/src/match_.rs diff --git a/compiler/crates/relay-transforms/src/match_/hash_supported_argument.rs b/compiler/crates/relay-transforms/src/match_/hash_supported_argument.rs index 74c9cf5fcb963..1c7ad5e55cab0 100644 --- a/compiler/crates/relay-transforms/src/match_/hash_supported_argument.rs +++ b/compiler/crates/relay-transforms/src/match_/hash_supported_argument.rs @@ -9,8 +9,6 @@ use std::sync::Arc; use common::Diagnostic; use common::DiagnosticsResult; -use common::FeatureFlag; -use common::FeatureFlags; use common::NamedItem; use graphql_ir::ConstantValue; use graphql_ir::LinkedField; @@ -29,13 +27,9 @@ use thiserror::Error; use super::MATCH_CONSTANTS; use crate::murmurhash::murmurhash; -pub fn hash_supported_argument( - program: &Program, - feature_flags: &FeatureFlags, -) -> DiagnosticsResult { +pub fn hash_supported_argument(program: &Program) -> DiagnosticsResult { let mut transformer = HashSupportedArgumentTransform { schema: &program.schema, - hash_supported_argument: &feature_flags.hash_supported_argument, errors: Default::default(), }; let next_program = transformer.transform_program(program); @@ -48,7 +42,6 @@ pub fn hash_supported_argument( struct HashSupportedArgumentTransform<'a> { schema: &'a SDLSchema, - hash_supported_argument: &'a FeatureFlag, errors: Vec, } @@ -135,14 +128,6 @@ impl<'a> HashSupportedArgumentTransform<'a> { .named(MATCH_CONSTANTS.supported_arg) .expect("field has supported arg, but missing from the schema"); - let field_type_name = { - let field_type = self.schema.field(field.definition.item).type_.inner(); - self.schema.get_type_name(field_type) - }; - if !self.hash_supported_argument.is_enabled_for(field_type_name) { - return 
false; - } - if let TypeReference::List(item_type) = supported_arg_def.type_.nullable_type() { if let TypeReference::Named(item_type_name) = item_type.nullable_type() { return self.schema.is_string(*item_type_name); @@ -152,7 +137,8 @@ impl<'a> HashSupportedArgumentTransform<'a> { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] pub enum HashSupportedArgumentError { #[error( "Variables cannot be passed to the `supported` argument for data driven dependency fields, please use literal values like `\"ExampleValue\"`." diff --git a/compiler/crates/relay-transforms/src/match_/match_transform.rs b/compiler/crates/relay-transforms/src/match_/match_transform.rs index 2937c91ca32c8..249b8bb0b257c 100644 --- a/compiler/crates/relay-transforms/src/match_/match_transform.rs +++ b/compiler/crates/relay-transforms/src/match_/match_transform.rs @@ -44,6 +44,7 @@ use indexmap::IndexSet; use intern::string_key::Intern; use intern::string_key::StringKey; use intern::Lookup; +use relay_config::DeferStreamInterface; use relay_config::ModuleImportConfig; use schema::FieldID; use schema::ScalarID; @@ -52,7 +53,6 @@ use schema::Type; use schema::TypeReference; use super::validation_message::ValidationMessage; -use crate::defer_stream::DEFER_STREAM_CONSTANTS; use crate::inline_data_fragment::INLINE_DIRECTIVE_NAME; use crate::match_::MATCH_CONSTANTS; use crate::no_inline::attach_no_inline_directives_to_fragments; @@ -64,8 +64,14 @@ pub fn transform_match( program: &Program, feature_flags: &FeatureFlags, module_import_config: ModuleImportConfig, + defer_stream_interface: DeferStreamInterface, ) -> DiagnosticsResult { - let mut transformer = MatchTransform::new(program, feature_flags, module_import_config); + let mut transformer = MatchTransform::new( + program, + feature_flags, + module_import_config, + defer_stream_interface, + ); let next_program = transformer.transform_program(program); if transformer.errors.is_empty() { 
Ok(next_program.replace_or_else(|| program.clone())) @@ -113,6 +119,7 @@ pub struct MatchTransform<'program, 'flag> { // Stores the fragments that should use @no_inline and their parent document name no_inline_fragments: FragmentDefinitionNameMap>, module_import_config: ModuleImportConfig, + defer_stream_interface: DeferStreamInterface, } impl<'program, 'flag> MatchTransform<'program, 'flag> { @@ -120,6 +127,7 @@ impl<'program, 'flag> MatchTransform<'program, 'flag> { program: &'program Program, feature_flags: &'flag FeatureFlags, module_import_config: ModuleImportConfig, + defer_stream_interface: DeferStreamInterface, ) -> Self { Self { program, @@ -136,6 +144,7 @@ impl<'program, 'flag> MatchTransform<'program, 'flag> { no_inline_flag: &feature_flags.no_inline, no_inline_fragments: Default::default(), module_import_config, + defer_stream_interface, } } @@ -317,7 +326,7 @@ impl<'program, 'flag> MatchTransform<'program, 'flag> { && !(spread.directives.len() == 2 && spread .directives - .named(DEFER_STREAM_CONSTANTS.defer_name) + .named(self.defer_stream_interface.defer_name) .is_some()) { // allow @defer and @module in typegen transforms @@ -476,7 +485,7 @@ impl<'program, 'flag> MatchTransform<'program, 'flag> { if should_use_no_inline { self.no_inline_fragments .entry(fragment.name.item) - .or_insert_with(std::vec::Vec::new) + .or_default() .push(self.document_name); } diff --git a/compiler/crates/relay-transforms/src/match_/split_module_import.rs b/compiler/crates/relay-transforms/src/match_/split_module_import.rs index efcfd425ebe67..ed59b23d806c4 100644 --- a/compiler/crates/relay-transforms/src/match_/split_module_import.rs +++ b/compiler/crates/relay-transforms/src/match_/split_module_import.rs @@ -142,7 +142,7 @@ impl Transformer for SplitModuleImportTransform<'_, '_> { }, OperationDefinition { name: WithLocation::new( - module_metadata.location, + module_metadata.fragment_source_location, OperationDefinitionName(normalization_name), ), type_: parent_type, 
diff --git a/compiler/crates/relay-transforms/src/match_/subscription_transform.rs b/compiler/crates/relay-transforms/src/match_/subscription_transform.rs index 0f79d5d0ba7a2..97bab9c1d3785 100644 --- a/compiler/crates/relay-transforms/src/match_/subscription_transform.rs +++ b/compiler/crates/relay-transforms/src/match_/subscription_transform.rs @@ -96,8 +96,8 @@ impl<'program> SubscriptionTransform<'program> { let object_field = self.program.schema.field(*object_field_id); if object_field.name.item == MATCH_CONSTANTS.js_field_name { // if we find a js field, it must be valid - return self.is_valid_js_dependency(&object_field.type_).then( - || ValidFieldResult { + return self.is_valid_js_dependency(&object_field.type_).then_some( + ValidFieldResult { linked_field, js_field_id: *object_field_id, fragment_spread, @@ -122,7 +122,7 @@ impl<'program> SubscriptionTransform<'program> { if linked_field.selections.len() != 1 { return None; } - let first_item = linked_field.selections.get(0).unwrap(); + let first_item = linked_field.selections.first().unwrap(); match first_item { Selection::FragmentSpread(fragment_spread) => Some(fragment_spread), _ => None, @@ -139,10 +139,10 @@ impl<'program> SubscriptionTransform<'program> { } } - fn get_replacement_selection<'operation>( + fn get_replacement_selection( &self, operation: &OperationDefinition, - valid_result: ValidFieldResult<'operation>, + valid_result: ValidFieldResult<'_>, ) -> Selection { let ValidFieldResult { linked_field, @@ -176,7 +176,8 @@ impl<'program> SubscriptionTransform<'program> { }))); let type_condition = Some( - (&self.program.schema) + self.program + .schema .field(linked_field.definition.item) .type_ .inner(), diff --git a/compiler/crates/relay-transforms/src/match_/validation_message.rs b/compiler/crates/relay-transforms/src/match_/validation_message.rs index cb610f3df8494..1f4210fbd1585 100644 --- a/compiler/crates/relay-transforms/src/match_/validation_message.rs +++ 
b/compiler/crates/relay-transforms/src/match_/validation_message.rs @@ -12,7 +12,8 @@ use graphql_ir::FragmentDefinitionName; use intern::string_key::StringKey; use thiserror::Error; -#[derive(Error, Debug)] +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] pub enum ValidationMessage { #[error("Invalid @match selection: all selections should be fragment spreads with @module.")] InvalidMatchNotAllSelectionsFragmentSpreadWithModule, diff --git a/compiler/crates/relay-transforms/src/no_inline.rs b/compiler/crates/relay-transforms/src/no_inline.rs new file mode 100644 index 0000000000000..93df115048152 --- /dev/null +++ b/compiler/crates/relay-transforms/src/no_inline.rs @@ -0,0 +1,177 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::NamedItem; +use common::WithLocation; +use graphql_ir::Argument; +use graphql_ir::ConstantValue; +use graphql_ir::Directive; +use graphql_ir::ExecutableDefinitionName; +use graphql_ir::FragmentDefinitionNameMap; +use graphql_ir::FragmentSpread; +use graphql_ir::Program; +use graphql_ir::Validator; +use graphql_ir::Value; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use lazy_static::lazy_static; + +use crate::ValidationMessage; +use crate::MATCH_CONSTANTS; + +lazy_static! 
{ + pub static ref NO_INLINE_DIRECTIVE_NAME: DirectiveName = DirectiveName("no_inline".intern()); + pub static ref PARENT_DOCUMENTS_ARG: ArgumentName = ArgumentName("__parentDocuments".intern()); + // Note: this is used as both an ArgumentName and as a DirectiveName + pub static ref RAW_RESPONSE_TYPE_NAME: StringKey = "raw_response_type".intern(); +} + +pub fn attach_no_inline_directives_to_fragments( + no_inline_fragments: &mut FragmentDefinitionNameMap>, + program: &mut Program, +) { + for (fragment_name, parent_sources) in no_inline_fragments.drain() { + let fragment = Arc::make_mut(program.fragment_mut(fragment_name).unwrap()); + + let no_inline_directive = fragment + .directives + .iter_mut() + .find(|d| d.name.item == *NO_INLINE_DIRECTIVE_NAME); + if let Some(no_inline_directive) = no_inline_directive { + let parent_documents_arg = no_inline_directive + .arguments + .iter_mut() + .find(|arg| arg.name.item == *PARENT_DOCUMENTS_ARG); + if let Some(parent_documents_arg) = parent_documents_arg { + if let Value::Constant(ConstantValue::List(parent_documents)) = + &mut parent_documents_arg.value.item + { + parent_documents.extend( + parent_sources + .into_iter() + .map(|name| ConstantValue::String(name.into())), + ); + } else { + panic!("Expected parent arguments to be a constant list of String"); + } + } else { + no_inline_directive + .arguments + .push(create_parent_documents_arg(parent_sources)); + } + } else { + fragment.directives.push(Directive { + name: WithLocation::new(fragment.name.location, *NO_INLINE_DIRECTIVE_NAME), + arguments: vec![create_parent_documents_arg(parent_sources)], + data: None, + }) + } + } +} + +pub fn is_raw_response_type_enabled(directive: &Directive) -> bool { + if let Some(Value::Constant(ConstantValue::Boolean(val))) = directive + .arguments + .named(ArgumentName(*RAW_RESPONSE_TYPE_NAME)) + .map(|arg| &arg.value.item) + { + *val + } else { + false + } +} + +/// If `@no_inline` is added to a fragment by @module or 
@relay_client_component +/// transform, and the fragment is also used without these directives, manually +/// adding `@no_inline` is required. Because in watch mode, if the path with @module +/// or @relay_client_component isn't changed, `@no_inline` won't get added. +pub fn validate_required_no_inline_directive( + no_inline_fragments: &FragmentDefinitionNameMap>, + program: &Program, +) -> DiagnosticsResult<()> { + let mut validator = RequiredNoInlineValidator::new(no_inline_fragments, program); + validator.validate_program(program) +} + +pub(crate) fn create_parent_documents_arg( + parent_sources: Vec, +) -> Argument { + Argument { + name: WithLocation::generated(*PARENT_DOCUMENTS_ARG), + value: WithLocation::generated(Value::Constant(ConstantValue::List( + parent_sources + .into_iter() + .map(|executable_definition_name| { + ConstantValue::String(executable_definition_name.into()) + }) + .collect(), + ))), + } +} + +struct RequiredNoInlineValidator<'f, 'p> { + no_inline_fragments: &'f FragmentDefinitionNameMap>, + program: &'p Program, +} + +impl<'f, 'p> RequiredNoInlineValidator<'f, 'p> { + fn new( + no_inline_fragments: &'f FragmentDefinitionNameMap>, + program: &'p Program, + ) -> Self { + Self { + no_inline_fragments, + program, + } + } +} + +impl<'f, 'p> Validator for RequiredNoInlineValidator<'f, 'p> { + const NAME: &'static str = "RequiredNoInlineValidator"; + const VALIDATE_ARGUMENTS: bool = false; + const VALIDATE_DIRECTIVES: bool = false; + + fn validate_fragment_spread(&mut self, spread: &FragmentSpread) -> DiagnosticsResult<()> { + if !self.no_inline_fragments.contains_key(&spread.fragment.item) { + return Ok(()); + } + let fragment = self.program.fragment(spread.fragment.item).unwrap(); + let has_no_inline = fragment + .directives + .named(*NO_INLINE_DIRECTIVE_NAME) + .is_some(); + if has_no_inline { + return Ok(()); + } + // If the fragment spread isn't used for @module or @relay_client_component + // then explicit @no_inline is required. 
+ if spread.directives.is_empty() + || !spread + .directives + .iter() + .any(|directive| directive.name.item == MATCH_CONSTANTS.module_directive_name) + { + Err(vec![ + Diagnostic::error( + ValidationMessage::RequiredExplicitNoInlineDirective { + fragment_name: spread.fragment.item, + }, + spread.fragment.location, + ) + .annotate("fragment definition", fragment.name.location), + ]) + } else { + Ok(()) + } + } +} diff --git a/compiler/crates/relay-transforms/src/no_inline/mod.rs b/compiler/crates/relay-transforms/src/no_inline/mod.rs deleted file mode 100644 index 432989c7fea4a..0000000000000 --- a/compiler/crates/relay-transforms/src/no_inline/mod.rs +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::NamedItem; -use common::WithLocation; -use graphql_ir::Argument; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::ExecutableDefinitionName; -use graphql_ir::FragmentDefinitionNameMap; -use graphql_ir::FragmentSpread; -use graphql_ir::Program; -use graphql_ir::Validator; -use graphql_ir::Value; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use lazy_static::lazy_static; - -use crate::ValidationMessage; -use crate::MATCH_CONSTANTS; -use crate::RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME; - -lazy_static! 
{ - pub static ref NO_INLINE_DIRECTIVE_NAME: DirectiveName = DirectiveName("no_inline".intern()); - pub static ref PARENT_DOCUMENTS_ARG: ArgumentName = ArgumentName("__parentDocuments".intern()); - // Note: this is used as both an ArgumentName and as a DirectiveName - pub static ref RAW_RESPONSE_TYPE_NAME: StringKey = "raw_response_type".intern(); -} - -pub fn attach_no_inline_directives_to_fragments( - no_inline_fragments: &mut FragmentDefinitionNameMap>, - program: &mut Program, -) { - for (fragment_name, parent_sources) in no_inline_fragments.drain() { - let fragment = Arc::make_mut(program.fragment_mut(fragment_name).unwrap()); - - let no_inline_directive = fragment - .directives - .iter_mut() - .find(|d| d.name.item == *NO_INLINE_DIRECTIVE_NAME); - if let Some(no_inline_directive) = no_inline_directive { - let parent_documents_arg = no_inline_directive - .arguments - .iter_mut() - .find(|arg| arg.name.item == *PARENT_DOCUMENTS_ARG); - if let Some(parent_documents_arg) = parent_documents_arg { - if let Value::Constant(ConstantValue::List(parent_documents)) = - &mut parent_documents_arg.value.item - { - parent_documents.extend( - parent_sources - .into_iter() - .map(|name| ConstantValue::String(name.into())), - ); - } else { - panic!("Expected parent arguments to be a constant list of String"); - } - } else { - no_inline_directive - .arguments - .push(create_parent_documents_arg(parent_sources)); - } - } else { - fragment.directives.push(Directive { - name: WithLocation::new(fragment.name.location, *NO_INLINE_DIRECTIVE_NAME), - arguments: vec![create_parent_documents_arg(parent_sources)], - data: None, - }) - } - } -} - -pub fn is_raw_response_type_enabled(directive: &Directive) -> bool { - if let Some(Value::Constant(ConstantValue::Boolean(val))) = directive - .arguments - .named(ArgumentName(*RAW_RESPONSE_TYPE_NAME)) - .map(|arg| &arg.value.item) - { - *val - } else { - false - } -} - -/// If `@no_inline` is added to a fragment by @module or 
@relay_client_component -/// transform, and the fragment is also used without these directives, manually -/// adding `@no_inline` is required. Because in watch mode, if the path with @module -/// or @relay_client_component isn't changed, `@no_inline` won't get added. -pub fn validate_required_no_inline_directive( - no_inline_fragments: &FragmentDefinitionNameMap>, - program: &Program, -) -> DiagnosticsResult<()> { - let mut validator = RequiredNoInlineValidator::new(no_inline_fragments, program); - validator.validate_program(program) -} - -pub(crate) fn create_parent_documents_arg( - parent_sources: Vec, -) -> Argument { - Argument { - name: WithLocation::generated(*PARENT_DOCUMENTS_ARG), - value: WithLocation::generated(Value::Constant(ConstantValue::List( - parent_sources - .into_iter() - .map(|executable_definition_name| { - ConstantValue::String(executable_definition_name.into()) - }) - .collect(), - ))), - } -} - -struct RequiredNoInlineValidator<'f, 'p> { - no_inline_fragments: &'f FragmentDefinitionNameMap>, - program: &'p Program, -} - -impl<'f, 'p> RequiredNoInlineValidator<'f, 'p> { - fn new( - no_inline_fragments: &'f FragmentDefinitionNameMap>, - program: &'p Program, - ) -> Self { - Self { - no_inline_fragments, - program, - } - } -} - -impl<'f, 'p> Validator for RequiredNoInlineValidator<'f, 'p> { - const NAME: &'static str = "RequiredNoInlineValidator"; - const VALIDATE_ARGUMENTS: bool = false; - const VALIDATE_DIRECTIVES: bool = false; - - fn validate_fragment_spread(&mut self, spread: &FragmentSpread) -> DiagnosticsResult<()> { - if !self.no_inline_fragments.contains_key(&spread.fragment.item) { - return Ok(()); - } - let fragment = self.program.fragment(spread.fragment.item).unwrap(); - let has_no_inline = fragment - .directives - .named(*NO_INLINE_DIRECTIVE_NAME) - .is_some(); - if has_no_inline { - return Ok(()); - } - // If the fragment spread isn't used for @module or @relay_client_component - // then explicit @no_inline is required. 
- if spread.directives.is_empty() - || !spread.directives.iter().any(|directive| { - directive.name.item == MATCH_CONSTANTS.module_directive_name - || directive.name.item == *RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME - }) - { - Err(vec![ - Diagnostic::error( - ValidationMessage::RequiredExplicitNoInlineDirective { - fragment_name: spread.fragment.item, - }, - spread.fragment.location, - ) - .annotate("fragment definition", fragment.name.location), - ]) - } else { - Ok(()) - } - } -} diff --git a/compiler/crates/relay-transforms/src/provided_variable_fragment_transform.rs b/compiler/crates/relay-transforms/src/provided_variable_fragment_transform.rs index 4cab2dd0e14b6..caf2d032fb179 100644 --- a/compiler/crates/relay-transforms/src/provided_variable_fragment_transform.rs +++ b/compiler/crates/relay-transforms/src/provided_variable_fragment_transform.rs @@ -36,12 +36,12 @@ use crate::util::format_provided_variable_name; /// This transform applies provided variables in each fragment. /// - Rename all uses of provided variables (in values) -/// [provided_variable_name] --> __pv__[module_name] +/// \[provided_variable_name\] --> __pv__\[module_name\] /// - Remove provided variables from (local) argument definitions /// - Add provided variables to list of used global variables /// apply_fragment_arguments depends on provide_variable_fragment_transform pub fn provided_variable_fragment_transform(program: &Program) -> DiagnosticsResult { - let mut transform = ProvidedVariableFragmentTransform::new(&*program.schema); + let mut transform = ProvidedVariableFragmentTransform::new(&program.schema); let program = transform .transform_program(program) .replace_or_else(|| program.clone()); @@ -72,7 +72,7 @@ impl ProvidedVariableDefinitions { let usages = self .usages_map .entry((module_name, variable_def.type_.clone())) - .or_insert_with(Vec::new); + .or_default(); usages.push(variable_def.name.location); } @@ -239,7 +239,8 @@ impl<'schema> Transformer for 
ProvidedVariableFragmentTransform<'schema> { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] enum ValidationMessage { #[error( "Modules '{module1}' and '{module2}' used by provided variables have indistinguishable names. (All non ascii-alphanumeric characters are stripped in Relay transform)" diff --git a/compiler/crates/relay-transforms/src/react_flight.rs b/compiler/crates/relay-transforms/src/react_flight.rs deleted file mode 100644 index 1e1702245afc3..0000000000000 --- a/compiler/crates/relay-transforms/src/react_flight.rs +++ /dev/null @@ -1,453 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::Location; -use common::NamedItem; -use common::WithLocation; -use graphql_ir::associated_data_impl; -use graphql_ir::Argument; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionNameMap; -use graphql_ir::FragmentSpread; -use graphql_ir::OperationDefinition; -use graphql_ir::Program; -use graphql_ir::ScalarField; -use graphql_ir::Selection; -use graphql_ir::Transformed; -use graphql_ir::Transformer; -use graphql_ir::Value; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::string_key::StringKeySet; -use itertools::Itertools; -use lazy_static::lazy_static; -use schema::Field; -use schema::FieldID; -use schema::Schema; -use schema::Type; -use thiserror::Error; - -lazy_static! 
{ - static ref REACT_FLIGHT_TRANSITIVE_COMPONENTS_DIRECTIVE_NAME: DirectiveName = - DirectiveName("react_flight".intern()); - static ref REACT_FLIGHT_TRANSITIVE_COMPONENTS_DIRECTIVE_ARG: ArgumentName = - ArgumentName("components".intern()); - pub static ref REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY: DirectiveName = - DirectiveName("__ReactFlightComponent".intern()); - static ref REACT_FLIGHT_COMPONENT_ARGUMENT_NAME: ArgumentName = - ArgumentName("component".intern()); - static ref REACT_FLIGHT_PROPS_ARGUMENT_NAME: ArgumentName = ArgumentName("props".intern()); - static ref REACT_FLIGHT_PROPS_TYPE: StringKey = "ReactFlightProps".intern(); - static ref REACT_FLIGHT_COMPONENT_TYPE: StringKey = "ReactFlightComponent".intern(); - static ref REACT_FLIGHT_FIELD_NAME: StringKey = "flight".intern(); - static ref REACT_FLIGHT_EXTENSION_DIRECTIVE_NAME: DirectiveName = - DirectiveName("react_flight_component".intern()); - static ref NAME_ARGUMENT: ArgumentName = ArgumentName("name".intern()); -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ReactFlightLocalComponentsMetadata { - pub components: Vec, -} -associated_data_impl!(ReactFlightLocalComponentsMetadata); - -/// Transform to find calls to React Flight schema extension fields and rewrite them into calls -/// to a generic `flight(component, props)` field. Also tracks which Flight fields each document -/// references locally (stored as a metadata directive on fragments/operations) as well as which -/// Flight fields each operation uses transitively (stored as a server directive on operations). 
-pub fn react_flight(program: &Program) -> DiagnosticsResult { - // No-op unless the special props/component types and flight directive are defined - let props_type = program.schema.get_type(*REACT_FLIGHT_PROPS_TYPE); - let component_type = program.schema.get_type(*REACT_FLIGHT_COMPONENT_TYPE); - let (props_type, component_type) = match (props_type, component_type) { - (Some(props_type), Some(component_type)) => (props_type, component_type), - _ => return Ok(program.clone()), - }; - let mut transform = ReactFlightTransform::new(program, props_type, component_type); - let transform_result = transform.transform_program(program); - if transform.errors.is_empty() { - Ok(transform_result.replace_or_else(|| program.clone())) - } else { - Err(transform.errors) - } -} - -struct ReactFlightTransform<'s> { - component_type: Type, - errors: Vec, - program: &'s Program, - props_type: Type, - // server components encountered as a dependency of the visited operation/fragment - // NOTE: this is operation/fragment-specific - local_components: StringKeySet, - transitive_components: StringKeySet, - fragments: FragmentDefinitionNameMap, -} - -enum FragmentResult { - Pending, - Resolved { - fragment: Transformed, - transitive_components: StringKeySet, - }, -} - -impl<'s> ReactFlightTransform<'s> { - fn new(program: &'s Program, props_type: Type, component_type: Type) -> Self { - Self { - component_type, - errors: Default::default(), - program, - props_type, - local_components: Default::default(), - transitive_components: Default::default(), - fragments: Default::default(), - } - } - - fn get_component_name_for_field( - &mut self, - field_definition: &Field, - location: Location, - ) -> Result { - // the field definition must specify the backing component's module name - let component_directive = match field_definition - .directives - .named(*REACT_FLIGHT_EXTENSION_DIRECTIVE_NAME) - { - Some(component_directive) => component_directive, - None => { - self.errors.push(Diagnostic::error( 
- ValidationMessage::InvalidFlightFieldMissingModuleDirective, - location, - )); - return Err(()); - } - }; - // extract the component name - let value = component_directive - .arguments - .iter() - .cloned() - .find(|arg| arg.name == *NAME_ARGUMENT) - .unwrap() - .value; - match value { - graphql_syntax::ConstantValue::String(node) => Ok(node.value), - _ => { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidFlightFieldExpectedModuleNameString, - location, - )); - Err(()) - } - } - } - - // validates that the field's parent type also has a field conforming to the - // following specification: - // - // ``` - // flight( - // component: String - // props: ReactFlightProps - // ): ReactFlightComponent - // ``` - fn validate_flight_field( - &mut self, - field_definition: &Field, - location: Location, - ) -> Result { - // not a built-in field, so there must be a parent type or the schema is - // invalid (which is a compiler error not a user error) - let parent_type = field_definition - .parent_type - .unwrap_or_else(|| panic!("Expected field to have a parent type")); - - // the parent type must have the generic `flight` field - let flight_field_id = match self - .program - .schema - .named_field(parent_type, *REACT_FLIGHT_FIELD_NAME) - { - Some(flight_field_id) => flight_field_id, - None => { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidFlightFieldNotDefinedOnType { - field_name: field_definition.name.item, - }, - location, - )); - return Err(()); - } - }; - let flight_field_definition = self.program.schema.field(flight_field_id); - - // flight field must have `props: ReactFlightProps` arg - let props_argument = flight_field_definition.arguments.iter().find(|arg| { - arg.name == *REACT_FLIGHT_PROPS_ARGUMENT_NAME && arg.type_.inner() == self.props_type - }); - if props_argument.is_none() { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidFlightFieldPropsArgument, - location, - )); - return Err(()); - } - // flight 
field must have `component: String` arg - let component_argument = flight_field_definition.arguments.iter().find(|arg| { - arg.name == *REACT_FLIGHT_COMPONENT_ARGUMENT_NAME - && Some(arg.type_.inner()) == self.program.schema.get_type("String".intern()) - }); - if component_argument.is_none() { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidFlightFieldComponentArgument, - location, - )); - return Err(()); - } - // flight field must return `ReactFlightComponent` - if flight_field_definition.type_.inner() != self.component_type { - self.errors.push(Diagnostic::error( - ValidationMessage::InvalidFlightFieldReturnType, - location, - )); - return Err(()); - } - Ok(flight_field_id) - } - - // Generate a metadata directive recording which server components were reachable - // from the visited IR nodes - fn generate_flight_local_flight_components_metadata_directive(&self) -> Directive { - ReactFlightLocalComponentsMetadata { - components: self.local_components.iter().copied().sorted().collect(), - } - .into() - } - - // Generate a server directive recording which server components were *transitively* reachable - // from the visited IR nodes - fn generate_flight_transitive_flight_components_server_directive(&self) -> Directive { - let mut components: Vec = self.transitive_components.iter().copied().collect(); - components.sort(); - Directive { - name: WithLocation::generated(*REACT_FLIGHT_TRANSITIVE_COMPONENTS_DIRECTIVE_NAME), - arguments: vec![Argument { - name: WithLocation::generated(*REACT_FLIGHT_TRANSITIVE_COMPONENTS_DIRECTIVE_ARG), - value: WithLocation::generated(Value::Constant(ConstantValue::List( - components.into_iter().map(ConstantValue::String).collect(), - ))), - }], - data: None, - } - } -} - -impl<'s> Transformer for ReactFlightTransform<'s> { - const NAME: &'static str = "ReactFlightTransform"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn transform_operation( - &mut self, - operation: 
&OperationDefinition, - ) -> Transformed { - // reset component lists per document - self.local_components.clear(); - self.transitive_components.clear(); - let transformed = self.default_transform_operation(operation); - - // if there are no locally or transitively referenced server components there is no metadata - // to add to the fragment - if self.transitive_components.is_empty() && self.local_components.is_empty() { - return transformed; - } - - let mut operation = transformed.unwrap_or_else(|| operation.clone()); - self.transitive_components - .extend(self.local_components.iter().cloned()); - operation.directives.reserve_exact(2); - operation - .directives - .push(self.generate_flight_local_flight_components_metadata_directive()); - if self - .program - .schema - .has_directive(*REACT_FLIGHT_TRANSITIVE_COMPONENTS_DIRECTIVE_NAME) - { - operation - .directives - .push(self.generate_flight_transitive_flight_components_server_directive()); - } - Transformed::Replace(operation) - } - - fn transform_fragment( - &mut self, - fragment: &FragmentDefinition, - ) -> Transformed { - if let Some(FragmentResult::Resolved { fragment, .. 
}) = - self.fragments.get(&fragment.name.item) - { - // fragment has already been visited (a previous fragment transitively referenced this one) - return fragment.clone(); - } - - // reset component lists per document - self.local_components.clear(); - self.transitive_components.clear(); - let transformed = self.default_transform_fragment(fragment); - - // if there are no locally referenced server components there is no metadata to add to the fragment - if self.local_components.is_empty() { - return transformed; - } - - let mut fragment = transformed.unwrap_or_else(|| fragment.clone()); - fragment.directives.reserve_exact(1); - fragment - .directives - .push(self.generate_flight_local_flight_components_metadata_directive()); - - Transformed::Replace(fragment) - } - - fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { - match self.fragments.get(&spread.fragment.item) { - Some(FragmentResult::Resolved { - transitive_components, - .. - }) => { - self.transitive_components - .extend(transitive_components.iter().cloned()); - return Transformed::Keep; - } - Some(FragmentResult::Pending) => { - // recursive fragment, immediately return to avoid infinite loop. 
components will be added - // at the point where the fragment was first reached - return Transformed::Keep; - } - None => {} - }; - // capture the local/transitive component sets prior to visiting the fragment - let mut local_components = std::mem::take(&mut self.local_components); - let mut transitive_components = std::mem::take(&mut self.transitive_components); - // mark the fragment as pending in case of a recursive fragment and then visit it - self.fragments - .insert(spread.fragment.item, FragmentResult::Pending); - let fragment = - self.transform_fragment(self.program.fragment(spread.fragment.item).unwrap_or_else( - || { - panic!( - "Tried to spread missing fragment: `{}`.", - spread.fragment.item - ); - }, - )); - // extend the parent's transitive component set w the local and transitive components from the fragment - transitive_components.extend(self.local_components.iter().cloned()); - transitive_components.extend(self.transitive_components.iter().cloned()); - // then make the parent's sets active again - std::mem::swap(&mut self.local_components, &mut local_components); - std::mem::swap(&mut self.transitive_components, &mut transitive_components); - - transitive_components.extend(local_components); - self.fragments.insert( - spread.fragment.item, - FragmentResult::Resolved { - fragment, - transitive_components, - }, - ); - Transformed::Keep - } - - fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed { - let field_definition = self.program.schema.field(field.definition.item); - // Activate the transform based on the return type since this is a fast check - if field_definition.type_.inner() != self.component_type { - return Transformed::Keep; - } - - // Extract the backing component's name from the field definition - let component_name = - match self.get_component_name_for_field(field_definition, field.definition.location) { - Ok(value) => value, - Err(_) => return Transformed::Keep, - }; - - // Determine the type's `flight` field - let 
flight_field_id = - match self.validate_flight_field(field_definition, field.definition.location) { - Ok(value) => value, - Err(_) => return Transformed::Keep, - }; - - // Record that the given component is reachable from this field - self.local_components.insert(component_name); - - // Rewrite into a call to the `flight` field, passing the original arguments - // as values of the `props` argument: - let alias = field.alias.unwrap_or(field_definition.name); - let mut directives = Vec::with_capacity(field.directives.len() + 1); - directives.extend(field.directives.iter().cloned()); - directives.push(Directive { - name: WithLocation::generated(*REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY), - arguments: vec![], - data: None, - }); - Transformed::Replace(Selection::ScalarField(Arc::new(ScalarField { - alias: Some(alias), - arguments: vec![ - Argument { - name: WithLocation::generated(*REACT_FLIGHT_COMPONENT_ARGUMENT_NAME), - value: WithLocation::generated(Value::Constant(ConstantValue::String( - component_name, - ))), - }, - Argument { - name: WithLocation::generated(*REACT_FLIGHT_PROPS_ARGUMENT_NAME), - value: WithLocation::generated(Value::Object(field.arguments.clone())), - }, - ], - definition: WithLocation::generated(flight_field_id), - directives, - }))) - } -} - -#[derive(Error, Debug)] -enum ValidationMessage { - #[error( - "Expected 'flight' field schema definition to specify its component name with @react_flight_component" - )] - InvalidFlightFieldMissingModuleDirective, - - #[error("Cannot query field '{field_name}', this type does not define a 'flight' field")] - InvalidFlightFieldNotDefinedOnType { field_name: StringKey }, - - #[error("Expected @react_flight_component value to be a literal string")] - InvalidFlightFieldExpectedModuleNameString, - - #[error("Expected flight field to have a 'props: ReactFlightProps' argument")] - InvalidFlightFieldPropsArgument, - - #[error("Expected flight field to have a 'component: String' argument")] - 
InvalidFlightFieldComponentArgument, - - #[error("Expected flight field to return 'ReactFlightComponent'")] - InvalidFlightFieldReturnType, -} diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment.rs b/compiler/crates/relay-transforms/src/refetchable_fragment.rs new file mode 100644 index 0000000000000..460d19823e004 --- /dev/null +++ b/compiler/crates/relay-transforms/src/refetchable_fragment.rs @@ -0,0 +1,399 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +mod fetchable_query_generator; +mod node_query_generator; +mod query_query_generator; +mod refetchable_directive; +mod utils; +mod validation_message; +mod viewer_query_generator; + +use std::fmt::Write; +use std::sync::Arc; + +use ::errors::validate_map; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::NamedItem; +use common::WithLocation; +use fetchable_query_generator::FETCHABLE_QUERY_GENERATOR; +use graphql_ir::Directive; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionName; +use graphql_ir::FragmentDefinitionNameSet; +use graphql_ir::OperationDefinition; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_ir::Selection; +use graphql_ir::VariableDefinition; +use graphql_syntax::OperationKind; +use intern::string_key::StringKeyMap; +use node_query_generator::NODE_QUERY_GENERATOR; +use query_query_generator::QUERY_QUERY_GENERATOR; +use relay_config::ProjectConfig; +use relay_config::SchemaConfig; +use schema::SDLSchema; +use schema::Schema; +pub use utils::RefetchableDerivedFromMetadata; +pub use utils::RefetchableMetadata; +pub use utils::CONSTANTS; +use utils::*; +use viewer_query_generator::VIEWER_QUERY_GENERATOR; + +use self::refetchable_directive::RefetchableDirective; +pub use self::refetchable_directive::REFETCHABLE_NAME; +use 
self::validation_message::ValidationMessage; +use crate::connections::extract_connection_metadata_from_directive; +use crate::connections::ConnectionConstants; +use crate::relay_directive::PLURAL_ARG_NAME; +use crate::relay_directive::RELAY_DIRECTIVE_NAME; +use crate::root_variables::InferVariablesVisitor; +use crate::root_variables::VariableMap; + +/// This transform synthesizes "refetch" queries for fragments that +/// are trivially refetchable. This is comprised of three main stages: +/// +/// 1. Validating that fragments marked with @refetchable qualify for +/// refetch query generation; mainly this means that the fragment +/// type is able to be refetched in some canonical way. +/// 2. Determining the variable definitions to use for each generated +/// query. GraphQL does not have a notion of fragment-local variables +/// at all, and although Relay adds this concept developers are still +/// allowed to reference global variables. This necessitates a +/// visiting all reachable fragments for each @refetchable fragment, +/// and finding the union of all global variables expected to be defined. +/// 3. Building the refetch queries, a straightforward copying transform from +/// Fragment to Root IR nodes. 
+pub fn transform_refetchable_fragment( + program: &Program, + project_config: &ProjectConfig, + base_fragment_names: &'_ FragmentDefinitionNameSet, + for_typegen: bool, +) -> DiagnosticsResult { + let mut next_program = Program::new(Arc::clone(&program.schema)); + + let mut transformer = RefetchableFragment::new(program, project_config, for_typegen); + + for operation in program.operations() { + next_program.insert_operation(Arc::clone(operation)); + } + + validate_map(program.fragments(), |fragment| { + let operation_result = transformer.transform_refetch_fragment(fragment)?; + if let Some((refetchable_directive, operation_result)) = operation_result { + next_program.insert_fragment(operation_result.fragment); + if !base_fragment_names.contains(&fragment.name.item) { + let mut directives = refetchable_directive.directives; + directives.push(RefetchableDerivedFromMetadata(fragment.name.item).into()); + + next_program.insert_operation(Arc::new(OperationDefinition { + kind: OperationKind::Query, + name: WithLocation::new( + fragment.name.location, + refetchable_directive.query_name.item, + ), + type_: program.schema.query_type().unwrap(), + variable_definitions: operation_result.variable_definitions, + directives, + selections: operation_result.selections, + })); + } + } else { + next_program.insert_fragment(Arc::clone(fragment)); + } + Ok(()) + })?; + + Ok(next_program) +} + +type ExistingRefetchOperations = StringKeyMap>; + +pub struct RefetchableFragment<'program, 'pc> { + connection_constants: ConnectionConstants, + existing_refetch_operations: ExistingRefetchOperations, + for_typegen: bool, + program: &'program Program, + project_config: &'pc ProjectConfig, +} + +impl<'program, 'pc> RefetchableFragment<'program, 'pc> { + pub fn new( + program: &'program Program, + project_config: &'pc ProjectConfig, + for_typegen: bool, + ) -> Self { + RefetchableFragment { + connection_constants: Default::default(), + existing_refetch_operations: Default::default(), + 
for_typegen, + program, + project_config, + } + } + + fn transform_refetch_fragment( + &mut self, + fragment: &Arc, + ) -> DiagnosticsResult> { + let refetchable_directive = fragment.directives.named(*REFETCHABLE_NAME); + if refetchable_directive.is_some() && self.program.schema.query_type().is_none() { + return Err(vec![Diagnostic::error( + "Unable to use @refetchable directive. The `Query` type is not defined on the schema.", + refetchable_directive.unwrap().name.location, + )]); + } + + refetchable_directive + .map(|refetchable_directive| { + self.transform_refetch_fragment_with_refetchable_directive( + fragment, + refetchable_directive, + ) + }) + .transpose() + } + + pub fn transform_refetch_fragment_with_refetchable_directive( + &mut self, + fragment: &Arc, + directive: &Directive, + ) -> DiagnosticsResult<(RefetchableDirective, RefetchRoot)> { + let refetchable_directive = + RefetchableDirective::from_directive(&self.program.schema, directive)?; + self.validate_sibling_directives(fragment)?; + self.validate_refetch_name(fragment, &refetchable_directive)?; + let variables_map = + InferVariablesVisitor::new(self.program).infer_fragment_variables(fragment); + + let generators = get_query_generators(&refetchable_directive, self.project_config); + + for generator in generators { + if let Some(refetch_root) = (generator.build_refetch_operation)( + &self.program.schema, + &self.project_config.schema_config, + fragment, + refetchable_directive.query_name.item, + &variables_map, + )? 
{ + if !self.for_typegen { + self.validate_connection_metadata(refetch_root.fragment.as_ref())?; + } + return Ok((refetchable_directive, refetch_root)); + } + } + let mut descriptions = String::new(); + for generator in generators.iter() { + writeln!(descriptions, " - {}", generator.description).unwrap(); + } + descriptions.pop(); + Err(vec![Diagnostic::error( + ValidationMessage::UnsupportedRefetchableFragment { + fragment_name: fragment.name.item, + descriptions, + }, + fragment.name.location, + )]) + } + + fn validate_sibling_directives( + &mut self, + fragment: &FragmentDefinition, + ) -> DiagnosticsResult<()> { + let relay_directive = fragment.directives.named(*RELAY_DIRECTIVE_NAME); + let plural_directive = relay_directive + .filter(|directive| directive.arguments.named(*PLURAL_ARG_NAME).is_some()); + if let Some(directive) = plural_directive { + Err(vec![Diagnostic::error( + ValidationMessage::InvalidRefetchableFragmentWithRelayPlural { + fragment_name: fragment.name.item, + }, + directive.name.location, + )]) + } else { + Ok(()) + } + } + + fn validate_refetch_name( + &mut self, + fragment: &FragmentDefinition, + refetchable_directive: &RefetchableDirective, + ) -> DiagnosticsResult<()> { + let fragment_name = fragment.name; + + // check for conflict with other @refetchable names + if let Some(previous_fragment) = self + .existing_refetch_operations + .insert(refetchable_directive.query_name.item.0, fragment_name) + { + let (first_fragment, second_fragment) = if fragment.name.item > previous_fragment.item { + (previous_fragment, fragment_name) + } else { + (fragment_name, previous_fragment) + }; + return Err(vec![ + Diagnostic::error( + ValidationMessage::DuplicateRefetchableOperation { + query_name: refetchable_directive.query_name.item, + first_fragment_name: first_fragment.item, + second_fragment_name: second_fragment.item, + }, + first_fragment.location, + ) + .annotate("also defined here", second_fragment.location), + ]); + } + + // check for conflict 
with operations + if let Some(existing_query) = self + .program + .operation(refetchable_directive.query_name.item) + { + return Err(vec![ + Diagnostic::error( + ValidationMessage::RefetchableQueryConflictWithDefinition { + definition_name: refetchable_directive.query_name.item.0, + }, + refetchable_directive.query_name.location, + ) + .annotate( + "an operation with that name is already defined here", + existing_query.name.location, + ), + ]); + } + + if let Some(existing_fragment) = self.program.fragment(FragmentDefinitionName( + refetchable_directive.query_name.item.0, + )) { + return Err(vec![ + Diagnostic::error( + ValidationMessage::RefetchableQueryConflictWithDefinition { + definition_name: refetchable_directive.query_name.item.0, + }, + refetchable_directive.query_name.location, + ) + .annotate( + "a fragment with that name is already defined here", + existing_fragment.name.location, + ), + ]); + } + + Ok(()) + } + + /// Validate that any @connection usage is valid for refetching: + /// - Variables are used for both the "count" and "cursor" arguments + /// (after/first or before/last) + /// - Exactly one connection + /// - Has a stable path to the connection data + /// + /// Connection metadata is extracted in `transform_connection` + fn validate_connection_metadata(&self, fragment: &FragmentDefinition) -> DiagnosticsResult<()> { + if let Some(metadatas) = extract_connection_metadata_from_directive(&fragment.directives) { + // TODO: path or connection field locations in the error messages + if metadatas.len() > 1 { + return Err(vec![Diagnostic::error( + ValidationMessage::RefetchableWithMultipleConnections { + fragment_name: fragment.name.item, + }, + fragment.name.location, + )]); + } else if metadatas.len() == 1 { + let metadata = &metadatas[0]; + if metadata.path.is_none() { + return Err(vec![Diagnostic::error( + ValidationMessage::RefetchableWithConnectionInPlural { + fragment_name: fragment.name.item, + }, + fragment.name.location, + )]); + } + if 
(metadata.after.is_none() || metadata.first.is_none()) + && metadata.direction != self.connection_constants.direction_backward + { + return Err(vec![Diagnostic::error( + ValidationMessage::RefetchableWithConstConnectionArguments { + fragment_name: fragment.name.item, + arguments: "after and first", + }, + fragment.name.location, + )]); + } else if (metadata.before.is_none() || metadata.last.is_none()) + && metadata.direction != self.connection_constants.direction_forward + { + return Err(vec![Diagnostic::error( + ValidationMessage::RefetchableWithConstConnectionArguments { + fragment_name: fragment.name.item, + arguments: "before and last", + }, + fragment.name.location, + )]); + } + } + } + Ok(()) + } +} + +type BuildRefetchOperationFn = fn( + schema: &SDLSchema, + schema_config: &SchemaConfig, + fragment: &Arc, + query_name: OperationDefinitionName, + variables_map: &VariableMap, +) -> DiagnosticsResult>; +/// A strategy to generate queries for a given fragment. Multiple strategies +/// can be tried, such as generating a `node(id: ID)` query or a query directly +/// on the root query type. +pub struct QueryGenerator { + /// Used to describe what fragments this QueryGenerator applies to, used in + /// error messages. + pub description: &'static str, + + /// Returns RefetchRoot or null if not applicable. Might throw a user error + /// for an invalid schema or other problems. 
+ pub build_refetch_operation: BuildRefetchOperationFn, +} + +const GENERATORS: [QueryGenerator; 4] = [ + VIEWER_QUERY_GENERATOR, + QUERY_QUERY_GENERATOR, + NODE_QUERY_GENERATOR, + FETCHABLE_QUERY_GENERATOR, +]; + +const PREFER_FETCHABLE_GENERATORS: [QueryGenerator; 4] = [ + VIEWER_QUERY_GENERATOR, + QUERY_QUERY_GENERATOR, + FETCHABLE_QUERY_GENERATOR, + NODE_QUERY_GENERATOR, +]; + +fn get_query_generators( + directive: &RefetchableDirective, + project_config: &ProjectConfig, +) -> &'static [QueryGenerator; 4] { + if directive.prefer_fetchable + || project_config + .feature_flags + .prefer_fetchable_in_refetch_queries + { + &PREFER_FETCHABLE_GENERATORS + } else { + &GENERATORS + } +} + +pub struct RefetchRoot { + pub fragment: Arc, + pub selections: Vec, + pub variable_definitions: Vec, +} diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/fetchable_query_generator.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/fetchable_query_generator.rs index a0f6a7f259e17..9924eedf4b4f1 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/fetchable_query_generator.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/fetchable_query_generator.rs @@ -37,6 +37,7 @@ use super::build_used_global_variables; use super::validation_message::ValidationMessage; use super::QueryGenerator; use super::RefetchRoot; +use super::RefetchableIdentifierInfo; use super::RefetchableMetadata; use super::CONSTANTS; use crate::root_variables::VariableMap; @@ -72,7 +73,11 @@ fn build_refetch_operation( RefetchableMetadata { operation_name: query_name, path: vec![fetch_field_name], - identifier_field: Some(identifier_field_name), + identifier_info: Some(RefetchableIdentifierInfo { + identifier_field: identifier_field_name, + identifier_query_variable_name: schema_config + .node_interface_id_variable_name, + }), }, ), selections: enforce_selections_with_id_field( @@ -103,7 +108,7 @@ fn build_refetch_operation( alias: None, definition: 
WithLocation::new(fragment.name.location, fetch_field_id), arguments: vec![Argument { - name: WithLocation::new(fragment.name.location, id_arg.name), + name: WithLocation::new(fragment.name.location, id_arg.name.item), value: WithLocation::new( fragment.name.location, Value::Variable(Variable { diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/mod.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/mod.rs deleted file mode 100644 index a79c1cb4606d8..0000000000000 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/mod.rs +++ /dev/null @@ -1,357 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod fetchable_query_generator; -mod node_query_generator; -mod query_query_generator; -mod refetchable_directive; -mod utils; -mod validation_message; -mod viewer_query_generator; - -use std::fmt::Write; -use std::sync::Arc; - -use ::errors::validate_map; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::NamedItem; -use common::WithLocation; -use fetchable_query_generator::FETCHABLE_QUERY_GENERATOR; -use graphql_ir::Directive; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionName; -use graphql_ir::FragmentDefinitionNameSet; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::Selection; -use graphql_ir::VariableDefinition; -use graphql_syntax::OperationKind; -use intern::string_key::StringKeyMap; -use node_query_generator::NODE_QUERY_GENERATOR; -use query_query_generator::QUERY_QUERY_GENERATOR; -use relay_config::SchemaConfig; -use schema::SDLSchema; -use schema::Schema; -pub use utils::RefetchableDerivedFromMetadata; -pub use utils::RefetchableMetadata; -pub use utils::CONSTANTS; -use utils::*; -use viewer_query_generator::VIEWER_QUERY_GENERATOR; - -use 
self::refetchable_directive::RefetchableDirective; -pub use self::refetchable_directive::REFETCHABLE_NAME; -use self::validation_message::ValidationMessage; -use crate::connections::extract_connection_metadata_from_directive; -use crate::connections::ConnectionConstants; -use crate::relay_directive::PLURAL_ARG_NAME; -use crate::relay_directive::RELAY_DIRECTIVE_NAME; -use crate::root_variables::InferVariablesVisitor; -use crate::root_variables::VariableMap; - -/// This transform synthesizes "refetch" queries for fragments that -/// are trivially refetchable. This is comprised of three main stages: -/// -/// 1. Validating that fragments marked with @refetchable qualify for -/// refetch query generation; mainly this means that the fragment -/// type is able to be refetched in some canonical way. -/// 2. Determining the variable definitions to use for each generated -/// query. GraphQL does not have a notion of fragment-local variables -/// at all, and although Relay adds this concept developers are still -/// allowed to reference global variables. This necessitates a -/// visiting all reachable fragments for each @refetchable fragment, -/// and finding the union of all global variables expected to be defined. -/// 3. Building the refetch queries, a straightforward copying transform from -/// Fragment to Root IR nodes. 
-pub fn transform_refetchable_fragment( - program: &Program, - schema_config: &SchemaConfig, - base_fragment_names: &'_ FragmentDefinitionNameSet, - for_typegen: bool, -) -> DiagnosticsResult { - let mut next_program = Program::new(Arc::clone(&program.schema)); - - let mut transformer = RefetchableFragment::new(program, schema_config, for_typegen); - - for operation in program.operations() { - next_program.insert_operation(Arc::clone(operation)); - } - - validate_map(program.fragments(), |fragment| { - let operation_result = transformer.transform_refetch_fragment(fragment)?; - if let Some((refetchable_directive, operation_result)) = operation_result { - next_program.insert_fragment(operation_result.fragment); - if !base_fragment_names.contains(&fragment.name.item) { - let mut directives = refetchable_directive.directives; - directives.push(RefetchableDerivedFromMetadata(fragment.name.item).into()); - - next_program.insert_operation(Arc::new(OperationDefinition { - kind: OperationKind::Query, - name: WithLocation::new( - fragment.name.location, - refetchable_directive.query_name.item, - ), - type_: program.schema.query_type().unwrap(), - variable_definitions: operation_result.variable_definitions, - directives, - selections: operation_result.selections, - })); - } - } else { - next_program.insert_fragment(Arc::clone(fragment)); - } - Ok(()) - })?; - - Ok(next_program) -} - -type ExistingRefetchOperations = StringKeyMap>; - -pub struct RefetchableFragment<'program, 'sc> { - connection_constants: ConnectionConstants, - existing_refetch_operations: ExistingRefetchOperations, - for_typegen: bool, - program: &'program Program, - schema_config: &'sc SchemaConfig, -} - -impl<'program, 'sc> RefetchableFragment<'program, 'sc> { - pub fn new( - program: &'program Program, - schema_config: &'sc SchemaConfig, - for_typegen: bool, - ) -> Self { - RefetchableFragment { - connection_constants: Default::default(), - existing_refetch_operations: Default::default(), - for_typegen, - 
program, - schema_config, - } - } - - fn transform_refetch_fragment( - &mut self, - fragment: &Arc, - ) -> DiagnosticsResult> { - let refetchable_directive = fragment.directives.named(*REFETCHABLE_NAME); - if refetchable_directive.is_some() && self.program.schema.query_type().is_none() { - return Err(vec![Diagnostic::error( - "Unable to use @refetchable directive. The `Query` type is not defined on the schema.", - refetchable_directive.unwrap().name.location, - )]); - } - - refetchable_directive - .map(|refetchable_directive| { - self.transform_refetch_fragment_with_refetchable_directive( - fragment, - refetchable_directive, - ) - }) - .transpose() - } - - pub fn transform_refetch_fragment_with_refetchable_directive( - &mut self, - fragment: &Arc, - directive: &Directive, - ) -> DiagnosticsResult<(RefetchableDirective, RefetchRoot)> { - let refetchable_directive = - RefetchableDirective::from_directive(&self.program.schema, directive)?; - self.validate_sibling_directives(fragment)?; - self.validate_refetch_name(fragment, &refetchable_directive)?; - let variables_map = - InferVariablesVisitor::new(self.program).infer_fragment_variables(fragment); - - for generator in GENERATORS.iter() { - if let Some(refetch_root) = (generator.build_refetch_operation)( - &self.program.schema, - self.schema_config, - fragment, - refetchable_directive.query_name.item, - &variables_map, - )? 
{ - if !self.for_typegen { - self.validate_connection_metadata(refetch_root.fragment.as_ref())?; - } - return Ok((refetchable_directive, refetch_root)); - } - } - let mut descriptions = String::new(); - for generator in GENERATORS.iter() { - writeln!(descriptions, " - {}", generator.description).unwrap(); - } - descriptions.pop(); - Err(vec![Diagnostic::error( - ValidationMessage::UnsupportedRefetchableFragment { - fragment_name: fragment.name.item, - descriptions, - }, - fragment.name.location, - )]) - } - - fn validate_sibling_directives( - &mut self, - fragment: &FragmentDefinition, - ) -> DiagnosticsResult<()> { - let relay_directive = fragment.directives.named(*RELAY_DIRECTIVE_NAME); - let plural_directive = relay_directive - .filter(|directive| directive.arguments.named(*PLURAL_ARG_NAME).is_some()); - if let Some(directive) = plural_directive { - Err(vec![Diagnostic::error( - ValidationMessage::InvalidRefetchableFragmentWithRelayPlural { - fragment_name: fragment.name.item, - }, - directive.name.location, - )]) - } else { - Ok(()) - } - } - - fn validate_refetch_name( - &mut self, - fragment: &FragmentDefinition, - refetchable_directive: &RefetchableDirective, - ) -> DiagnosticsResult<()> { - let fragment_name = fragment.name; - - // check for conflict with other @refetchable names - if let Some(previous_fragment) = self - .existing_refetch_operations - .insert(refetchable_directive.query_name.item.0, fragment_name) - { - let (first_fragment, second_fragment) = if fragment.name.item > previous_fragment.item { - (previous_fragment, fragment_name) - } else { - (fragment_name, previous_fragment) - }; - return Err(vec![ - Diagnostic::error( - ValidationMessage::DuplicateRefetchableOperation { - query_name: refetchable_directive.query_name.item, - first_fragment_name: first_fragment.item, - second_fragment_name: second_fragment.item, - }, - first_fragment.location, - ) - .annotate("also defined here", second_fragment.location), - ]); - } - - // check for conflict 
with operations - if let Some(existing_query) = self - .program - .operation(refetchable_directive.query_name.item) - { - return Err(vec![ - Diagnostic::error( - ValidationMessage::RefetchableQueryConflictWithQuery { - query_name: refetchable_directive.query_name.item, - }, - refetchable_directive.query_name.location, - ) - .annotate( - "an operation with that name is already defined here", - existing_query.name.location, - ), - ]); - } - - Ok(()) - } - - /// Validate that any @connection usage is valid for refetching: - /// - Variables are used for both the "count" and "cursor" arguments - /// (after/first or before/last) - /// - Exactly one connection - /// - Has a stable path to the connection data - /// - /// Connection metadata is extracted in `transform_connection` - fn validate_connection_metadata(&self, fragment: &FragmentDefinition) -> DiagnosticsResult<()> { - if let Some(metadatas) = extract_connection_metadata_from_directive(&fragment.directives) { - // TODO: path or connection field locations in the error messages - if metadatas.len() > 1 { - return Err(vec![Diagnostic::error( - ValidationMessage::RefetchableWithMultipleConnections { - fragment_name: fragment.name.item, - }, - fragment.name.location, - )]); - } else if metadatas.len() == 1 { - let metadata = &metadatas[0]; - if metadata.path.is_none() { - return Err(vec![Diagnostic::error( - ValidationMessage::RefetchableWithConnectionInPlural { - fragment_name: fragment.name.item, - }, - fragment.name.location, - )]); - } - if (metadata.after.is_none() || metadata.first.is_none()) - && metadata.direction != self.connection_constants.direction_backward - { - return Err(vec![Diagnostic::error( - ValidationMessage::RefetchableWithConstConnectionArguments { - fragment_name: fragment.name.item, - arguments: "after and first", - }, - fragment.name.location, - )]); - } else if (metadata.before.is_none() || metadata.last.is_none()) - && metadata.direction != self.connection_constants.direction_forward - { - 
return Err(vec![Diagnostic::error( - ValidationMessage::RefetchableWithConstConnectionArguments { - fragment_name: fragment.name.item, - arguments: "before and last", - }, - fragment.name.location, - )]); - } - } - } - Ok(()) - } -} - -type BuildRefetchOperationFn = fn( - schema: &SDLSchema, - schema_config: &SchemaConfig, - fragment: &Arc, - query_name: OperationDefinitionName, - variables_map: &VariableMap, -) -> DiagnosticsResult>; -/// A strategy to generate queries for a given fragment. Multiple strategies -/// can be tried, such as generating a `node(id: ID)` query or a query directly -/// on the root query type. -pub struct QueryGenerator { - /// Used to describe what fragments this QueryGenerator applies to, used in - /// error messages. - pub description: &'static str, - - /// Returns RefetchRoot or null if not applicable. Might throw a user error - /// for an invalid schema or other problems. - pub build_refetch_operation: BuildRefetchOperationFn, -} - -const GENERATORS: [QueryGenerator; 4] = [ - VIEWER_QUERY_GENERATOR, - QUERY_QUERY_GENERATOR, - NODE_QUERY_GENERATOR, - FETCHABLE_QUERY_GENERATOR, -]; - -pub struct RefetchRoot { - pub fragment: Arc, - pub selections: Vec, - pub variable_definitions: Vec, -} diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/node_query_generator.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/node_query_generator.rs index 3a4ea581ae061..1d102ff9b73f1 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/node_query_generator.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/node_query_generator.rs @@ -40,6 +40,7 @@ use super::build_used_global_variables; use super::validation_message::ValidationMessage; use super::QueryGenerator; use super::RefetchRoot; +use super::RefetchableIdentifierInfo; use super::RefetchableMetadata; use super::CONSTANTS; use crate::root_variables::VariableMap; @@ -52,7 +53,6 @@ fn build_refetch_operation( variables_map: &VariableMap, 
) -> DiagnosticsResult> { let id_name = schema_config.node_interface_id_field; - let node_interface_id = schema.get_type(CONSTANTS.node_type_name).and_then(|type_| { if let Type::Interface(id) = type_ { Some(id) @@ -120,7 +120,11 @@ fn build_refetch_operation( RefetchableMetadata { operation_name: query_name, path: vec![CONSTANTS.node_field_name], - identifier_field: Some(id_name), + identifier_info: Some(RefetchableIdentifierInfo { + identifier_field: id_name, + identifier_query_variable_name: schema_config + .node_interface_id_variable_name, + }), }, ), used_global_variables: build_used_global_variables( @@ -163,7 +167,7 @@ fn build_refetch_operation( alias: None, definition: WithLocation::new(fragment.name.location, node_field_id), arguments: vec![Argument { - name: WithLocation::new(fragment.name.location, id_arg.name), + name: WithLocation::new(fragment.name.location, id_arg.name.item), value: WithLocation::new( fragment.name.location, Value::Variable(Variable { diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/query_query_generator.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/query_query_generator.rs index dc0498f6c2ecb..c57212723a43c 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/query_query_generator.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/query_query_generator.rs @@ -41,7 +41,7 @@ fn build_refetch_operation( RefetchableMetadata { operation_name: query_name, path: vec![], - identifier_field: None, + identifier_info: None, }, ), used_global_variables: build_used_global_variables( diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/refetchable_directive.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/refetchable_directive.rs index 48c61fa546f8b..994a8c56061f8 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/refetchable_directive.rs +++ 
b/compiler/crates/relay-transforms/src/refetchable_fragment/refetchable_directive.rs @@ -29,6 +29,7 @@ lazy_static! { pub static ref REFETCHABLE_NAME: DirectiveName = DirectiveName("refetchable".intern()); static ref QUERY_NAME_ARG: ArgumentName = ArgumentName("queryName".intern()); static ref DIRECTIVES_ARG: ArgumentName = ArgumentName("directives".intern()); + static ref PREFER_FETCHABLE_ARG: ArgumentName = ArgumentName("preferFetchable".intern()); } /// Represents the @refetchable Relay directive: @@ -37,17 +38,20 @@ lazy_static! { /// directive @refetchable( /// queryName: String! /// directives: [String!] +/// preferFetchable: Boolean /// ) on FRAGMENT_DEFINITION /// ``` pub struct RefetchableDirective { pub query_name: WithLocation, pub directives: Vec, + pub prefer_fetchable: bool, } impl RefetchableDirective { pub fn from_directive(schema: &SDLSchema, directive: &Directive) -> DiagnosticsResult { let mut name = None; let mut directives = Vec::new(); + let mut prefer_fetchable = false; for argument in &directive.arguments { if argument.name.item == *QUERY_NAME_ARG { @@ -120,6 +124,8 @@ impl RefetchableDirective { argument.value.location, )]) }? 
+ } else if argument.name.item == *PREFER_FETCHABLE_ARG { + prefer_fetchable = true } else { // should be validated by general directive validations panic!( @@ -131,6 +137,7 @@ impl RefetchableDirective { Ok(Self { query_name: name.unwrap(), directives, + prefer_fetchable, }) } } diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/utils.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/utils.rs index b8e7886aa82eb..fb57ad1f6d9a6 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/utils.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/utils.rs @@ -20,6 +20,7 @@ use graphql_ir::FragmentDefinition; use graphql_ir::FragmentDefinitionName; use graphql_ir::FragmentSpread; use graphql_ir::OperationDefinitionName; +use graphql_ir::ProvidedVariableMetadata; use graphql_ir::Selection; use graphql_ir::Value; use graphql_ir::Variable; @@ -31,11 +32,17 @@ use lazy_static::lazy_static; use super::validation_message::ValidationMessage; use crate::root_variables::VariableMap; +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct RefetchableIdentifierInfo { + pub identifier_field: StringKey, + pub identifier_query_variable_name: StringKey, +} + #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct RefetchableMetadata { pub operation_name: OperationDefinitionName, pub path: Vec, - pub identifier_field: Option, + pub identifier_info: Option, } associated_data_impl!(RefetchableMetadata); @@ -66,6 +73,7 @@ pub fn build_fragment_spread(fragment: &FragmentDefinition) -> Selection { arguments: fragment .variable_definitions .iter() + .filter(|def| ProvidedVariableMetadata::find(&def.directives).is_none()) .map(|var| Argument { name: var.name.map(|x| ArgumentName(x.0)), value: WithLocation::new( diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/validation_message.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/validation_message.rs index 0cdef7b2d0cf9..b66baa3032a71 100644 --- 
a/compiler/crates/relay-transforms/src/refetchable_fragment/validation_message.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/validation_message.rs @@ -11,7 +11,8 @@ use graphql_ir::VariableName; use intern::string_key::StringKey; use thiserror::Error; -#[derive(Error, Debug)] +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] pub(super) enum ValidationMessage { #[error( "Invalid use of @refetchable on fragment '{fragment_name}', only supported are fragments on:\n{descriptions}" @@ -38,9 +39,9 @@ pub(super) enum ValidationMessage { }, #[error( - "A unique query name has to be specified in `@refetchable`, an operation `{query_name}` already exists." + "The `queryName` specified in `@refetchable` must be unique, a definition with the name `{definition_name}` already exists." )] - RefetchableQueryConflictWithQuery { query_name: OperationDefinitionName }, + RefetchableQueryConflictWithDefinition { definition_name: StringKey }, #[error( "Invalid use of @refetchable with @connection in fragment '{fragment_name}', at most once @connection can appear in a refetchable fragment." 
diff --git a/compiler/crates/relay-transforms/src/refetchable_fragment/viewer_query_generator.rs b/compiler/crates/relay-transforms/src/refetchable_fragment/viewer_query_generator.rs index 54ce6bd2175e7..1179fc2e7c50a 100644 --- a/compiler/crates/relay-transforms/src/refetchable_fragment/viewer_query_generator.rs +++ b/compiler/crates/relay-transforms/src/refetchable_fragment/viewer_query_generator.rs @@ -50,7 +50,7 @@ fn build_refetch_operation( RefetchableMetadata { operation_name: query_name, path: vec![CONSTANTS.viewer_field_name], - identifier_field: None, + identifier_info: None, }, ), used_global_variables: build_used_global_variables( diff --git a/compiler/crates/relay-transforms/src/relay_client_component.rs b/compiler/crates/relay-transforms/src/relay_client_component.rs deleted file mode 100644 index 4d7099622aa9c..0000000000000 --- a/compiler/crates/relay-transforms/src/relay_client_component.rs +++ /dev/null @@ -1,417 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::FeatureFlag; -use common::FeatureFlags; -use common::InterfaceName; -use common::NamedItem; -use common::WithLocation; -use graphql_ir::associated_data_impl; -use graphql_ir::Argument; -use graphql_ir::ConstantValue; -use graphql_ir::Directive; -use graphql_ir::ExecutableDefinitionName; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionNameMap; -use graphql_ir::FragmentSpread; -use graphql_ir::OperationDefinition; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_ir::Selection; -use graphql_ir::Transformed; -use graphql_ir::Transformer; -use graphql_ir::Value; -use graphql_syntax::OperationKind; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::string_key::StringKeyMap; -use intern::string_key::StringKeySet; -use itertools::Itertools; -use lazy_static::lazy_static; -use schema::InterfaceID; -use schema::Schema; -use schema::Type; - -use super::ValidationMessage; -use crate::match_::SplitOperationMetadata; -use crate::no_inline::attach_no_inline_directives_to_fragments; -use crate::no_inline::validate_required_no_inline_directive; -use crate::util::get_fragment_filename; -use crate::util::get_normalization_operation_name; - -lazy_static! 
{ - pub static ref RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME: DirectiveName = - DirectiveName("relay_client_component_server".intern()); - pub static ref RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME: ArgumentName = - ArgumentName("module_id".intern()); - pub static ref RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME: DirectiveName = - DirectiveName("relay_client_component".intern()); - static ref STRING_TYPE: StringKey = "String".intern(); - static ref NODE_TYPE_NAME: InterfaceName = InterfaceName("Node".intern()); - static ref VIEWER_TYPE_NAME: StringKey = "Viewer".intern(); -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct RelayClientComponentMetadata { - pub split_operation_filenames: Vec, -} -associated_data_impl!(RelayClientComponentMetadata); - -pub fn relay_client_component( - program: &Program, - feature_flags: &FeatureFlags, -) -> DiagnosticsResult { - // Noop, the @relay_client_component_server directive is not defined in the schema - if program - .schema - .get_directive(*RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME) - .is_none() - { - return Ok(program.clone()); - } - let node_interface_id = program - .schema - .get_type(NODE_TYPE_NAME.0) - .and_then(|type_| { - if let Type::Interface(id) = type_ { - Some(id) - } else { - None - } - }) - .expect("@relay_client_component requires your schema to define the Node interface."); - - let mut transform = - RelayClientComponentTransform::new(program, node_interface_id, feature_flags); - let mut next_program = transform - .transform_program(program) - .replace_or_else(|| program.clone()); - - if !transform.no_inline_fragments.is_empty() { - validate_required_no_inline_directive(&transform.no_inline_fragments, program)?; - attach_no_inline_directives_to_fragments( - &mut transform.no_inline_fragments, - &mut next_program, - ); - } - if !transform.split_operations.is_empty() { - for (_, (metadata, mut operation)) in transform.split_operations.drain() { - operation.directives.push(metadata.into()); - if let 
Some(prev_operation) = - next_program.operation(OperationDefinitionName(operation.name.item.0)) - { - transform.errors.push(Diagnostic::error( - ValidationMessage::DuplicateRelayClientComponentSplitOperation, - prev_operation.name.location, - )); - } else { - next_program.insert_operation(Arc::new(operation)) - } - } - } - - if transform.errors.is_empty() { - Ok(next_program) - } else { - Err(transform.errors) - } -} - -struct RelayClientComponentTransform<'program, 'flag> { - program: &'program Program, - errors: Vec, - split_operations: StringKeyMap<(SplitOperationMetadata, OperationDefinition)>, - node_interface_id: InterfaceID, - /// Name of the document currently being transformed. - document_name: Option, - split_operation_filenames: StringKeySet, - no_inline_flag: &'flag FeatureFlag, - // Stores the fragments that should use @no_inline and their parent document name - no_inline_fragments: FragmentDefinitionNameMap>, -} - -impl<'program, 'flag> RelayClientComponentTransform<'program, 'flag> { - fn new( - program: &'program Program, - node_interface_id: InterfaceID, - feature_flags: &'flag FeatureFlags, - ) -> Self { - Self { - program, - errors: Default::default(), - split_operations: Default::default(), - node_interface_id, - document_name: None, - split_operation_filenames: Default::default(), - no_inline_flag: &feature_flags.no_inline, - no_inline_fragments: Default::default(), - } - } - - fn transform_relay_client_component( - &mut self, - spread: &FragmentSpread, - ) -> Result, Diagnostic> { - if let Some(incompatible_directives_diagnostic) = - self.get_incompatible_directives_diagnostic(spread) - { - return Err(incompatible_directives_diagnostic); - } - // @relay_client_component does not take arguments (yet) - if let Some(argument) = spread.arguments.first() { - return Err(Diagnostic::error( - ValidationMessage::InvalidRelayClientComponentWithArguments, - argument.name.location, - )); - } - - let fragment = self - .program - 
.fragment(spread.fragment.item) - .unwrap_or_else(|| panic!("Expected to find fragment `{}`", spread.fragment.item)); - // Validate that the fragment's type condition MUST implement `Node`. - let node_interface_id = self.node_interface_id; - let implements_node = match fragment.type_condition { - // Fragments can be specified on object types, interfaces, and unions. - // https://spec.graphql.org/June2018/#sec-Type-Conditions - Type::Interface(id) => { - id == node_interface_id - || self - .program - .schema - .interface(id) - .implementing_objects - .iter() - .all(|&object_id| { - self.program - .schema - .object(object_id) - .interfaces - .iter() - .any(|interface_id| *interface_id == node_interface_id) - }) - } - Type::Object(id) => self - .program - .schema - .object(id) - .interfaces - .iter() - .any(|interface_id| *interface_id == node_interface_id), - Type::Union(id) => self - .program - .schema - .union(id) - .members - .iter() - .all(|&object_id| { - self.program - .schema - .object(object_id) - .interfaces - .iter() - .any(|interface_id| *interface_id == node_interface_id) - }), - _ => false, - }; - let is_fragment_on_query = - fragment.type_condition == self.program.schema.query_type().unwrap(); - let is_fragment_on_viewer = - self.program.schema.get_type_name(fragment.type_condition) == *VIEWER_TYPE_NAME; - if !implements_node && !is_fragment_on_query && !is_fragment_on_viewer { - return Err(Diagnostic::error( - ValidationMessage::InvalidRelayClientComponentNonNodeFragment, - fragment.name.location, - )); - } - - let should_use_no_inline = self.no_inline_flag.is_enabled_for(spread.fragment.item.0); - if should_use_no_inline { - self.no_inline_fragments - .entry(fragment.name.item) - .or_default() - .push(self.document_name.unwrap()); - } else { - // Generate a SplitOperation AST - let created_split_operation = self - .split_operations - .entry(spread.fragment.item.0) - .or_insert_with(|| { - let normalization_name = - 
get_normalization_operation_name(spread.fragment.item.0).intern(); - ( - SplitOperationMetadata { - derived_from: Some(spread.fragment.item), - location: self - .program - .fragment(spread.fragment.item) - .unwrap() - .name - .location, - parent_documents: Default::default(), - raw_response_type_generation_mode: None, - }, - OperationDefinition { - name: WithLocation::new( - spread.fragment.location, - OperationDefinitionName(normalization_name), - ), - type_: fragment.type_condition, - kind: OperationKind::Query, - variable_definitions: fragment.variable_definitions.clone(), - directives: fragment.directives.clone(), - selections: vec![Selection::FragmentSpread(Arc::new(FragmentSpread { - arguments: Default::default(), - directives: Default::default(), - fragment: spread.fragment, - }))], - }, - ) - }); - created_split_operation - .0 - .parent_documents - .insert(self.document_name.unwrap()); - } - - // @relay_client_component -> @relay_client_component_server(module_id: "...") - let module_id = get_fragment_filename(spread.fragment.item); - let mut next_directives = spread.directives.clone(); - if let Some(relay_client_component_directive) = next_directives - .iter_mut() - .find(|directive| directive.name.item == *RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME) - { - *relay_client_component_directive = Directive { - name: WithLocation { - item: *RELAY_CLIENT_COMPONENT_SERVER_DIRECTIVE_NAME, - location: relay_client_component_directive.name.location, - }, - arguments: vec![Argument { - name: WithLocation::generated(*RELAY_CLIENT_COMPONENT_MODULE_ID_ARGUMENT_NAME), - value: WithLocation::generated(Value::Constant(ConstantValue::String( - module_id, - ))), - }], - data: None, - }; - } - - // Record the SplitOperation so we can emit metadata later - self.split_operation_filenames.insert(module_id); - - Ok(Transformed::Replace(Selection::FragmentSpread(Arc::new( - FragmentSpread { - directives: next_directives, - ..spread.clone() - }, - )))) - } - - fn 
get_incompatible_directives_diagnostic( - &self, - spread: &FragmentSpread, - ) -> Option { - let incompatible_directives = spread - .directives - .iter() - .filter_map(|directive| { - if directive.name.item != *RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME { - Some(directive.name.item) - } else { - None - } - }) - .collect::>(); - if !incompatible_directives.is_empty() { - Some(Diagnostic::error( - ValidationMessage::IncompatibleRelayClientComponentDirectives { - incompatible_directives, - }, - spread.fragment.location, - )) - } else { - None - } - } - - fn generate_relay_client_component_client_metadata_directive(&mut self) -> Directive { - let split_operation_filenames = self.split_operation_filenames.drain().sorted().collect(); - RelayClientComponentMetadata { - split_operation_filenames, - } - .into() - } -} - -impl<'program, 'flag> Transformer for RelayClientComponentTransform<'program, 'flag> { - const NAME: &'static str = "RelayClientComponentTransform"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn transform_operation( - &mut self, - operation: &OperationDefinition, - ) -> Transformed { - assert!(self.split_operation_filenames.is_empty()); - self.document_name = Some(operation.name.item.into()); - - let transformed = self.default_transform_operation(operation); - if self.split_operation_filenames.is_empty() { - return transformed; - } - - let mut operation = transformed.unwrap_or_else(|| operation.clone()); - operation.directives.reserve_exact(1); - operation - .directives - .push(self.generate_relay_client_component_client_metadata_directive()); - Transformed::Replace(operation) - } - - fn transform_fragment( - &mut self, - fragment: &FragmentDefinition, - ) -> Transformed { - assert!(self.split_operation_filenames.is_empty()); - self.document_name = Some(fragment.name.item.into()); - - let transformed = self.default_transform_fragment(fragment); - if self.split_operation_filenames.is_empty() { - return transformed; - } - - 
let mut fragment = transformed.unwrap_or_else(|| fragment.clone()); - fragment.directives.reserve_exact(1); - fragment - .directives - .push(self.generate_relay_client_component_client_metadata_directive()); - Transformed::Replace(fragment) - } - - fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed { - let relay_client_component_directive = spread - .directives - .named(*RELAY_CLIENT_COMPONENT_DIRECTIVE_NAME); - if relay_client_component_directive.is_some() { - match self.transform_relay_client_component(spread) { - Ok(transformed) => transformed, - Err(err) => { - self.errors.push(err); - self.default_transform_fragment_spread(spread) - } - } - } else { - self.default_transform_fragment_spread(spread) - } - } -} diff --git a/compiler/crates/relay-transforms/src/relay_resolvers.rs b/compiler/crates/relay-transforms/src/relay_resolvers.rs index 7526b105fd3f5..9b928df99b8c6 100644 --- a/compiler/crates/relay-transforms/src/relay_resolvers.rs +++ b/compiler/crates/relay-transforms/src/relay_resolvers.rs @@ -14,13 +14,13 @@ use common::Location; use common::NamedItem; use common::WithLocation; use docblock_shared::FRAGMENT_KEY_ARGUMENT_NAME; +use docblock_shared::GENERATED_FRAGMENT_ARGUMENT_NAME; use docblock_shared::HAS_OUTPUT_TYPE_ARGUMENT_NAME; use docblock_shared::IMPORT_NAME_ARGUMENT_NAME; use docblock_shared::IMPORT_PATH_ARGUMENT_NAME; use docblock_shared::INJECT_FRAGMENT_DATA_ARGUMENT_NAME; use docblock_shared::LIVE_ARGUMENT_NAME; use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; -use docblock_shared::RELAY_RESOLVER_MODEL_DIRECTIVE_NAME; use docblock_shared::RELAY_RESOLVER_WEAK_OBJECT_DIRECTIVE; use graphql_ir::associated_data_impl; use graphql_ir::Argument; @@ -42,6 +42,7 @@ use graphql_syntax::ConstantValue; use intern::string_key::Intern; use intern::string_key::StringKey; use intern::Lookup; +use relay_config::ProjectName; use schema::ArgumentValue; use schema::Field; use schema::FieldID; @@ -54,6 +55,7 @@ use 
crate::generate_relay_resolvers_operations_for_nested_objects::generate_name use crate::ClientEdgeMetadata; use crate::FragmentAliasMetadata; use crate::RequiredMetadataDirective; +use crate::CHILDREN_CAN_BUBBLE_METADATA_KEY; use crate::CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME; use crate::REQUIRED_DIRECTIVE_NAME; @@ -66,8 +68,13 @@ use crate::REQUIRED_DIRECTIVE_NAME; /// /// See the docblock for `relay_resolvers_spread_transform` for more details /// about the resulting format. -pub fn relay_resolvers(program: &Program, enabled: bool) -> DiagnosticsResult { - let transformed_fields_program = relay_resolvers_fields_transform(program, enabled)?; +pub fn relay_resolvers( + project_name: ProjectName, + program: &Program, + enabled: bool, +) -> DiagnosticsResult { + let transformed_fields_program = + relay_resolvers_fields_transform(project_name, program, enabled)?; relay_resolvers_spread_transform(&transformed_fields_program) } @@ -120,7 +127,7 @@ associated_data_impl!(RelayResolverFieldMetadata); #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct RelayResolverMetadata { - field_id: FieldID, + pub field_id: FieldID, pub import_path: StringKey, pub import_name: Option, pub field_alias: Option, @@ -159,20 +166,10 @@ impl RelayResolverMetadata { } pub fn generate_local_resolver_name(&self, schema: &SDLSchema) -> StringKey { - to_camel_case(format!( - "{}_{}_resolver", - self.field_parent_type_name(schema), - self.field_name(schema) - )) - .intern() + resolver_import_alias(self.field_parent_type_name(schema), self.field_name(schema)) } pub fn generate_local_resolver_type_name(&self, schema: &SDLSchema) -> StringKey { - to_camel_case(format!( - "{}_{}_resolver_type", - self.field_parent_type_name(schema), - self.field_name(schema) - )) - .intern() + resolver_type_import_alias(self.field_parent_type_name(schema), self.field_name(schema)) } } @@ -219,11 +216,8 @@ impl<'program> RelayResolverSpreadTransform<'program> { .expect("Previous validation passes ensured this 
exists.") }); - let (fragment_arguments, field_arguments) = field - .arguments() - .iter() - .map(|arg| arg.clone()) - .partition(|arg| { + let (fragment_arguments, field_arguments) = + field.arguments().iter().cloned().partition(|arg| { if let Some(fragment_definition) = fragment_definition { fragment_definition .variable_definitions @@ -343,10 +337,11 @@ impl<'program> Transformer for RelayResolverSpreadTransform<'program> { /// root fragment dependencies are. They should simply be able to check for the /// presence of the `RelayResolverFieldMetadata` IR directive on the field. fn relay_resolvers_fields_transform( + project_name: ProjectName, program: &Program, enabled: bool, ) -> DiagnosticsResult { - let mut transform = RelayResolverFieldTransform::new(program, enabled); + let mut transform = RelayResolverFieldTransform::new(project_name, program, enabled); let next_program = transform .transform_program(program) .replace_or_else(|| program.clone()); @@ -359,6 +354,7 @@ fn relay_resolvers_fields_transform( } struct RelayResolverFieldTransform<'program> { + project_name: ProjectName, enabled: bool, program: &'program Program, errors: Vec, @@ -366,12 +362,13 @@ struct RelayResolverFieldTransform<'program> { } impl<'program> RelayResolverFieldTransform<'program> { - fn new(program: &'program Program, enabled: bool) -> Self { + fn new(project_name: ProjectName, program: &'program Program, enabled: bool) -> Self { Self { program, enabled, errors: Default::default(), path: Vec::new(), + project_name, } } @@ -408,6 +405,7 @@ impl<'program> RelayResolverFieldTransform<'program> { // For now, only @required and @waterfall are allowed on Resolver fields. 
directive.name.item != RequiredMetadataDirective::directive_name() && directive.name.item != *REQUIRED_DIRECTIVE_NAME + && directive.name.item != *CHILDREN_CAN_BUBBLE_METADATA_KEY && directive.name.item != *CLIENT_EDGE_WATERFALL_DIRECTIVE_NAME }); if let Some(directive) = non_required_directives.next() { @@ -450,6 +448,7 @@ impl<'program> RelayResolverFieldTransform<'program> { let output_type_info = if has_output_type { if inner_type.is_composite_type() { let normalization_operation = generate_name_for_nested_object_operation( + self.project_name, &self.program.schema, self.program.schema.field(field.definition().item), ); @@ -465,7 +464,7 @@ impl<'program> RelayResolverFieldTransform<'program> { // This is expect to be `__relay_model_instance` // TODO: Add validation/panic to assert that weak object has only // one field here, and it's a magic relay instance field. - Some(*object.fields.get(0).unwrap()) + Some(*object.fields.first().unwrap()) } else { None } @@ -575,11 +574,17 @@ impl Transformer for RelayResolverFieldTransform<'_> { .transform_selection(&client_edge_metadata.backing_field) .unwrap_or_else(|| client_edge_metadata.backing_field.clone()); + let field_name = client_edge_metadata + .linked_field + .alias_or_name(&self.program.schema); + + self.path.push(field_name.lookup()); let selections_field = self .default_transform_linked_field(client_edge_metadata.linked_field) .unwrap_or_else(|| { Selection::LinkedField(Arc::new(client_edge_metadata.linked_field.clone())) }); + self.path.pop(); let selections = vec![backing_id_field, selections_field]; @@ -599,16 +604,17 @@ impl Transformer for RelayResolverFieldTransform<'_> { } } -struct ResolverInfo { +#[derive(Debug)] +pub struct ResolverInfo { fragment_name: Option, fragment_data_injection_mode: Option, - import_path: StringKey, - import_name: Option, + pub import_path: StringKey, + pub import_name: Option, live: bool, has_output_type: bool, } -fn get_resolver_info( +pub fn get_resolver_info( schema: 
&SDLSchema, resolver_field: &Field, error_location: Location, @@ -719,10 +725,7 @@ pub(crate) fn get_bool_argument_is_true( // If the field is a resolver, return its user defined fragment name. Does not // return generated fragment names. -pub fn get_resolver_fragment_dependency_name( - field: &Field, - schema: &SDLSchema, -) -> Option { +pub fn get_resolver_fragment_dependency_name(field: &Field) -> Option { if !field.is_extension { return None; } @@ -730,36 +733,22 @@ pub fn get_resolver_fragment_dependency_name( field .directives .named(*RELAY_RESOLVER_DIRECTIVE_NAME) + .filter(|resolver_directive| { + let generated = resolver_directive + .arguments + .named(*GENERATED_FRAGMENT_ARGUMENT_NAME) + .and_then(|arg| arg.value.get_bool_literal()) + .unwrap_or(false); + !generated + }) .and_then(|resolver_directive| { resolver_directive .arguments .named(*FRAGMENT_KEY_ARGUMENT_NAME) }) - .filter(|_| { - // Resolvers on relay model types use generated fragments, and - // therefore have no user-defined fragment dependency. 
- !is_field_of_relay_model(schema, field) - }) .and_then(|arg| arg.value.get_string_literal().map(FragmentDefinitionName)) } -fn is_field_of_relay_model(schema: &SDLSchema, field: &Field) -> bool { - if let Some(parent_type) = field.parent_type { - let directives = match parent_type { - schema::Type::Object(object_id) => &schema.object(object_id).directives, - schema::Type::Interface(interface_id) => &schema.interface(interface_id).directives, - schema::Type::Union(union_id) => &schema.union(union_id).directives, - _ => panic!("Expected parent to be an object, interface or union."), - }; - - directives - .named(*RELAY_RESOLVER_MODEL_DIRECTIVE_NAME) - .is_some() - } else { - false - } -} - fn to_camel_case(non_camelized_string: String) -> String { let mut camelized_string = String::with_capacity(non_camelized_string.len()); let mut last_character_was_not_alphanumeric = false; @@ -776,3 +765,10 @@ fn to_camel_case(non_camelized_string: String) -> String { } camelized_string } + +pub fn resolver_import_alias(parent_type_name: StringKey, field_name: StringKey) -> StringKey { + to_camel_case(format!("{}_{}_resolver", parent_type_name, field_name,)).intern() +} +pub fn resolver_type_import_alias(parent_type_name: StringKey, field_name: StringKey) -> StringKey { + to_camel_case(format!("{}_{}_resolver_type", parent_type_name, field_name,)).intern() +} diff --git a/compiler/crates/relay-transforms/src/relay_resolvers_abstract_types.rs b/compiler/crates/relay-transforms/src/relay_resolvers_abstract_types.rs new file mode 100644 index 0000000000000..f2d89db6f034b --- /dev/null +++ b/compiler/crates/relay-transforms/src/relay_resolvers_abstract_types.rs @@ -0,0 +1,457 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::collections::HashMap; +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::FeatureFlags; +use common::Location; +use common::NamedItem; +use common::WithLocation; +use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use docblock_shared::ROOT_FRAGMENT_FIELD; +use graphql_ir::transform_list; +use graphql_ir::Condition; +use graphql_ir::FragmentDefinition; +use graphql_ir::InlineFragment; +use graphql_ir::LinkedField; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::Selection; +use graphql_ir::Transformed; +use graphql_ir::TransformedValue; +use graphql_ir::Transformer; +use schema::FieldID; +use schema::InterfaceID; +use schema::Schema; +use schema::Type; + +/// Transform selections on interface types. +/// +/// First we locate fields which are interface types. Then we convert all of its +/// selections into inline fragments per concrete type with the same +/// selections. +pub fn relay_resolvers_abstract_types( + program: &Program, + feature_flags: &FeatureFlags, +) -> DiagnosticsResult { + if !feature_flags + .relay_resolver_enable_interface_output_type + .is_fully_enabled() + { + return Ok(program.clone()); + } + let mut transform = RelayResolverAbstractTypesTransform::new(program); + let next_program = transform + .transform_program(program) + .replace_or_else(|| program.clone()); + + if transform.errors.is_empty() { + Ok(next_program) + } else { + Err(transform.errors) + } +} + +struct RelayResolverAbstractTypesTransform<'program> { + program: &'program Program, + errors: Vec, + interface_to_are_implementers_server_defined: HashMap, +} + +impl<'program> RelayResolverAbstractTypesTransform<'program> { + fn new(program: &'program Program) -> Self { + Self { + program, + errors: Default::default(), + interface_to_are_implementers_server_defined: HashMap::new(), + } + } +} + +impl RelayResolverAbstractTypesTransform<'_> { + // Partition selections on an 
interface type to copy to inline fragments + // on concrete types and to keep as is. + // Selections that should be copied are those that have different implementations + // across concrete types on the interface type (e.g. resolver field defined differently + // per concrete type.) + // Selections that should be kept are those that have the same implementations + // across concrete types (e.g. fields defined directly on the abstract type, or on server) + // or inline fragments that are on a concrete type. + fn partition_selections_to_copy_and_keep( + &self, + selections: &[Selection], + interface_id: InterfaceID, + ) -> (Vec, Vec) { + selections + .iter() + .cloned() + .partition(|selection| self.should_copy_selection(selection, interface_id)) + } + + fn should_copy_selection(&self, selection: &Selection, interface_id: InterfaceID) -> bool { + match selection { + Selection::InlineFragment(inline_fragment) => { + if inline_fragment.type_condition.is_none() { + inline_fragment + .selections + .iter() + .any(|selection| self.should_copy_selection(selection, interface_id)) + } else { + false + } + } + Selection::FragmentSpread(_) => false, + Selection::Condition(condition) => condition + .selections + .iter() + .any(|selection| self.should_copy_selection(selection, interface_id)), + Selection::LinkedField(field) => self + .concrete_types_have_different_implementations(interface_id, field.definition.item), + Selection::ScalarField(field) => self + .concrete_types_have_different_implementations(interface_id, field.definition.item), + } + } + + // Return true if concrete types have different implementations for the interface field + // with field_id. 
+ fn concrete_types_have_different_implementations( + &self, + interface_id: InterfaceID, + field_id: FieldID, + ) -> bool { + let interface = self.program.schema.interface(interface_id); + let interface_field = self.program.schema.field(field_id); + // Interface field is a model resolver field defined with @rootFragment + if let Some(resolver_directive) = interface_field + .directives + .iter() + .find(|directive| directive.name.0 == RELAY_RESOLVER_DIRECTIVE_NAME.0) + { + if resolver_directive + .arguments + .named(ArgumentName(*ROOT_FRAGMENT_FIELD)) + .is_some() + { + return true; + } + } + // Any of the implementing objects' corresponding field is a resolver field + let selection_name = interface_field.name.item; + let implementing_objects = + interface.recursively_implementing_objects(Arc::as_ref(&self.program.schema)); + implementing_objects.iter().any(|object_id| { + let concrete_field_id = self + .program + .schema + .named_field(Type::Object(*object_id), selection_name) + .expect("Expected field to be defined on concrete type"); + let concrete_field = self.program.schema.field(concrete_field_id); + concrete_field + .directives + .iter() + .any(|directive| directive.name.0 == RELAY_RESOLVER_DIRECTIVE_NAME.0) + }) + } + + /** + * Converts selections on an abstract type to selections on inline fragments on a concrete + * type by changing the field IDs to those defined on the concrete types in the schema. 
+ */ + fn convert_interface_selections_to_concrete_field_selections( + &self, + concrete_type: Type, + selections: &[Selection], + ) -> Vec { + selections + .iter() + .map(|selection| match selection { + Selection::LinkedField(node) => { + let field_name = self.program.schema.field(node.definition.item).name.item; + let concrete_field_id = self + .program + .schema + .named_field(concrete_type, field_name) + .expect("Expected field to be defined on concrete type"); + let definition = WithLocation::new(node.definition.location, concrete_field_id); + Selection::LinkedField(Arc::new(LinkedField { + definition, + alias: node.alias, + arguments: node.arguments.clone(), + directives: node.directives.clone(), + selections: node.selections.clone(), + })) + } + Selection::ScalarField(node) => { + let field_name = self.program.schema.field(node.definition.item).name.item; + let concrete_field_id = self + .program + .schema + .named_field(concrete_type, field_name) + .expect("Expected field to be defined on concrete type"); + let definition = WithLocation::new(node.definition.location, concrete_field_id); + Selection::ScalarField(Arc::new(ScalarField { + definition, + alias: node.alias, + arguments: node.arguments.clone(), + directives: node.directives.clone(), + })) + } + Selection::FragmentSpread(_) => selection.clone(), + Selection::InlineFragment(_) => selection.clone(), + Selection::Condition(_) => selection.clone(), + }) + .collect() + } + + fn create_inline_fragment_selections_for_interface( + &self, + interface_id: InterfaceID, + selections: &[Selection], + ) -> Vec { + assert!( + !selections.is_empty(), + "Expected selections to be non-empty when copying to inline fragments on concrete type" + ); + let interface = self.program.schema.interface(interface_id); + let implementing_objects = + interface.recursively_implementing_objects(Arc::as_ref(&self.program.schema)); + let mut sorted_implementing_objects = implementing_objects.into_iter().collect::>(); + 
sorted_implementing_objects.sort(); + sorted_implementing_objects + .iter() + .map(|object_id| { + let concrete_type = Type::Object(*object_id); + Selection::InlineFragment(Arc::new(InlineFragment { + type_condition: Some(concrete_type), + directives: vec![], // Directives not necessary here + selections: self.convert_interface_selections_to_concrete_field_selections( + concrete_type, + selections, + ), + spread_location: Location::generated(), + })) + }) + .collect() + } + + fn is_interface_implemented_by_all_server_defined_types( + &mut self, + interface_id: InterfaceID, + ) -> bool { + *self + .interface_to_are_implementers_server_defined + .entry(interface_id) + .or_insert_with_key(|interface_id| { + let interface = self.program.schema.interface(*interface_id); + let implementing_objects = + interface.recursively_implementing_objects(Arc::as_ref(&self.program.schema)); + !implementing_objects.iter().any(|object_id| { + let object = self.program.schema.object(*object_id); + object.is_extension + }) + }) + } + + // Transform selections on an interface type. 
+ fn transform_selections_given_parent_type( + &mut self, + entry_type: Option, + selections: &[Selection], + ) -> TransformedValue> { + if let Some(Type::Interface(interface_id)) = entry_type { + if self.is_interface_implemented_by_all_server_defined_types(interface_id) { + return TransformedValue::Keep; + } + let transformed_selections = transform_list(selections, |selection| match selection { + Selection::LinkedField(_) + | Selection::ScalarField(_) + | Selection::FragmentSpread(_) => Transformed::Keep, + Selection::InlineFragment(inline_fragment) => { + if inline_fragment.type_condition.is_none() { + self.transform_inline_fragment_with_parent_type(inline_fragment, entry_type) + } else { + Transformed::Keep + } + } + Selection::Condition(condition) => { + self.transform_condition_with_parent_type(condition, entry_type) + } + }); + let selections_to_transform = match &transformed_selections { + TransformedValue::Keep => selections, + TransformedValue::Replace(replaced_selections) => replaced_selections, + }; + let (selections_to_copy, mut selections_to_keep) = + self.partition_selections_to_copy_and_keep(selections_to_transform, interface_id); + if selections_to_copy.is_empty() { + if transformed_selections.should_keep() { + TransformedValue::Keep + } else { + TransformedValue::Replace(selections_to_keep) + } + } else { + selections_to_keep.append( + &mut self.create_inline_fragment_selections_for_interface( + interface_id, + &selections_to_copy, + ), + ); + TransformedValue::Replace(selections_to_keep) + } + } else { + // If no parent type is provided, skip transform + TransformedValue::Keep + } + } + + fn transform_condition_with_parent_type( + &mut self, + condition: &Condition, + parent_type: Option, + ) -> Transformed { + let transformed_selections = + self.transform_selections_given_parent_type(parent_type, &condition.selections); + match transformed_selections { + TransformedValue::Keep => Transformed::Keep, + 
TransformedValue::Replace(transformed_selections) => { + Transformed::Replace(Selection::Condition(Arc::new(Condition { + selections: transformed_selections, + ..condition.clone() + }))) + } + } + } + + fn transform_inline_fragment_with_parent_type( + &mut self, + inline_fragment: &InlineFragment, + parent_type: Option, + ) -> Transformed { + let transformed_selections = + self.transform_selections_given_parent_type(parent_type, &inline_fragment.selections); + match transformed_selections { + TransformedValue::Keep => Transformed::Keep, + TransformedValue::Replace(transformed_selections) => { + Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { + selections: transformed_selections, + ..inline_fragment.clone() + }))) + } + } + } +} + +impl Transformer for RelayResolverAbstractTypesTransform<'_> { + const NAME: &'static str = "RelayResolverAbstractTypesTransform"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn transform_inline_fragment( + &mut self, + inline_fragment: &InlineFragment, + ) -> Transformed { + if inline_fragment.type_condition.is_none() { + self.default_transform_inline_fragment(inline_fragment) + } else { + let selections = self.transform_selections(&inline_fragment.selections); + // If our child selections had no changes, do not copy them until we have to replace them + let selections_to_transform = match &selections { + TransformedValue::Keep => &inline_fragment.selections, + TransformedValue::Replace(replaced_selections) => replaced_selections, + }; + let transformed_selections = self.transform_selections_given_parent_type( + inline_fragment.type_condition, + selections_to_transform, + ); + match transformed_selections { + TransformedValue::Keep => { + if !selections.should_keep() { + Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { + selections: selections_to_transform.to_vec(), + ..inline_fragment.clone() + }))) + } else { + Transformed::Keep + } + } + 
TransformedValue::Replace(transformed_selections) => { + Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { + selections: transformed_selections, + ..inline_fragment.clone() + }))) + } + } + } + } + + fn transform_fragment( + &mut self, + fragment: &FragmentDefinition, + ) -> Transformed { + let selections = self.transform_selections(&fragment.selections); + let selections_to_transform = match &selections { + TransformedValue::Keep => &fragment.selections, + TransformedValue::Replace(replaced_selections) => replaced_selections, + }; + let transformed_selections = self.transform_selections_given_parent_type( + Some(fragment.type_condition), + selections_to_transform, + ); + match transformed_selections { + TransformedValue::Keep => { + if !selections.should_keep() { + Transformed::Replace(FragmentDefinition { + selections: selections_to_transform.to_vec(), + ..fragment.clone() + }) + } else { + Transformed::Keep + } + } + TransformedValue::Replace(transformed_selections) => { + Transformed::Replace(FragmentDefinition { + selections: transformed_selections, + ..fragment.clone() + }) + } + } + } + + fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { + let selections = self.transform_selections(&field.selections); + let selections_to_transform = match &selections { + TransformedValue::Keep => &field.selections, + TransformedValue::Replace(replaced_selections) => replaced_selections, + }; + let field_type = self.program.schema.field(field.definition.item); + let edge_to_type = field_type.type_.inner(); + let transformed_selections = self + .transform_selections_given_parent_type(Some(edge_to_type), selections_to_transform); + match transformed_selections { + TransformedValue::Keep => { + if !selections.should_keep() { + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + selections: selections_to_transform.to_vec(), + ..field.clone() + }))) + } else { + Transformed::Keep + } + } + 
TransformedValue::Replace(transformed_selections) => { + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + selections: transformed_selections, + ..field.clone() + }))) + } + } + } +} diff --git a/compiler/crates/relay-transforms/src/remove_base_fragments.rs b/compiler/crates/relay-transforms/src/remove_base_fragments.rs index 1cee49d5dffca..8361fcad729e9 100644 --- a/compiler/crates/relay-transforms/src/remove_base_fragments.rs +++ b/compiler/crates/relay-transforms/src/remove_base_fragments.rs @@ -5,13 +5,22 @@ * LICENSE file in the root directory of this source tree. */ +use common::DirectiveName; +use common::NamedItem; use graphql_ir::FragmentDefinition; use graphql_ir::FragmentDefinitionNameSet; use graphql_ir::OperationDefinition; use graphql_ir::Program; use graphql_ir::Transformed; use graphql_ir::Transformer; +use intern::string_key::Intern; +use lazy_static::lazy_static; +use schema::Schema; +lazy_static! { + pub static ref RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE: DirectiveName = + DirectiveName("__belongs_to_base_schema".intern()); +} /// This transform removes the given list of base fragments from the Program. 
/// This is useful if earlier steps need access to fragments from some base /// project, but we don't want to write output files for them and can skip over @@ -25,7 +34,8 @@ pub fn remove_base_fragments( return program.clone(); } let mut transform = StripBaseFragmentsTransform { - base_fragment_names: &base_fragment_names, + program, + base_fragment_names, }; transform .transform_program(program) @@ -33,6 +43,7 @@ pub fn remove_base_fragments( } struct StripBaseFragmentsTransform<'a> { + program: &'a Program, base_fragment_names: &'a FragmentDefinitionNameSet, } @@ -55,7 +66,26 @@ impl<'a> Transformer for StripBaseFragmentsTransform<'a> { if self.base_fragment_names.contains(&fragment.name.item) { Transformed::Delete } else { - Transformed::Keep + // For resolvers that belong to the base schema, we don't need to generate fragments. + // These fragments should be generated during compilation of the base project. + let is_base_resolver_type = + fragment + .type_condition + .get_object_id() + .is_some_and(|object_id| { + let object = self.program.schema.object(object_id); + + object.is_extension + && object + .directives + .named(*RESOLVER_BELONGS_TO_BASE_SCHEMA_DIRECTIVE) + .is_some() + }); + if is_base_resolver_type { + Transformed::Delete + } else { + Transformed::Keep + } } } } diff --git a/compiler/crates/relay-transforms/src/required_directive.rs b/compiler/crates/relay-transforms/src/required_directive.rs new file mode 100644 index 0000000000000..b7865404bcac9 --- /dev/null +++ b/compiler/crates/relay-transforms/src/required_directive.rs @@ -0,0 +1,610 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod requireable_field; +mod validation_message; + +use std::borrow::Cow; +use std::mem; +use std::sync::Arc; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::Location; +use common::NamedItem; +use common::WithLocation; +use graphql_ir::associated_data_impl; +use graphql_ir::Directive; +use graphql_ir::Field; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentDefinitionNameMap; +use graphql_ir::InlineFragment; +use graphql_ir::LinkedField; +use graphql_ir::OperationDefinition; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::Selection; +use graphql_ir::Transformed; +use graphql_ir::TransformedValue; +use graphql_ir::Transformer; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use intern::string_key::StringKeyMap; +use intern::Lookup; +use lazy_static::lazy_static; +use requireable_field::RequireableField; +use requireable_field::RequiredMetadata; + +use self::validation_message::ValidationMessage; +use crate::DirectiveFinder; +use crate::FragmentAliasMetadata; + +lazy_static! 
{ + pub static ref REQUIRED_DIRECTIVE_NAME: DirectiveName = DirectiveName("required".intern()); + pub static ref ACTION_ARGUMENT: ArgumentName = ArgumentName("action".intern()); + pub static ref CHILDREN_CAN_BUBBLE_METADATA_KEY: DirectiveName = + DirectiveName("__childrenCanBubbleNull".intern()); + pub static ref THROW_ACTION: StringKey = "THROW".intern(); + static ref LOG_ACTION: StringKey = "LOG".intern(); + static ref NONE_ACTION: StringKey = "NONE".intern(); + static ref INLINE_DIRECTIVE_NAME: DirectiveName = DirectiveName("inline".intern()); +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct RequiredMetadataDirective { + pub action: RequiredAction, + pub path: StringKey, +} +associated_data_impl!(RequiredMetadataDirective); + +pub fn required_directive(program: &Program) -> DiagnosticsResult { + let mut transform = RequiredDirective::new(program); + + let next_program = transform + .transform_program(program) + .replace_or_else(|| program.clone()); + + if transform.errors.is_empty() { + Ok(next_program) + } else { + Err(transform.errors) + } +} + +// #[derive(Clone)] +struct MaybeRequiredField { + required: Option, + field_name: WithLocation, +} + +struct RequiredField { + required: RequiredMetadata, + field_name: WithLocation, +} + +struct RequiredDirective<'s> { + program: &'s Program, + errors: Vec, + path: Vec<&'s str>, + within_abstract_inline_fragment: bool, + parent_inline_fragment_directive: Option, + path_required_map: StringKeyMap, + current_node_required_children: StringKeyMap, + required_children_map: StringKeyMap>, + required_directive_visitor: RequiredDirectiveVisitor<'s>, +} + +impl<'program> RequiredDirective<'program> { + fn new(program: &'program Program) -> Self { + Self { + program, + errors: Default::default(), + path: vec![], + within_abstract_inline_fragment: false, + parent_inline_fragment_directive: None, + path_required_map: Default::default(), + current_node_required_children: Default::default(), + required_children_map: 
Default::default(), + required_directive_visitor: RequiredDirectiveVisitor { + program, + visited_fragments: Default::default(), + }, + } + } + + fn reset_state(&mut self) { + self.path_required_map = Default::default(); + self.current_node_required_children = Default::default(); + self.parent_inline_fragment_directive = None; + self.required_children_map = Default::default(); + } + + fn assert_not_within_abstract_inline_fragment(&mut self, directive_location: Location) { + if self.within_abstract_inline_fragment { + self.errors.push(Diagnostic::error( + ValidationMessage::RequiredWithinAbstractInlineFragment, + // TODO(T70172661): Also reference the location of the inline fragment, once they have a location. + directive_location, + )) + } + } + + fn assert_not_within_inline_directive(&mut self, directive_location: Location) { + if let Some(location) = self.parent_inline_fragment_directive { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredWithinInlineDirective, + directive_location, + ) + .annotate("The fragment is annotated as @inline here.", location), + ) + } + } + + fn assert_compatible_nullability(&mut self, path: StringKey, current: MaybeRequiredField) { + if let Some(previous) = self.path_required_map.get(&path) { + if let Some(previous_metadata) = &previous.required { + if let Some(current_metadata) = current.required { + if previous_metadata.action != current_metadata.action { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredActionMismatch { + field_name: current.field_name.item, + }, + previous_metadata.action_location, + ) + .annotate( + "should be the same as the `action` declared here", + current_metadata.action_location, + ), + ) + } + } else { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredFieldMismatch { + field_name: current.field_name.item, + }, + previous.field_name.location, + ) + .annotate("but not @required here", current.field_name.location), + ); + } + } else if 
current.required.is_some() { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredFieldMismatch { + field_name: current.field_name.item, + }, + current.field_name.location, + ) + .annotate("but not @required here", previous.field_name.location), + ) + } + } else { + self.path_required_map.insert(path, current); + } + } + + fn get_required_metadata( + &mut self, + field: &T, + path_name: StringKey, + ) -> Option { + let maybe_required = match field.required_metadata() { + Err(err) => { + self.errors.push(err); + return None; + } + Ok(required) => required, + }; + + let field_name = field.name_with_location(&self.program.schema); + + if let Some(metadata) = maybe_required { + self.assert_not_within_abstract_inline_fragment(metadata.directive_location); + self.assert_not_within_inline_directive(metadata.directive_location); + self.current_node_required_children.insert( + path_name, + RequiredField { + field_name, + required: metadata, + }, + ); + } + + self.assert_compatible_nullability( + path_name, + MaybeRequiredField { + required: maybe_required, + field_name, + }, + ); + maybe_required + } + + fn assert_compatible_required_children_severity( + &mut self, + required_metadata: RequiredMetadata, + ) { + let parent_action = required_metadata.action; + for required_child in self.current_node_required_children.values() { + if required_child.required.action < parent_action { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredFieldInvalidNesting { + suggested_action: required_child.required.action.into(), + }, + required_metadata.action_location, + ) + .annotate( + "so that it can match its parent", + required_child.required.action_location, + ), + ); + } + } + } + fn assert_compatible_required_children( + &mut self, + field: &T, + field_path: StringKey, + ) { + let previous_required_children = match self.required_children_map.get(&field_path) { + Some(it) => it, + _ => { + // We haven't seen any other instances of this field, so 
there's no validation to perform. + return; + } + }; + + // Check if this field has a required child field which was omitted in a previously encountered parent. + for (path, required_child) in self.current_node_required_children.iter() { + if !previous_required_children.contains_key(path) { + if let Some(other_parent) = self.path_required_map.get(&field_path) { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredFieldMissing { + field_name: required_child.field_name.item, + }, + required_child.field_name.location, + ) + .annotate("but is missing from", other_parent.field_name.location), + ) + } else { + // We want to give a location of the other parent which is + // missing this field. We expect that we will be able to + // find it in `self.path_required_map` since it should + // contain data about every visited field in this program + // and the other parent _must_ have already been visited. + panic!("Could not find other parent node at path \"{}\".", { + field_path + }); + } + } + } + + // Check if a previous reference to this field had a required child field which we are missing. 
+ for (path, required_child) in previous_required_children.iter() { + if !self.current_node_required_children.contains_key(path) { + self.errors.push( + Diagnostic::error( + ValidationMessage::RequiredFieldMissing { + field_name: required_child.field_name.item, + }, + required_child.field_name.location, + ) + .annotate( + "but is missing from", + field.name_with_location(&self.program.schema).location, + ), + ) + } + } + } +} + +impl<'s> Transformer for RequiredDirective<'s> { + const NAME: &'static str = "RequiredDirectiveTransform"; + const VISIT_ARGUMENTS: bool = false; + const VISIT_DIRECTIVES: bool = false; + + fn transform_fragment( + &mut self, + fragment: &FragmentDefinition, + ) -> Transformed { + if !self.required_directive_visitor.visit_fragment(fragment) { + return Transformed::Keep; + } + self.reset_state(); + self.parent_inline_fragment_directive = fragment + .directives + .named(*INLINE_DIRECTIVE_NAME) + .map(|inline_directive| inline_directive.name.location); + + let selections = self.transform_selections(&fragment.selections); + let directives = maybe_add_children_can_bubble_metadata_directive( + &fragment.directives, + &self.current_node_required_children, + ); + if selections.should_keep() && directives.should_keep() { + return Transformed::Keep; + } + Transformed::Replace(FragmentDefinition { + directives: directives.replace_or_else(|| fragment.directives.clone()), + selections: selections.replace_or_else(|| fragment.selections.clone()), + ..fragment.clone() + }) + } + + fn transform_operation( + &mut self, + operation: &OperationDefinition, + ) -> Transformed { + if !self + .required_directive_visitor + .find(operation.selections.iter().collect()) + { + return Transformed::Keep; + } + self.reset_state(); + let selections = self.transform_selections(&operation.selections); + let directives = maybe_add_children_can_bubble_metadata_directive( + &operation.directives, + &self.current_node_required_children, + ); + if selections.should_keep() && 
directives.should_keep() { + return Transformed::Keep; + } + Transformed::Replace(OperationDefinition { + directives: directives.replace_or_else(|| operation.directives.clone()), + selections: selections.replace_or_else(|| operation.selections.clone()), + ..operation.clone() + }) + } + + fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed { + let name = field.alias_or_name(&self.program.schema).lookup(); + self.path.push(name); + let path_name = self.path.join(".").intern(); + self.path.pop(); + + match self.get_required_metadata(field, path_name) { + None => Transformed::Keep, + Some(required_metadata) => { + Transformed::Replace(Selection::ScalarField(Arc::new(ScalarField { + directives: add_metadata_directive( + &field.directives, + path_name, + required_metadata.action, + ), + ..field.clone() + }))) + } + } + } + + fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { + let name = field.alias_or_name(&self.program.schema).lookup(); + self.path.push(name); + let path_name = self.path.join(".").intern(); + + let maybe_required_metadata = self.get_required_metadata(field, path_name); + let next_directives = match maybe_required_metadata { + Some(required_metadata) => Cow::from(add_metadata_directive( + &field.directives, + path_name, + required_metadata.action, + )), + None => Cow::from(&field.directives), + }; + + // Once we've handled our own directive, take the parent's required + // children map, leaving behind an empty/default map which our children + // can populate. 
+ let parent_node_required_children = mem::take(&mut self.current_node_required_children); + + let previous_abstract_fragment = + mem::replace(&mut self.within_abstract_inline_fragment, false); + + let selections = self.transform_selections(&field.selections); + + self.assert_compatible_required_children(field, path_name); + if let Some(required_metadata) = maybe_required_metadata { + self.assert_compatible_required_children_severity(required_metadata); + } + + let next_directives_with_metadata = maybe_add_children_can_bubble_metadata_directive( + &next_directives, + &self.current_node_required_children, + ); + + self.within_abstract_inline_fragment = previous_abstract_fragment; + + let required_children = mem::replace( + &mut self.current_node_required_children, + parent_node_required_children, + ); + + self.required_children_map + .insert(path_name, required_children); + + self.path.pop(); + + if selections.should_keep() + && next_directives_with_metadata.should_keep() + && maybe_required_metadata.is_none() + { + Transformed::Keep + } else { + Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { + directives: next_directives_with_metadata + .replace_or_else(|| next_directives.into()), + selections: selections.replace_or_else(|| field.selections.clone()), + ..field.clone() + }))) + } + } + + fn transform_inline_fragment(&mut self, fragment: &InlineFragment) -> Transformed { + let previous = self.within_abstract_inline_fragment; + + let maybe_alias = + FragmentAliasMetadata::find(&fragment.directives).map(|metadata| metadata.alias.item); + + let next_fragment = if let Some(alias) = maybe_alias { + self.within_abstract_inline_fragment = false; + self.path.push(alias.lookup()); + let path_name = self.path.join(".").intern(); + + // Once we've handled our own directive, take the parent's required + // children map, leaving behind an empty/default map which our children + // can populate. 
+ let parent_node_required_children = mem::take(&mut self.current_node_required_children); + let next_selections = self.transform_selections(&fragment.selections); + + let next_directives_with_metadata = maybe_add_children_can_bubble_metadata_directive( + &fragment.directives, + &self.current_node_required_children, + ); + let required_children = mem::replace( + &mut self.current_node_required_children, + parent_node_required_children, + ); + + self.required_children_map + .insert(path_name, required_children); + self.path.pop(); + + if next_selections.should_keep() && next_directives_with_metadata.should_keep() { + Transformed::Keep + } else { + Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { + directives: next_directives_with_metadata + .replace_or_else(|| fragment.directives.clone()), + selections: next_selections.replace_or_else(|| fragment.selections.clone()), + ..fragment.clone() + }))) + } + } else { + if let Some(type_) = fragment.type_condition { + if type_.is_abstract_type() { + self.within_abstract_inline_fragment = true; + } + } + self.default_transform_inline_fragment(fragment) + }; + + self.within_abstract_inline_fragment = previous; + next_fragment + } +} + +fn add_metadata_directive( + directives: &[Directive], + path_name: StringKey, + action: RequiredAction, +) -> Vec { + let mut next_directives: Vec = Vec::with_capacity(directives.len() + 1); + next_directives.extend(directives.iter().cloned()); + next_directives.push( + RequiredMetadataDirective { + action, + path: path_name, + } + .into(), + ); + next_directives +} + +fn maybe_add_children_can_bubble_metadata_directive( + directives: &[Directive], + current_node_required_children: &StringKeyMap, +) -> TransformedValue> { + let children_can_bubble = current_node_required_children + .values() + .any(|child| child.required.action != RequiredAction::Throw); + + if !children_can_bubble { + return TransformedValue::Keep; + } + let mut next_directives: Vec = 
Vec::with_capacity(directives.len() + 1); + for directive in directives.iter() { + next_directives.push(directive.clone()); + } + + next_directives.push(Directive { + name: WithLocation::generated(*CHILDREN_CAN_BUBBLE_METADATA_KEY), + arguments: vec![], + data: None, + }); + TransformedValue::Replace(next_directives) +} + +// Possible @required `action` enum values ordered by severity. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Debug, Hash)] +pub enum RequiredAction { + None, + Log, + Throw, +} + +impl From for StringKey { + fn from(val: RequiredAction) -> Self { + match val { + RequiredAction::None => *NONE_ACTION, + RequiredAction::Log => *LOG_ACTION, + RequiredAction::Throw => *THROW_ACTION, + } + } +} + +impl From for RequiredAction { + fn from(action: StringKey) -> Self { + match action { + _ if action == *THROW_ACTION => Self::Throw, + _ if action == *LOG_ACTION => Self::Log, + _ if action == *NONE_ACTION => Self::None, + // Actions that don't conform to the GraphQL schema should have been filtered out in IR validation. 
+ _ => unreachable!(), + } + } +} + +struct RequiredDirectiveVisitor<'s> { + program: &'s Program, + visited_fragments: FragmentDefinitionNameMap, +} + +impl<'s> DirectiveFinder for RequiredDirectiveVisitor<'s> { + fn visit_directive(&self, directive: &Directive) -> bool { + directive.name.item == *REQUIRED_DIRECTIVE_NAME + } + + fn visit_fragment_spread(&mut self, fragment_spread: &graphql_ir::FragmentSpread) -> bool { + let fragment = self + .program + .fragment(fragment_spread.fragment.item) + .unwrap(); + self.visit_fragment(fragment) + } +} + +impl<'s> RequiredDirectiveVisitor<'s> { + fn visit_fragment(&mut self, fragment: &FragmentDefinition) -> bool { + if let Some(val) = self.visited_fragments.get(&fragment.name.item) { + return *val; + } + // Avoid dead loop in self-referencing fragments + self.visited_fragments.insert(fragment.name.item, false); + let result = self.find(fragment.selections.iter().collect()); + self.visited_fragments.insert(fragment.name.item, result); + result + } +} diff --git a/compiler/crates/relay-transforms/src/required_directive/mod.rs b/compiler/crates/relay-transforms/src/required_directive/mod.rs deleted file mode 100644 index 2c01cce3b4c78..0000000000000 --- a/compiler/crates/relay-transforms/src/required_directive/mod.rs +++ /dev/null @@ -1,581 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -mod requireable_field; -mod validation_message; - -use std::borrow::Cow; -use std::mem; -use std::sync::Arc; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::Location; -use common::NamedItem; -use common::WithLocation; -use graphql_ir::associated_data_impl; -use graphql_ir::Directive; -use graphql_ir::Field; -use graphql_ir::FragmentDefinition; -use graphql_ir::FragmentDefinitionNameMap; -use graphql_ir::InlineFragment; -use graphql_ir::LinkedField; -use graphql_ir::OperationDefinition; -use graphql_ir::Program; -use graphql_ir::ScalarField; -use graphql_ir::Selection; -use graphql_ir::Transformed; -use graphql_ir::TransformedValue; -use graphql_ir::Transformer; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::string_key::StringKeyMap; -use intern::Lookup; -use lazy_static::lazy_static; -use requireable_field::RequireableField; -use requireable_field::RequiredMetadata; - -use self::validation_message::ValidationMessage; -use crate::DirectiveFinder; -use crate::FragmentAliasMetadata; - -lazy_static! 
{ - pub static ref REQUIRED_DIRECTIVE_NAME: DirectiveName = DirectiveName("required".intern()); - pub static ref ACTION_ARGUMENT: ArgumentName = ArgumentName("action".intern()); - pub static ref CHILDREN_CAN_BUBBLE_METADATA_KEY: DirectiveName = - DirectiveName("__childrenCanBubbleNull".intern()); - static ref THROW_ACTION: StringKey = "THROW".intern(); - static ref LOG_ACTION: StringKey = "LOG".intern(); - static ref NONE_ACTION: StringKey = "NONE".intern(); - static ref INLINE_DIRECTIVE_NAME: DirectiveName = DirectiveName("inline".intern()); -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct RequiredMetadataDirective { - pub action: RequiredAction, - pub path: StringKey, -} -associated_data_impl!(RequiredMetadataDirective); - -pub fn required_directive(program: &Program) -> DiagnosticsResult { - let mut transform = RequiredDirective::new(program); - - let next_program = transform - .transform_program(program) - .replace_or_else(|| program.clone()); - - if transform.errors.is_empty() { - Ok(next_program) - } else { - Err(transform.errors) - } -} - -// #[derive(Clone)] -struct MaybeRequiredField { - required: Option, - field_name: WithLocation, -} - -struct RequiredField { - required: RequiredMetadata, - field_name: WithLocation, -} - -struct RequiredDirective<'s> { - program: &'s Program, - errors: Vec, - path: Vec<&'s str>, - within_abstract_inline_fragment: bool, - parent_inline_fragment_directive: Option, - path_required_map: StringKeyMap, - current_node_required_children: StringKeyMap, - required_children_map: StringKeyMap>, - required_directive_visitor: RequiredDirectiveVisitor<'s>, -} - -impl<'program> RequiredDirective<'program> { - fn new(program: &'program Program) -> Self { - Self { - program, - errors: Default::default(), - path: vec![], - within_abstract_inline_fragment: false, - parent_inline_fragment_directive: None, - path_required_map: Default::default(), - current_node_required_children: Default::default(), - required_children_map: 
Default::default(), - required_directive_visitor: RequiredDirectiveVisitor { - program, - visited_fragments: Default::default(), - }, - } - } - - fn reset_state(&mut self) { - self.path_required_map = Default::default(); - self.current_node_required_children = Default::default(); - self.parent_inline_fragment_directive = None; - self.required_children_map = Default::default(); - } - - fn assert_not_within_abstract_inline_fragment(&mut self, directive_location: Location) { - if self.within_abstract_inline_fragment { - self.errors.push(Diagnostic::error( - ValidationMessage::RequiredWithinAbstractInlineFragment, - // TODO(T70172661): Also referece the location of the inline fragment, once they have a location. - directive_location, - )) - } - } - - fn assert_not_within_inline_directive(&mut self, directive_location: Location) { - if let Some(location) = self.parent_inline_fragment_directive { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredWithinInlineDirective, - directive_location, - ) - .annotate("The fragment is annotated as @inline here.", location), - ) - } - } - - fn assert_compatible_nullability(&mut self, path: StringKey, current: MaybeRequiredField) { - if let Some(previous) = self.path_required_map.get(&path) { - if let Some(previous_metadata) = &previous.required { - if let Some(current_metadata) = current.required { - if previous_metadata.action != current_metadata.action { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredActionMismatch { - field_name: current.field_name.item, - }, - previous_metadata.action_location, - ) - .annotate( - "should be the same as the `action` declared here", - current_metadata.action_location, - ), - ) - } - } else { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredFieldMismatch { - field_name: current.field_name.item, - }, - previous.field_name.location, - ) - .annotate("but not @required here", current.field_name.location), - ); - } - } else if 
current.required.is_some() { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredFieldMismatch { - field_name: current.field_name.item, - }, - current.field_name.location, - ) - .annotate("but not @required here", previous.field_name.location), - ) - } - } else { - self.path_required_map.insert(path, current); - } - } - - fn get_required_metadata( - &mut self, - field: &T, - path_name: StringKey, - ) -> Option { - let maybe_required = match field.required_metadata() { - Err(err) => { - self.errors.push(err); - return None; - } - Ok(required) => required, - }; - - let field_name = field.name_with_location(&self.program.schema); - - if let Some(metadata) = maybe_required { - self.assert_not_within_abstract_inline_fragment(metadata.directive_location); - self.assert_not_within_inline_directive(metadata.directive_location); - self.current_node_required_children.insert( - path_name, - RequiredField { - field_name, - required: metadata, - }, - ); - } - - self.assert_compatible_nullability( - path_name, - MaybeRequiredField { - required: maybe_required, - field_name, - }, - ); - maybe_required - } - - fn assert_compatible_required_children_severity( - &mut self, - required_metadata: RequiredMetadata, - ) { - let parent_action = required_metadata.action; - for required_child in self.current_node_required_children.values() { - if required_child.required.action < parent_action { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredFieldInvalidNesting { - suggested_action: required_child.required.action.into(), - }, - required_metadata.action_location, - ) - .annotate( - "so that it can match its parent", - required_child.required.action_location, - ), - ); - } - } - } - fn assert_compatible_required_children( - &mut self, - field: &T, - field_path: StringKey, - ) { - let previous_required_children = match self.required_children_map.get(&field_path) { - Some(it) => it, - _ => { - // We haven't seen any other instances of this field, so 
there's no validation to perform. - return; - } - }; - - // Check if this field has a required child field which was omitted in a previously encountered parent. - for (path, required_child) in self.current_node_required_children.iter() { - if !previous_required_children.contains_key(path) { - if let Some(other_parent) = self.path_required_map.get(&field_path) { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredFieldMissing { - field_name: required_child.field_name.item, - }, - required_child.field_name.location, - ) - .annotate("but is missing from", other_parent.field_name.location), - ) - } else { - // We want to give a location of the other parent which is - // missing this field. We expect that we will be able to - // find it in `self.path_required_map` since it should - // contain data about every visited field in this program - // and the other parent _must_ have already been visited. - panic!("Could not find other parent node at path \"{}\".", { - field_path - }); - } - } - } - - // Check if a previous reference to this field had a required child field which we are missing. 
- for (path, required_child) in previous_required_children.iter() { - if !self.current_node_required_children.contains_key(path) { - self.errors.push( - Diagnostic::error( - ValidationMessage::RequiredFieldMissing { - field_name: required_child.field_name.item, - }, - required_child.field_name.location, - ) - .annotate( - "but is missing from", - field.name_with_location(&self.program.schema).location, - ), - ) - } - } - } -} - -impl<'s> Transformer for RequiredDirective<'s> { - const NAME: &'static str = "RequiredDirectiveTransform"; - const VISIT_ARGUMENTS: bool = false; - const VISIT_DIRECTIVES: bool = false; - - fn transform_fragment( - &mut self, - fragment: &FragmentDefinition, - ) -> Transformed { - if !self.required_directive_visitor.visit_fragment(fragment) { - return Transformed::Keep; - } - self.reset_state(); - self.parent_inline_fragment_directive = fragment - .directives - .named(*INLINE_DIRECTIVE_NAME) - .map(|inline_directive| inline_directive.name.location); - - let selections = self.transform_selections(&fragment.selections); - let directives = maybe_add_children_can_bubble_metadata_directive( - &fragment.directives, - &self.current_node_required_children, - ); - if selections.should_keep() && directives.should_keep() { - return Transformed::Keep; - } - Transformed::Replace(FragmentDefinition { - directives: directives.replace_or_else(|| fragment.directives.clone()), - selections: selections.replace_or_else(|| fragment.selections.clone()), - ..fragment.clone() - }) - } - - fn transform_operation( - &mut self, - operation: &OperationDefinition, - ) -> Transformed { - if !self - .required_directive_visitor - .find(operation.selections.iter().collect()) - { - return Transformed::Keep; - } - self.reset_state(); - let selections = self.transform_selections(&operation.selections); - let directives = maybe_add_children_can_bubble_metadata_directive( - &operation.directives, - &self.current_node_required_children, - ); - if selections.should_keep() && 
directives.should_keep() { - return Transformed::Keep; - } - Transformed::Replace(OperationDefinition { - directives: directives.replace_or_else(|| operation.directives.clone()), - selections: selections.replace_or_else(|| operation.selections.clone()), - ..operation.clone() - }) - } - - fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed { - let name = field.alias_or_name(&self.program.schema).lookup(); - self.path.push(name); - let path_name = self.path.join(".").intern(); - self.path.pop(); - - match self.get_required_metadata(field, path_name) { - None => Transformed::Keep, - Some(required_metadata) => { - Transformed::Replace(Selection::ScalarField(Arc::new(ScalarField { - directives: add_metadata_directive( - &field.directives, - path_name, - required_metadata.action, - ), - ..field.clone() - }))) - } - } - } - - fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { - let name = field.alias_or_name(&self.program.schema).lookup(); - self.path.push(name); - let path_name = self.path.join(".").intern(); - - let maybe_required_metadata = self.get_required_metadata(field, path_name); - let next_directives = match maybe_required_metadata { - Some(required_metadata) => Cow::from(add_metadata_directive( - &field.directives, - path_name, - required_metadata.action, - )), - None => Cow::from(&field.directives), - }; - - // Once we've handled our own directive, take the parent's required - // children map, leaving behind an empty/default map which our children - // can populate. 
- let parent_node_required_children = mem::take(&mut self.current_node_required_children); - - let previous_abstract_fragment = - mem::replace(&mut self.within_abstract_inline_fragment, false); - - let selections = self.transform_selections(&field.selections); - - self.assert_compatible_required_children(field, path_name); - if let Some(required_metadata) = maybe_required_metadata { - self.assert_compatible_required_children_severity(required_metadata); - } - - let next_directives_with_metadata = maybe_add_children_can_bubble_metadata_directive( - &next_directives, - &self.current_node_required_children, - ); - - self.within_abstract_inline_fragment = previous_abstract_fragment; - - let required_children = mem::replace( - &mut self.current_node_required_children, - parent_node_required_children, - ); - - self.required_children_map - .insert(path_name, required_children); - - self.path.pop(); - - if selections.should_keep() - && next_directives_with_metadata.should_keep() - && maybe_required_metadata.is_none() - { - Transformed::Keep - } else { - Transformed::Replace(Selection::LinkedField(Arc::new(LinkedField { - directives: next_directives_with_metadata - .replace_or_else(|| next_directives.into()), - selections: selections.replace_or_else(|| field.selections.clone()), - ..field.clone() - }))) - } - } - - fn transform_inline_fragment(&mut self, fragment: &InlineFragment) -> Transformed { - let previous = self.within_abstract_inline_fragment; - - let maybe_alias = - FragmentAliasMetadata::find(&fragment.directives).map(|metadata| metadata.alias.item); - - if let Some(alias) = maybe_alias { - self.path.push(alias.lookup()) - } else if let Some(type_) = fragment.type_condition { - if type_.is_abstract_type() { - self.within_abstract_inline_fragment = true; - } - } - - let next_fragment = self.default_transform_inline_fragment(fragment); - - if maybe_alias.is_some() { - self.path.pop(); - } - - self.within_abstract_inline_fragment = previous; - next_fragment - } -} - 
-fn add_metadata_directive( - directives: &[Directive], - path_name: StringKey, - action: RequiredAction, -) -> Vec { - let mut next_directives: Vec = Vec::with_capacity(directives.len() + 1); - next_directives.extend(directives.iter().cloned()); - next_directives.push( - RequiredMetadataDirective { - action, - path: path_name, - } - .into(), - ); - next_directives -} - -fn maybe_add_children_can_bubble_metadata_directive( - directives: &[Directive], - current_node_required_children: &StringKeyMap, -) -> TransformedValue> { - let children_can_bubble = current_node_required_children - .values() - .any(|child| child.required.action != RequiredAction::Throw); - - if !children_can_bubble { - return TransformedValue::Keep; - } - let mut next_directives: Vec = Vec::with_capacity(directives.len() + 1); - for directive in directives.iter() { - next_directives.push(directive.clone()); - } - - next_directives.push(Directive { - name: WithLocation::generated(*CHILDREN_CAN_BUBBLE_METADATA_KEY), - arguments: vec![], - data: None, - }); - TransformedValue::Replace(next_directives) -} - -// Possible @required `action` enum values ordered by severity. -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Debug, Hash)] -pub enum RequiredAction { - None, - Log, - Throw, -} - -impl Into for RequiredAction { - fn into(self) -> StringKey { - match self { - RequiredAction::None => *NONE_ACTION, - RequiredAction::Log => *LOG_ACTION, - RequiredAction::Throw => *THROW_ACTION, - } - } -} - -impl From for RequiredAction { - fn from(action: StringKey) -> Self { - match action { - _ if action == *THROW_ACTION => Self::Throw, - _ if action == *LOG_ACTION => Self::Log, - _ if action == *NONE_ACTION => Self::None, - // Actions that don't conform to the GraphQL schema should have been filtered out in IR validation. 
- _ => unreachable!(), - } - } -} - -struct RequiredDirectiveVisitor<'s> { - program: &'s Program, - visited_fragments: FragmentDefinitionNameMap, -} - -impl<'s> DirectiveFinder for RequiredDirectiveVisitor<'s> { - fn visit_directive(&self, directive: &Directive) -> bool { - directive.name.item == *REQUIRED_DIRECTIVE_NAME - } - - fn visit_fragment_spread(&mut self, fragment_spread: &graphql_ir::FragmentSpread) -> bool { - let fragment = self - .program - .fragment(fragment_spread.fragment.item) - .unwrap(); - self.visit_fragment(fragment) - } -} - -impl<'s> RequiredDirectiveVisitor<'s> { - fn visit_fragment(&mut self, fragment: &FragmentDefinition) -> bool { - if let Some(val) = self.visited_fragments.get(&fragment.name.item) { - return *val; - } - // Avoid dead loop in self-referencing fragments - self.visited_fragments.insert(fragment.name.item, false); - let result = self.find(fragment.selections.iter().collect()); - self.visited_fragments.insert(fragment.name.item, result); - result - } -} diff --git a/compiler/crates/relay-transforms/src/required_directive/validation_message.rs b/compiler/crates/relay-transforms/src/required_directive/validation_message.rs index dbe95279f581e..76834e7e46b10 100644 --- a/compiler/crates/relay-transforms/src/required_directive/validation_message.rs +++ b/compiler/crates/relay-transforms/src/required_directive/validation_message.rs @@ -8,7 +8,8 @@ use intern::string_key::StringKey; use thiserror::Error; -#[derive(Error, Debug)] +#[derive(Error, Debug, serde::Serialize)] +#[serde(tag = "type")] pub(super) enum ValidationMessage { #[error( "Unexpected @required within inline fragment on an abstract type. At runtime we cannot know if this field is null, or if it's missing because the inline fragment did not match. Consider using `@alias` to give your inline fragment a name." 
diff --git a/compiler/crates/relay-transforms/src/root_variables.rs b/compiler/crates/relay-transforms/src/root_variables.rs index 0f7ffd024ae3a..9882b76ca4239 100644 --- a/compiler/crates/relay-transforms/src/root_variables.rs +++ b/compiler/crates/relay-transforms/src/root_variables.rs @@ -25,6 +25,7 @@ use schema::Schema; use schema::Type; use schema::TypeReference; +use super::RelayResolverMetadata; use crate::no_inline::NO_INLINE_DIRECTIVE_NAME; pub type VariableMap = HashMap; @@ -212,6 +213,21 @@ impl<'a, 'b> Visitor for VariablesVisitor<'a, 'b> { const VISIT_ARGUMENTS: bool = true; const VISIT_DIRECTIVES: bool = true; + fn visit_directive(&mut self, directive: &graphql_ir::Directive) { + if directive.name.item == RelayResolverMetadata::directive_name() { + if let Some(relay_resolver_metadata) = RelayResolverMetadata::from(directive) { + for arg in relay_resolver_metadata.field_arguments.iter() { + if let Value::Variable(var) = &arg.value.item { + if self.is_root_variable(var.name.item) { + self.record_root_variable_usage(&var.name, &var.type_); + } + } + } + } + } + self.default_visit_directive(directive); + } + fn visit_fragment_spread(&mut self, spread: &FragmentSpread) { self.visit_directives(&spread.directives); let fragment = self diff --git a/compiler/crates/relay-transforms/src/skip_client_extensions.rs b/compiler/crates/relay-transforms/src/skip_client_extensions.rs index 845ce18eaca3f..fa36a2c5ca59b 100644 --- a/compiler/crates/relay-transforms/src/skip_client_extensions.rs +++ b/compiler/crates/relay-transforms/src/skip_client_extensions.rs @@ -61,8 +61,7 @@ impl<'s> Transformer for SkipClientExtensionsTransform<'s> { &mut self, operation: &OperationDefinition, ) -> Transformed { - let transformed = self.default_transform_operation(operation); - transformed + self.default_transform_operation(operation) } fn transform_fragment( diff --git a/compiler/crates/relay-transforms/src/skip_redundant_nodes.rs 
b/compiler/crates/relay-transforms/src/skip_redundant_nodes.rs index 318f60e941f25..702a9899014d4 100644 --- a/compiler/crates/relay-transforms/src/skip_redundant_nodes.rs +++ b/compiler/crates/relay-transforms/src/skip_redundant_nodes.rs @@ -21,11 +21,12 @@ use graphql_ir::Program; use graphql_ir::Selection; use graphql_ir::Transformed; use graphql_ir::TransformedValue; +use relay_config::DeferStreamInterface; use schema::SDLSchema; use crate::util::is_relay_custom_inline_fragment_directive; +use crate::ClientEdgeMetadataDirective; use crate::RelayLocationAgnosticBehavior; -use crate::DEFER_STREAM_CONSTANTS; /** * A transform that removes redundant fields and fragment spreads. Redundancy is @@ -118,8 +119,11 @@ use crate::DEFER_STREAM_CONSTANTS; * * 1 can be skipped because it is already fetched at the outer level. */ -pub fn skip_redundant_nodes(program: &Program) -> Program { - let transform = SkipRedundantNodesTransform::new(program); +pub fn skip_redundant_nodes( + program: &Program, + defer_stream_interface: DeferStreamInterface, +) -> Program { + let transform = SkipRedundantNodesTransform::new(program, defer_stream_interface); transform .transform_program(program) .replace_or_else(|| program.clone()) @@ -133,20 +137,26 @@ type Cache = DashMap, SelectionMap)>; pub struct SkipRedundantNodesTransform { schema: Arc, cache: Cache, + defer_stream_interface: DeferStreamInterface, } -impl<'s> SkipRedundantNodesTransform { - fn new(program: &'_ Program) -> Self { +impl SkipRedundantNodesTransform { + fn new(program: &'_ Program, defer_stream_interface: DeferStreamInterface) -> Self { Self { schema: Arc::clone(&program.schema), cache: DashMap::new(), + defer_stream_interface, } } - pub fn from_schema(schema: &Arc) -> Self { + pub fn from_schema( + schema: &Arc, + defer_stream_interface: DeferStreamInterface, + ) -> Self { Self { schema: Arc::clone(schema), cache: DashMap::new(), + defer_stream_interface, } } @@ -277,7 +287,10 @@ impl<'s> SkipRedundantNodesTransform { 
field: &LinkedField, selection_map: &mut SelectionMap, ) -> Transformed> { - let selections = self.transform_selections(&field.selections, selection_map); + let selections = self.transform_selections( + self.get_partitioned_selections(&field.selections), + selection_map, + ); match selections { TransformedValue::Keep => Transformed::Keep, TransformedValue::Replace(selections) => { @@ -298,7 +311,10 @@ impl<'s> SkipRedundantNodesTransform { condition: &Condition, selection_map: &mut SelectionMap, ) -> Transformed> { - let selections = self.transform_selections(&condition.selections, selection_map); + let selections = self.transform_selections( + self.get_partitioned_selections(&condition.selections), + selection_map, + ); match selections { TransformedValue::Keep => Transformed::Keep, TransformedValue::Replace(selections) => { @@ -319,7 +335,19 @@ impl<'s> SkipRedundantNodesTransform { fragment: &InlineFragment, selection_map: &mut SelectionMap, ) -> Transformed> { - let selections = self.transform_selections(&fragment.selections, selection_map); + let selections = self.transform_selections( + // we must not change the order of selections within inline fragments with ClientEdgeMetadataDirective + if fragment + .directives + .named(ClientEdgeMetadataDirective::directive_name()) + .is_some() + { + Vec::from_iter(&fragment.selections) + } else { + self.get_partitioned_selections(&fragment.selections) + }, + selection_map, + ); match selections { TransformedValue::Keep => Transformed::Keep, TransformedValue::Replace(selections) => { @@ -335,10 +363,10 @@ impl<'s> SkipRedundantNodesTransform { } } - // Mostly a copy from Transformer::transform_list, but does partition and pass down `selection_map`. 
+ // Mostly a copy from Transformer::transform_list, but with `selection_map` passed down fn transform_selections( &self, - selections: &[Selection], + selections: Vec<&Selection>, selection_map: &mut SelectionMap, ) -> TransformedValue> { if selections.is_empty() { @@ -346,7 +374,6 @@ impl<'s> SkipRedundantNodesTransform { } let mut result: Vec = Vec::new(); let mut has_changes = false; - let selections = get_partitioned_selections(selections); for (index, prev_item) in selections.iter().enumerate() { let next_item = self.transform_selection(prev_item, selection_map); @@ -389,7 +416,10 @@ impl<'s> SkipRedundantNodesTransform { operation: &OperationDefinition, ) -> Transformed { let mut selection_map = Default::default(); - let selections = self.transform_selections(&operation.selections, &mut selection_map); + let selections = self.transform_selections( + self.get_partitioned_selections(&operation.selections), + &mut selection_map, + ); match selections { TransformedValue::Keep => Transformed::Keep, TransformedValue::Replace(selections) => Transformed::Replace(OperationDefinition { @@ -404,7 +434,10 @@ impl<'s> SkipRedundantNodesTransform { fragment: &FragmentDefinition, ) -> Transformed { let mut selection_map = Default::default(); - let selections = self.transform_selections(&fragment.selections, &mut selection_map); + let selections = self.transform_selections( + self.get_partitioned_selections(&fragment.selections), + &mut selection_map, + ); match selections { TransformedValue::Keep => Transformed::Keep, TransformedValue::Replace(selections) => Transformed::Replace(FragmentDefinition { @@ -438,41 +471,41 @@ impl<'s> SkipRedundantNodesTransform { } TransformedValue::Replace(next_program) } -} -/* Selections are sorted with fields first, "conditionals" - * (inline fragments & conditions) last. This means that all fields that are - * guaranteed to be fetched are encountered prior to any duplicates that may be - * fetched within a conditional. 
- */ -fn get_partitioned_selections(selections: &[Selection]) -> Vec<&Selection> { - let mut result = Vec::with_capacity(selections.len()); - unsafe { result.set_len(selections.len()) }; - let mut non_field_index = selections - .iter() - .filter(|sel| is_selection_linked_or_scalar(sel)) - .count(); - let mut field_index = 0; - for sel in selections.iter() { - if is_selection_linked_or_scalar(sel) { - result[field_index] = sel; - field_index += 1; - } else { - result[non_field_index] = sel; - non_field_index += 1; + /* Selections are sorted with fields first, "conditionals" + * (inline fragments & conditions) last. This means that all fields that are + * guaranteed to be fetched are encountered prior to any duplicates that may be + * fetched within a conditional. + */ + fn get_partitioned_selections<'a>(&self, selections: &'a [Selection]) -> Vec<&'a Selection> { + let mut result = Vec::with_capacity(selections.len()); + unsafe { result.set_len(selections.len()) }; + let mut non_field_index = selections + .iter() + .filter(|sel| self.is_selection_linked_or_scalar(sel)) + .count(); + let mut field_index = 0; + for sel in selections.iter() { + if self.is_selection_linked_or_scalar(sel) { + result[field_index] = sel; + field_index += 1; + } else { + result[non_field_index] = sel; + non_field_index += 1; + } } + result } - result -} -fn is_selection_linked_or_scalar(selection: &Selection) -> bool { - match selection { - Selection::LinkedField(field) => field - .directives - .named(DEFER_STREAM_CONSTANTS.stream_name) - .is_none(), - Selection::ScalarField(_) => true, - _ => false, + fn is_selection_linked_or_scalar(&self, selection: &Selection) -> bool { + match selection { + Selection::LinkedField(field) => field + .directives + .named(self.defer_stream_interface.stream_name) + .is_none(), + Selection::ScalarField(_) => true, + _ => false, + } } } diff --git a/compiler/crates/relay-transforms/src/skip_unreachable_node.rs 
b/compiler/crates/relay-transforms/src/skip_unreachable_node.rs index fdb2ba1adfc83..8c776df688baf 100644 --- a/compiler/crates/relay-transforms/src/skip_unreachable_node.rs +++ b/compiler/crates/relay-transforms/src/skip_unreachable_node.rs @@ -28,9 +28,9 @@ use graphql_ir::TransformedValue; use graphql_ir::Transformer; use graphql_ir::Value; use intern::string_key::StringKey; +use relay_config::DeferStreamInterface; use thiserror::Error; -use super::defer_stream::DEFER_STREAM_CONSTANTS; use crate::DeferDirective; use crate::NoInlineFragmentSpreadMetadata; use crate::StreamDirective; @@ -40,10 +40,13 @@ enum ValidationMode { Loose, } -pub fn skip_unreachable_node_strict(program: &Program) -> DiagnosticsResult { +pub fn skip_unreachable_node_strict( + program: &Program, + defer_stream_interface: DeferStreamInterface, +) -> DiagnosticsResult { let errors = vec![]; let mut validation_mode = ValidationMode::Strict(errors); - let next_program = skip_unreachable_node(program, &mut validation_mode); + let next_program = skip_unreachable_node(program, &mut validation_mode, defer_stream_interface); if let ValidationMode::Strict(errors) = validation_mode { if !errors.is_empty() { @@ -53,14 +56,21 @@ pub fn skip_unreachable_node_strict(program: &Program) -> DiagnosticsResult Program { +pub fn skip_unreachable_node_loose( + program: &Program, + defer_stream_interface: DeferStreamInterface, +) -> Program { let mut validation_mode = ValidationMode::Loose; - skip_unreachable_node(program, &mut validation_mode) + skip_unreachable_node(program, &mut validation_mode, defer_stream_interface) } -fn skip_unreachable_node(program: &Program, validation_mode: &mut ValidationMode) -> Program { +fn skip_unreachable_node( + program: &Program, + validation_mode: &mut ValidationMode, + defer_stream_interface: DeferStreamInterface, +) -> Program { let mut skip_unreachable_node_transform = - SkipUnreachableNodeTransform::new(program, validation_mode); + 
SkipUnreachableNodeTransform::new(program, validation_mode, defer_stream_interface); let transformed = skip_unreachable_node_transform.transform_program(program); transformed.replace_or_else(|| program.clone()) @@ -73,6 +83,7 @@ pub struct SkipUnreachableNodeTransform<'s> { visited_fragments: VisitedFragments, program: &'s Program, validation_mode: &'s mut ValidationMode, + defer_stream_interface: DeferStreamInterface, } impl<'s> Transformer for SkipUnreachableNodeTransform<'s> { @@ -213,8 +224,13 @@ impl<'s> Transformer for SkipUnreachableNodeTransform<'s> { fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed { let transformed_field = self.default_transform_linked_field(field); - if let Some(directive) = field.directives.named(DEFER_STREAM_CONSTANTS.stream_name) { - if let Some(if_arg) = StreamDirective::from(directive).if_arg { + if let Some(directive) = field + .directives + .named(self.defer_stream_interface.stream_name) + { + if let Some(if_arg) = + StreamDirective::from(directive, &self.defer_stream_interface).if_arg + { if let Value::Constant(ConstantValue::Boolean(false)) = &if_arg.value.item { let mut next_field = match transformed_field { Transformed::Delete => return Transformed::Delete, @@ -228,7 +244,7 @@ impl<'s> Transformer for SkipUnreachableNodeTransform<'s> { Arc::make_mut(&mut next_field) .directives .retain(|directive| { - directive.name.item != DEFER_STREAM_CONSTANTS.stream_name + directive.name.item != self.defer_stream_interface.stream_name }); assert_eq!( previous_directive_len, @@ -244,11 +260,16 @@ impl<'s> Transformer for SkipUnreachableNodeTransform<'s> { } impl<'s> SkipUnreachableNodeTransform<'s> { - fn new(program: &'s Program, validation_mode: &'s mut ValidationMode) -> Self { + fn new( + program: &'s Program, + validation_mode: &'s mut ValidationMode, + defer_stream_interface: DeferStreamInterface, + ) -> Self { Self { visited_fragments: Default::default(), program, validation_mode, + defer_stream_interface, } } 
@@ -292,10 +313,12 @@ impl<'s> SkipUnreachableNodeTransform<'s> { ) -> TransformedMulti { if let Some(directive) = inline_fragment .directives - .named(DEFER_STREAM_CONSTANTS.defer_name) + .named(self.defer_stream_interface.defer_name) { assert!(inline_fragment.directives.len() == 1); - if let Some(if_arg) = DeferDirective::from(directive).if_arg { + if let Some(if_arg) = + DeferDirective::from(directive, &self.defer_stream_interface).if_arg + { if let Value::Constant(ConstantValue::Boolean(false)) = &if_arg.value.item { return TransformedMulti::ReplaceMultiple( self.transform_selections(&inline_fragment.selections) @@ -338,7 +361,18 @@ impl<'s> SkipUnreachableNodeTransform<'s> { } } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] enum ValidationMessage { #[error( "After applying transforms to the {document} `{name}` selections of \ diff --git a/compiler/crates/relay-transforms/src/transform_connections.rs b/compiler/crates/relay-transforms/src/transform_connections.rs index 733e70e5734aa..6413f680b3d44 100644 --- a/compiler/crates/relay-transforms/src/transform_connections.rs +++ b/compiler/crates/relay-transforms/src/transform_connections.rs @@ -25,6 +25,7 @@ use graphql_ir::Value; use intern::string_key::Intern; use intern::string_key::StringKey; use intern::Lookup; +use relay_config::DeferStreamInterface; use schema::Schema; use crate::connections::assert_connection_selections; @@ -37,15 +38,16 @@ use crate::connections::ConnectionConstants; use crate::connections::ConnectionInterface; use crate::connections::ConnectionMetadata; use crate::connections::ConnectionMetadataDirective; -use crate::defer_stream::DEFER_STREAM_CONSTANTS; use crate::handle_fields::build_handle_field_directive_from_connection_directive; use crate::handle_fields::KEY_ARG_NAME; pub fn transform_connections( program: &Program, 
connection_interface: &ConnectionInterface, + defer_stream_interface: &DeferStreamInterface, ) -> Program { - let mut transform = ConnectionTransform::new(program, connection_interface); + let mut transform = + ConnectionTransform::new(program, connection_interface, defer_stream_interface); transform .transform_program(program) .replace_or_else(|| program.clone()) @@ -58,10 +60,15 @@ struct ConnectionTransform<'s> { current_connection_metadata: Vec, current_document_name: StringKey, program: &'s Program, + defer_stream_interface: &'s DeferStreamInterface, } impl<'s> ConnectionTransform<'s> { - fn new(program: &'s Program, connection_interface: &'s ConnectionInterface) -> Self { + fn new( + program: &'s Program, + connection_interface: &'s ConnectionInterface, + defer_stream_interface: &'s DeferStreamInterface, + ) -> Self { Self { connection_constants: ConnectionConstants::default(), connection_interface, @@ -69,6 +76,7 @@ impl<'s> ConnectionTransform<'s> { current_document_name: connection_interface.cursor, // Set an arbitrary value to avoid Option current_connection_metadata: Vec::new(), program, + defer_stream_interface, } } @@ -132,16 +140,43 @@ impl<'s> ConnectionTransform<'s> { if is_stream_connection { let mut arguments = vec![]; for arg in &connection_directive.arguments { - if arg.name.item == DEFER_STREAM_CONSTANTS.if_arg - || arg.name.item == DEFER_STREAM_CONSTANTS.initial_count_arg - || arg.name.item == DEFER_STREAM_CONSTANTS.use_customized_batch_arg + if arg.name.item == self.connection_constants.stream_connection_if_arg { + arguments.push(Argument { + name: WithLocation::new( + arg.name.location, + self.defer_stream_interface.if_arg, + ), + value: arg.value.clone(), + }); + } else if arg.name.item + == self + .connection_constants + .stream_connection_initial_count_arg + { + arguments.push(Argument { + name: WithLocation::new( + arg.name.location, + self.defer_stream_interface.initial_count_arg, + ), + value: arg.value.clone(), + }); + } else if 
arg.name.item + == self + .connection_constants + .stream_connection_use_customized_batch_arg { - arguments.push(arg.clone()); + arguments.push(Argument { + name: WithLocation::new( + arg.name.location, + self.defer_stream_interface.use_customized_batch_arg, + ), + value: arg.value.clone(), + }); } else if arg.name.item == *KEY_ARG_NAME { arguments.push(Argument { name: WithLocation::new( arg.name.location, - DEFER_STREAM_CONSTANTS.label_arg, + self.defer_stream_interface.label_arg, ), value: arg.value.clone(), }); @@ -150,7 +185,7 @@ impl<'s> ConnectionTransform<'s> { transformed_edges_field.directives.push(Directive { name: WithLocation::new( connection_directive.name.location, - DEFER_STREAM_CONSTANTS.stream_name, + self.defer_stream_interface.stream_name, ), arguments, data: None, @@ -218,7 +253,7 @@ impl<'s> ConnectionTransform<'s> { arguments.push(Argument { name: WithLocation::new( key_arg.name.location, - DEFER_STREAM_CONSTANTS.label_arg, + self.defer_stream_interface.label_arg, ), value: WithLocation::new( key_arg.value.location, @@ -234,8 +269,16 @@ impl<'s> ConnectionTransform<'s> { ), }); } - if let Some(if_arg) = connection_args.named(DEFER_STREAM_CONSTANTS.if_arg) { - arguments.push(if_arg.clone()); + if let Some(if_arg) = + connection_args.named(self.connection_constants.stream_connection_if_arg) + { + arguments.push(Argument { + name: WithLocation::new( + if_arg.name.location, + self.defer_stream_interface.if_arg, + ), + value: if_arg.value.clone(), + }) } Selection::InlineFragment(Arc::new(InlineFragment { type_condition: None, @@ -245,7 +288,7 @@ impl<'s> ConnectionTransform<'s> { directives: vec![Directive { name: WithLocation::new( connection_directive.name.location, - DEFER_STREAM_CONSTANTS.defer_name, + self.defer_stream_interface.defer_name, ), arguments, data: None, @@ -300,9 +343,8 @@ impl<'s> ConnectionTransform<'s> { let mut next_directives = connection_field .directives .iter() + .filter(|&directive| directive != connection_directive) 
.cloned() - // Remove the original @connection directive - .filter(|directive| directive != connection_directive) .collect::>(); // Add an internal (untyped) directive to pass down the connection handle diff --git a/compiler/crates/relay-transforms/src/unwrap_custom_directive_selection.rs b/compiler/crates/relay-transforms/src/unwrap_custom_directive_selection.rs index e698fd15256c8..e9722ba92d70e 100644 --- a/compiler/crates/relay-transforms/src/unwrap_custom_directive_selection.rs +++ b/compiler/crates/relay-transforms/src/unwrap_custom_directive_selection.rs @@ -15,19 +15,31 @@ use graphql_ir::Program; use graphql_ir::Selection; use graphql_ir::Transformed; use graphql_ir::Transformer; - -use crate::DEFER_STREAM_CONSTANTS; +use relay_config::DeferStreamInterface; /// Transform to unwrap selections wrapped in a InlineFragment with custom /// directive for printing -pub fn unwrap_custom_directive_selection(program: &Program) -> Program { - let mut transform = UnwrapCustomDirectiveSelection; +pub fn unwrap_custom_directive_selection( + program: &Program, + defer_stream_interface: DeferStreamInterface, +) -> Program { + let mut transform = UnwrapCustomDirectiveSelection::new(defer_stream_interface); transform .transform_program(program) .replace_or_else(|| program.clone()) } -struct UnwrapCustomDirectiveSelection; +struct UnwrapCustomDirectiveSelection { + defer_stream_interface: DeferStreamInterface, +} + +impl UnwrapCustomDirectiveSelection { + fn new(defer_stream_interface: DeferStreamInterface) -> Self { + Self { + defer_stream_interface, + } + } +} impl Transformer for UnwrapCustomDirectiveSelection { const NAME: &'static str = "UnwrapCustomDirectiveSelection"; @@ -37,7 +49,9 @@ impl Transformer for UnwrapCustomDirectiveSelection { fn transform_inline_fragment(&mut self, fragment: &InlineFragment) -> Transformed { if fragment.type_condition.is_none() { // Remove the wrapping `... @defer` for `@defer` on fragment spreads. 
- let defer = fragment.directives.named(DEFER_STREAM_CONSTANTS.defer_name); + let defer = fragment + .directives + .named(self.defer_stream_interface.defer_name); if let Some(defer) = defer { if let Selection::FragmentSpread(frag_spread) = &fragment.selections[0] { return Transformed::Replace(Selection::FragmentSpread(Arc::new( diff --git a/compiler/crates/relay-transforms/src/util.rs b/compiler/crates/relay-transforms/src/util.rs index ba6ff26fdb96f..896b7c0de1687 100644 --- a/compiler/crates/relay-transforms/src/util.rs +++ b/compiler/crates/relay-transforms/src/util.rs @@ -23,11 +23,11 @@ use schema::SDLSchema; use schema::Schema; use schema::Type; +use crate::catch_directive::CATCH_DIRECTIVE_NAME; use crate::client_extensions::CLIENT_EXTENSION_DIRECTIVE_NAME; use crate::connections::ConnectionMetadataDirective; use crate::handle_fields::HANDLE_FIELD_DIRECTIVE_NAME; use crate::inline_data_fragment::InlineDirectiveMetadata; -use crate::react_flight::REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY; use crate::refetchable_fragment::RefetchableMetadata; use crate::relay_actor_change::RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN; use crate::required_directive::CHILDREN_CAN_BUBBLE_METADATA_KEY; @@ -36,9 +36,7 @@ use crate::ClientEdgeGeneratedQueryMetadataDirective; use crate::ClientEdgeMetadataDirective; use crate::FragmentAliasMetadata; use crate::ModuleMetadata; -use crate::ReactFlightLocalComponentsMetadata; use crate::RefetchableDerivedFromMetadata; -use crate::RelayClientComponentMetadata; use crate::RelayResolverMetadata; use crate::RequiredMetadataDirective; use crate::DIRECTIVE_SPLIT_OPERATION; @@ -87,7 +85,8 @@ pub fn extract_variable_name(argument: Option<&Argument>) -> Option { } lazy_static! 
{ - static ref CUSTOM_METADATA_DIRECTIVES: [DirectiveName; 22] = [ + static ref CUSTOM_METADATA_DIRECTIVES: [DirectiveName; 20] = [ + *CATCH_DIRECTIVE_NAME, *CLIENT_EXTENSION_DIRECTIVE_NAME, ConnectionMetadataDirective::directive_name(), *HANDLE_FIELD_DIRECTIVE_NAME, @@ -97,21 +96,19 @@ lazy_static! { RefetchableDerivedFromMetadata::directive_name(), *INTERNAL_METADATA_DIRECTIVE, *ARGUMENT_DEFINITION, - *REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY, - ReactFlightLocalComponentsMetadata::directive_name(), *REQUIRED_DIRECTIVE_NAME, RequiredMetadataDirective::directive_name(), ClientEdgeMetadataDirective::directive_name(), ClientEdgeGeneratedQueryMetadataDirective::directive_name(), *CHILDREN_CAN_BUBBLE_METADATA_KEY, RelayResolverMetadata::directive_name(), - RelayClientComponentMetadata::directive_name(), *UNUSED_LOCAL_VARIABLE_DEPRECATED, *RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN, ProvidedVariableMetadata::directive_name(), FragmentAliasMetadata::directive_name(), ]; - static ref DIRECTIVES_SKIPPED_IN_NODE_IDENTIFIER: [DirectiveName; 11] = [ + static ref DIRECTIVES_SKIPPED_IN_NODE_IDENTIFIER: [DirectiveName; 9] = [ + *CATCH_DIRECTIVE_NAME, *CLIENT_EXTENSION_DIRECTIVE_NAME, ConnectionMetadataDirective::directive_name(), *HANDLE_FIELD_DIRECTIVE_NAME, @@ -119,10 +116,7 @@ lazy_static! 
{ RefetchableDerivedFromMetadata::directive_name(), *INTERNAL_METADATA_DIRECTIVE, *ARGUMENT_DEFINITION, - *REACT_FLIGHT_SCALAR_FLIGHT_FIELD_METADATA_KEY, - ReactFlightLocalComponentsMetadata::directive_name(), *REQUIRED_DIRECTIVE_NAME, - RelayClientComponentMetadata::directive_name(), ]; static ref RELAY_CUSTOM_INLINE_FRAGMENT_DIRECTIVES: [DirectiveName; 8] = [ *CLIENT_EXTENSION_DIRECTIVE_NAME, diff --git a/compiler/crates/relay-transforms/src/validate_operation_variables.rs b/compiler/crates/relay-transforms/src/validate_operation_variables.rs index f8aa47d3fd796..89e533d97926b 100644 --- a/compiler/crates/relay-transforms/src/validate_operation_variables.rs +++ b/compiler/crates/relay-transforms/src/validate_operation_variables.rs @@ -82,8 +82,9 @@ impl<'s> Transformer for ValidateOperationVariables<'s> { definition .default_value .as_ref() - .map(|default_value| default_value.location) - .unwrap_or(definition.name.location), + .map_or(definition.name.location, |default_value| { + default_value.location + }), )); continue; } diff --git a/compiler/crates/relay-transforms/src/validations.rs b/compiler/crates/relay-transforms/src/validations.rs new file mode 100644 index 0000000000000..e0ae3f13be8ae --- /dev/null +++ b/compiler/crates/relay-transforms/src/validations.rs @@ -0,0 +1,52 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod deprecated_fields; +mod disallow_circular_no_inline_fragments; +mod disallow_non_node_id_fields; +mod disallow_readtime_features_in_mutations; +mod disallow_required_on_non_null_field; +mod disallow_reserved_aliases; +mod disallow_typename_on_root; +mod validate_connections; +mod validate_global_variable_names; +mod validate_global_variables; +mod validate_module_names; +mod validate_no_double_underscore_alias; +mod validate_no_inline_with_raw_response_type; +mod validate_no_unselectable_selections; +mod validate_relay_directives; +mod validate_required_arguments; +mod validate_resolver_fragments; +mod validate_server_only_directives; +mod validate_static_args; +mod validate_unused_fragment_variables; +mod validate_unused_variables; + +pub use deprecated_fields::deprecated_fields; +pub use deprecated_fields::deprecated_fields_for_executable_definition; +pub use disallow_circular_no_inline_fragments::disallow_circular_no_inline_fragments; +pub use disallow_non_node_id_fields::disallow_non_node_id_fields; +pub use disallow_readtime_features_in_mutations::disallow_readtime_features_in_mutations; +pub use disallow_required_on_non_null_field::disallow_required_on_non_null_field; +pub use disallow_reserved_aliases::disallow_reserved_aliases; +pub use disallow_typename_on_root::disallow_typename_on_root; +pub use validate_connections::validate_connections; +pub use validate_global_variable_names::validate_global_variable_names; +pub use validate_global_variables::validate_global_variables; +pub use validate_module_names::extract_module_name; +pub use validate_module_names::validate_module_names; +pub use validate_no_double_underscore_alias::validate_no_double_underscore_alias; +pub use validate_no_inline_with_raw_response_type::validate_no_inline_fragments_with_raw_response_type; +pub use validate_no_unselectable_selections::validate_no_unselectable_selections; +pub use validate_relay_directives::validate_relay_directives; +pub use 
validate_required_arguments::validate_required_arguments; +pub use validate_resolver_fragments::validate_resolver_fragments; +pub use validate_server_only_directives::validate_server_only_directives; +pub use validate_static_args::validate_static_args; +pub use validate_unused_fragment_variables::validate_unused_fragment_variables; +pub use validate_unused_variables::validate_unused_variables; diff --git a/compiler/crates/relay-transforms/src/validations/deprecated_fields.rs b/compiler/crates/relay-transforms/src/validations/deprecated_fields.rs index 84190f1952e0e..571e9a2bc3881 100644 --- a/compiler/crates/relay-transforms/src/validations/deprecated_fields.rs +++ b/compiler/crates/relay-transforms/src/validations/deprecated_fields.rs @@ -18,12 +18,15 @@ use graphql_ir::LinkedField; use graphql_ir::Program; use graphql_ir::ScalarField; use graphql_ir::ValidationMessage; +use graphql_ir::ValidationMessageWithData; use graphql_ir::Validator; use graphql_ir::Value; use schema::FieldID; use schema::SDLSchema; use schema::Schema; +use crate::fragment_alias_directive::FRAGMENT_DANGEROUSLY_UNALIAS_DIRECTIVE_NAME; + pub fn deprecated_fields( schema: &Arc, program: &Program, @@ -128,6 +131,17 @@ impl<'a> Validator for DeprecatedFields<'a> { fn validate_directive(&mut self, directive: &Directive) -> DiagnosticsResult<()> { if let Some(directive_definition) = self.schema.get_directive(directive.name.item) { + // GraphQL does not support @deprecated on directive definitions, + // but there are some directives that Relay exposes as escape + // hatches which we would like to render as struckthrough in order + // to indicate that they should be avoided or migrated to some other pattern. 
+ if directive_definition.name.item == *FRAGMENT_DANGEROUSLY_UNALIAS_DIRECTIVE_NAME { + self.warnings.push(Diagnostic::hint_with_data( + ValidationMessageWithData::DeprecatedDangerouslyUnaliasedDirective, + directive.name.location, + vec![DiagnosticTag::DEPRECATED], + )); + } for arg in &directive.arguments { if let Some(arg_definition) = directive_definition.arguments.named(arg.name.item) { if let Some(deprecation) = arg_definition.deprecated() { diff --git a/compiler/crates/relay-transforms/src/validations/disallow_circular_no_inline_fragments.rs b/compiler/crates/relay-transforms/src/validations/disallow_circular_no_inline_fragments.rs index 766fbf5489ff6..278509f486d15 100644 --- a/compiler/crates/relay-transforms/src/validations/disallow_circular_no_inline_fragments.rs +++ b/compiler/crates/relay-transforms/src/validations/disallow_circular_no_inline_fragments.rs @@ -87,7 +87,8 @@ impl Validator for DisallowCircularNoInlineFragments<'_> { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] enum ValidationMessage { #[error("Found a circular reference from fragment '{fragment_name}'.")] CircularFragmentReference { diff --git a/compiler/crates/relay-transforms/src/validations/disallow_readtime_features_in_mutations.rs b/compiler/crates/relay-transforms/src/validations/disallow_readtime_features_in_mutations.rs new file mode 100644 index 0000000000000..795cdb250fff4 --- /dev/null +++ b/compiler/crates/relay-transforms/src/validations/disallow_readtime_features_in_mutations.rs @@ -0,0 +1,153 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::Diagnostic; +use common::DiagnosticsResult; +use common::FeatureFlag; +use common::NamedItem; +use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use graphql_ir::ConstantValue; +use graphql_ir::Field; +use graphql_ir::FragmentDefinition; +use graphql_ir::FragmentSpread; +use graphql_ir::LinkedField; +use graphql_ir::OperationDefinition; +use graphql_ir::Program; +use graphql_ir::ScalarField; +use graphql_ir::ValidationMessage; +use graphql_ir::Validator; +use schema::Schema; + +use crate::ACTION_ARGUMENT; +use crate::REQUIRED_DIRECTIVE_NAME; +use crate::THROW_ACTION; + +/// Some Relay features will cause a field to throw or suspend at read time. +/// These behaviors are incompatible with our mutation APIs. +/// This validator checks that no such features are used in mutations. +pub fn disallow_readtime_features_in_mutations( + program: &Program, + allow_resolvers_mutation_response: &FeatureFlag, + allow_required_in_mutation_response: &FeatureFlag, + enable_relay_resolver_mutations: bool, +) -> DiagnosticsResult<()> { + let mut validator = DisallowReadtimeFeaturesInMutations::new( + program, + allow_resolvers_mutation_response.clone(), + allow_required_in_mutation_response.clone(), + enable_relay_resolver_mutations, + ); + validator.validate_program(program) +} + +struct DisallowReadtimeFeaturesInMutations<'program> { + program: &'program Program, + allow_resolvers_mutation_response: FeatureFlag, + allow_required_in_mutation_response: FeatureFlag, + enable_relay_resolver_mutations: bool, + allow_resolvers_for_this_mutation: bool, + allow_required_for_this_mutation: bool, +} + +impl<'program> DisallowReadtimeFeaturesInMutations<'program> { + fn new( + program: &'program Program, + allow_resolvers_mutation_response: FeatureFlag, + allow_required_in_mutation_response: FeatureFlag, + enable_relay_resolver_mutations: bool, + ) -> Self { + Self { + program, + allow_resolvers_mutation_response, + allow_required_in_mutation_response, + 
enable_relay_resolver_mutations, + allow_resolvers_for_this_mutation: false, + allow_required_for_this_mutation: false, + } + } + + fn validate_field(&self, field: &impl Field) -> DiagnosticsResult<()> { + if !self.allow_required_for_this_mutation { + if let Some(directive) = field.directives().named(*REQUIRED_DIRECTIVE_NAME) { + let action = directive + .arguments + .named(*ACTION_ARGUMENT) + .and_then(|arg| arg.value.item.get_constant()); + if let Some(ConstantValue::Enum(action)) = action { + if *action == *THROW_ACTION { + return Err(vec![Diagnostic::error( + ValidationMessage::RequiredInMutation, + directive.name.location, + )]); + } + } + } + } + if !self.allow_resolvers_for_this_mutation + && self + .program + .schema + .field(field.definition().item) + .directives + .named(*RELAY_RESOLVER_DIRECTIVE_NAME) + .is_some() + { + return Err(vec![Diagnostic::error( + ValidationMessage::ResolverInMutation, + field.alias_or_name_location(), + )]); + } + + Ok(()) + } +} + +impl Validator for DisallowReadtimeFeaturesInMutations<'_> { + const NAME: &'static str = "disallow_readtime_features_in_mutations"; + const VALIDATE_ARGUMENTS: bool = false; + const VALIDATE_DIRECTIVES: bool = false; + + fn validate_operation(&mut self, operation: &OperationDefinition) -> DiagnosticsResult<()> { + if !operation.is_mutation() { + // No need to traverse into non-mutation operations + return Ok(()); + } + self.allow_resolvers_for_this_mutation = self.enable_relay_resolver_mutations + || self + .allow_resolvers_mutation_response + .is_enabled_for(operation.name.item.0); + self.allow_required_for_this_mutation = self + .allow_required_in_mutation_response + .is_enabled_for(operation.name.item.0); + let result = self.default_validate_operation(operation); + + // Reset state + self.allow_resolvers_for_this_mutation = false; + self.allow_required_for_this_mutation = false; + + result + } + + fn validate_fragment(&mut self, _fragment: &FragmentDefinition) -> DiagnosticsResult<()> { + // We 
only care about mutations + Ok(()) + } + + fn validate_fragment_spread(&mut self, _spread: &FragmentSpread) -> DiagnosticsResult<()> { + // Values nested within fragment spreads are fine since they are not read as part of the + // mutation response. + Ok(()) + } + + fn validate_scalar_field(&mut self, field: &ScalarField) -> DiagnosticsResult<()> { + self.validate_field(field) + } + fn validate_linked_field(&mut self, field: &LinkedField) -> DiagnosticsResult<()> { + self.validate_field(field)?; + self.default_validate_linked_field(field) + } +} diff --git a/compiler/crates/relay-transforms/src/validations/disallow_required_on_non_null_field.rs b/compiler/crates/relay-transforms/src/validations/disallow_required_on_non_null_field.rs new file mode 100644 index 0000000000000..8b6067d32a7ba --- /dev/null +++ b/compiler/crates/relay-transforms/src/validations/disallow_required_on_non_null_field.rs @@ -0,0 +1,181 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use ::errors::try_all; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::NamedItem; +use graphql_ir::reexport::Intern; +use graphql_ir::Field; +use graphql_ir::FragmentDefinition; +use graphql_ir::Program; +use graphql_ir::Selection; +use graphql_ir::ValidationMessage; +use graphql_ir::Validator; +use lazy_static::lazy_static; +use schema::Schema; + +use crate::ValidationMessageWithData; +use crate::REQUIRED_DIRECTIVE_NAME; + +lazy_static! 
{ + static ref SEMANTIC_NON_NULL_DIRECTIVE: DirectiveName = + DirectiveName("semanticNonNull".intern()); + static ref THROW_ON_FIELD_ERROR_DIRECTIVE: DirectiveName = + DirectiveName("throwOnFieldError".intern()); +} + +pub fn disallow_required_on_non_null_field( + program: &Program, + is_no_required_on_non_null: bool, + experimental_emit_semantic_nullability_types: bool, +) -> DiagnosticsResult<()> { + let mut validator = DisallowRequiredOnNonNullField::new( + program, + is_no_required_on_non_null, + experimental_emit_semantic_nullability_types, + ); + validator.validate_program(program) +} + +struct DisallowRequiredOnNonNullField<'program> { + program: &'program Program, + is_no_required_on_non_null: bool, + experimental_emit_semantic_nullability_types: bool, +} + +impl<'program> DisallowRequiredOnNonNullField<'program> { + fn new( + program: &'program Program, + is_no_required_on_non_null: bool, + experimental_emit_semantic_nullability_types: bool, + ) -> Self { + Self { + program, + is_no_required_on_non_null, + experimental_emit_semantic_nullability_types, + } + } + + fn validate_required_field( + &self, + field: &Arc, + is_throw_on_field_error: bool, + ) -> DiagnosticsResult<()> { + if self.is_no_required_on_non_null + && field.directives().named(*REQUIRED_DIRECTIVE_NAME).is_some() + && self + .program + .schema + .field(field.definition().item) + .type_ + .is_non_null() + { + // @required on a non-null (!) field is an error. + return Err(vec![Diagnostic::error_with_data( + ValidationMessageWithData::RequiredOnNonNull, + field + .directives() + .named(*REQUIRED_DIRECTIVE_NAME) + .unwrap() + .name + .location, + )]); + } + + if is_throw_on_field_error + && field.directives().named(*REQUIRED_DIRECTIVE_NAME).is_some() + && (self + .program + .schema + .field(field.definition().item) + .directives + .named(*SEMANTIC_NON_NULL_DIRECTIVE) + .is_some()) + { + // @required on a semantically-non-null field is an error. 
+ return Err(vec![Diagnostic::error( + ValidationMessageWithData::RequiredOnSemanticNonNull, + field + .directives() + .named(*REQUIRED_DIRECTIVE_NAME) + .unwrap() + .name + .location, + )]); + } + + Ok(()) + } + + fn validate_selection_fields( + &self, + selections: &[Selection], + is_throw_on_field_error: bool, + ) -> DiagnosticsResult<()> { + try_all(selections.iter().map(|selection| match selection { + Selection::LinkedField(linked_field) => { + self.validate_required_field(linked_field, is_throw_on_field_error)?; + self.validate_selection_fields(&linked_field.selections, is_throw_on_field_error) + } + Selection::ScalarField(scalar_field) => { + self.validate_required_field(scalar_field, is_throw_on_field_error) + } + _ => Ok(()), + }))?; + Ok(()) + } +} +impl Validator for DisallowRequiredOnNonNullField<'_> { + const NAME: &'static str = "disallow_required_on_non_null_field"; + const VALIDATE_ARGUMENTS: bool = false; + const VALIDATE_DIRECTIVES: bool = false; + + fn validate_fragment(&mut self, fragment: &FragmentDefinition) -> DiagnosticsResult<()> { + let throw_on_field_error_directive = + fragment.directives.named(*THROW_ON_FIELD_ERROR_DIRECTIVE); + + if !self.experimental_emit_semantic_nullability_types + && throw_on_field_error_directive.is_some() + { + return Err(vec![Diagnostic::error( + ValidationMessage::ThrowOnFieldErrorNotEnabled, + throw_on_field_error_directive.unwrap().name.location, + )]); + } + + self.validate_selection_fields( + &fragment.selections, + throw_on_field_error_directive.is_some(), + ) + } + + fn validate_operation( + &mut self, + operation: &graphql_ir::OperationDefinition, + ) -> DiagnosticsResult<()> { + let throw_on_field_error_directive = + operation.directives.named(*THROW_ON_FIELD_ERROR_DIRECTIVE); + + if !self.experimental_emit_semantic_nullability_types + && throw_on_field_error_directive.is_some() + { + return Err(vec![Diagnostic::error( + ValidationMessage::ThrowOnFieldErrorNotEnabled, + 
throw_on_field_error_directive.unwrap().name.location, + )]); + } + + self.validate_selection_fields( + &operation.selections, + throw_on_field_error_directive.is_some(), + ) + } +} diff --git a/compiler/crates/relay-transforms/src/validations/mod.rs b/compiler/crates/relay-transforms/src/validations/mod.rs deleted file mode 100644 index 0d7e3fd06feb7..0000000000000 --- a/compiler/crates/relay-transforms/src/validations/mod.rs +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod deprecated_fields; -mod disallow_circular_no_inline_fragments; -mod disallow_non_node_id_fields; -mod disallow_reserved_aliases; -mod disallow_typename_on_root; -mod validate_connections; -mod validate_global_variable_names; -mod validate_global_variables; -mod validate_module_names; -mod validate_no_double_underscore_alias; -mod validate_no_inline_with_raw_response_type; -mod validate_no_unselectable_selections; -mod validate_relay_directives; -mod validate_required_arguments; -mod validate_resolver_fragments; -mod validate_server_only_directives; -mod validate_static_args; -mod validate_unused_fragment_variables; -mod validate_unused_variables; - -pub use deprecated_fields::deprecated_fields; -pub use deprecated_fields::deprecated_fields_for_executable_definition; -pub use disallow_circular_no_inline_fragments::disallow_circular_no_inline_fragments; -pub use disallow_non_node_id_fields::disallow_non_node_id_fields; -pub use disallow_reserved_aliases::disallow_reserved_aliases; -pub use disallow_typename_on_root::disallow_typename_on_root; -pub use validate_connections::validate_connections; -pub use validate_global_variable_names::validate_global_variable_names; -pub use validate_global_variables::validate_global_variables; -pub use validate_module_names::extract_module_name; -pub use 
validate_module_names::validate_module_names; -pub use validate_no_double_underscore_alias::validate_no_double_underscore_alias; -pub use validate_no_inline_with_raw_response_type::validate_no_inline_fragments_with_raw_response_type; -pub use validate_no_unselectable_selections::validate_no_unselectable_selections; -pub use validate_relay_directives::validate_relay_directives; -pub use validate_required_arguments::validate_required_arguments; -pub use validate_resolver_fragments::validate_resolver_fragments; -pub use validate_server_only_directives::validate_server_only_directives; -pub use validate_static_args::validate_static_args; -pub use validate_unused_fragment_variables::validate_unused_fragment_variables; -pub use validate_unused_variables::validate_unused_variables; diff --git a/compiler/crates/relay-transforms/src/validations/validate_connections.rs b/compiler/crates/relay-transforms/src/validations/validate_connections.rs index 0bad87dc69870..940e89b0af3eb 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_connections.rs +++ b/compiler/crates/relay-transforms/src/validations/validate_connections.rs @@ -5,6 +5,7 @@ * LICENSE file in the root directory of this source tree. 
*/ +use common::ArgumentName; use common::Diagnostic; use common::DiagnosticsResult; use common::NamedItem; @@ -294,7 +295,7 @@ impl<'s> ConnectionValidation<'s> { )?; let page_info_type = page_info_type.inner(); - let page_info_sub_fields = vec![ + let page_info_sub_fields = [ self.connection_interface.end_cursor, self.connection_interface.has_next_page, self.connection_interface.has_previous_page, @@ -477,23 +478,36 @@ impl<'s> ConnectionValidation<'s> { if let Some((arg, filters_val)) = constant_filters_arg { match filters_val { ConstantValue::List(list_val) => { - let non_string_value = list_val - .iter() - .find(|val| !matches!(val, ConstantValue::String(_))); - - if non_string_value.is_some() { - return Err(vec![ - Diagnostic::error( + validate_map(list_val, |filter_val| { + if let ConstantValue::String(filter_val) = filter_val { + if connection_field + .arguments + .named(ArgumentName(*filter_val)) + .is_none() + { + Err(vec![Diagnostic::error( + ValidationMessage::InvalidConnectionFiltersArgNotAnArgument { + connection_directive_name: connection_directive.name.item, + connection_field_name: connection_schema_field.name.item, + filters_arg_name: *FILTERS_ARG_NAME, + invalid_name: *filter_val, + }, + arg.value.location, + )]) + } else { + Ok(()) + } + } else { + Err(vec![Diagnostic::error( ValidationMessage::InvalidConnectionFiltersArg { connection_directive_name: connection_directive.name.item, connection_field_name: connection_schema_field.name.item, filters_arg_name: *FILTERS_ARG_NAME, }, arg.value.location, - ) - .annotate("related location", connection_field.definition.location), - ]); - } + )]) + } + })?; } _ => { return Err(vec![ diff --git a/compiler/crates/relay-transforms/src/validations/validate_global_variable_names.rs b/compiler/crates/relay-transforms/src/validations/validate_global_variable_names.rs index 69f64957fcc85..1b87315bcf460 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_global_variable_names.rs +++ 
b/compiler/crates/relay-transforms/src/validations/validate_global_variable_names.rs @@ -58,7 +58,18 @@ impl Validator for ValidateGlobalVariableNames { } } -#[derive(Clone, Debug, Error, Eq, PartialEq, Ord, PartialOrd, Hash)] +#[derive( + Clone, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +#[serde(tag = "type")] enum ValidationMessage { #[error( "Invalid name `${0}` for an operation variable. The `__relay_internal` prefix is reserved for relay internal usage." diff --git a/compiler/crates/relay-transforms/src/validations/validate_global_variables.rs b/compiler/crates/relay-transforms/src/validations/validate_global_variables.rs index a74a10860411a..977323ec34ac8 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_global_variables.rs +++ b/compiler/crates/relay-transforms/src/validations/validate_global_variables.rs @@ -51,7 +51,7 @@ impl Validator for ValidateGlobalVariables<'_> { } let (variables, _) = self.visitor.infer_operation_variables(operation); - let undefined_variables: Vec<_> = variables + let mut undefined_variables: Vec<_> = variables .values() .filter(|var| { !(operation @@ -60,7 +60,7 @@ impl Validator for ValidateGlobalVariables<'_> { .any(|def| def.name.item == var.name.item)) }) .collect(); - + undefined_variables.sort_by(|a, b| a.name.cmp(&b.name)); if !undefined_variables.is_empty() { let is_plural = undefined_variables.len() > 1; let mut locations = undefined_variables diff --git a/compiler/crates/relay-transforms/src/validations/validate_module_names.rs b/compiler/crates/relay-transforms/src/validations/validate_module_names.rs new file mode 100644 index 0000000000000..8ad25746227e6 --- /dev/null +++ b/compiler/crates/relay-transforms/src/validations/validate_module_names.rs @@ -0,0 +1,103 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod extract_module_name; + +use common::Diagnostic; +use common::DiagnosticsResult; +use graphql_ir::FragmentDefinition; +use graphql_ir::OperationDefinition; +use graphql_ir::Program; +use graphql_ir::Validator; +use graphql_syntax::OperationKind; +use thiserror::Error; + +pub fn validate_module_names(program: &Program) -> DiagnosticsResult<()> { + (ValidateModuleNames {}).validate_program(program) +} + +pub use extract_module_name::extract_module_name; + +pub struct ValidateModuleNames {} + +impl Validator for ValidateModuleNames { + const NAME: &'static str = "ValidateModuleNames"; + const VALIDATE_ARGUMENTS: bool = false; + const VALIDATE_DIRECTIVES: bool = true; + + fn validate_operation(&mut self, operation: &OperationDefinition) -> DiagnosticsResult<()> { + let operation_name = operation.name.item.0.to_string(); + let path = operation.name.location.source_location().path(); + let module_name = extract_module_name(path).expect("Unable to extract module name."); + let (operation_type_suffix, pluralized_string) = match operation.kind { + OperationKind::Query => ("Query", "Queries"), + OperationKind::Mutation => ("Mutation", "Mutations"), + OperationKind::Subscription => ("Subscription", "Subscriptions"), + }; + + let operation_name_ending_is_valid = operation_name.ends_with("Query") + || operation_name.ends_with("Mutation") + || operation_name.ends_with("Subscription"); + + if !operation_name.starts_with(&module_name) || !operation_name_ending_is_valid + // TODO: T71484519 re-enable this line when queries are correctly named in www + // || !operation_name.ends_with(operation_type_suffix) + { + return Err(vec![Diagnostic::error( + ValidationMessage::InvalidOperationName { + pluralized_string: pluralized_string.to_string(), + operation_type_suffix: operation_type_suffix.to_string(), + module_name, + operation_name, + }, + operation.name.location, + )]); + } + + Ok(()) + } + + fn validate_fragment(&mut self, fragment: &FragmentDefinition) -> 
DiagnosticsResult<()> { + let fragment_name = fragment.name.item.to_string(); + let path = fragment.name.location.source_location().path(); + let module_name = + extract_module_name::extract_module_name(path).expect("Unable to extract module name."); + + if !fragment_name.starts_with(&module_name) { + return Err(vec![Diagnostic::error( + ValidationMessage::InvalidFragmentName { + module_name, + fragment_name, + }, + fragment.name.location, + )]); + } + Ok(()) + } +} + +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type")] +pub enum ValidationMessage { + #[error( + "{pluralized_string} in graphql tags must start with the module name ('{module_name}') and end with '{operation_type_suffix}'. Got '{operation_name}' instead." + )] + InvalidOperationName { + pluralized_string: String, + module_name: String, + operation_type_suffix: String, + operation_name: String, + }, + + #[error( + "Fragments in graphql tags must start with the module name ('{module_name}'). Got '{fragment_name}' instead." + )] + InvalidFragmentName { + module_name: String, + fragment_name: String, + }, +} diff --git a/compiler/crates/relay-transforms/src/validations/validate_module_names/mod.rs b/compiler/crates/relay-transforms/src/validations/validate_module_names/mod.rs deleted file mode 100644 index 48cdf8e43800b..0000000000000 --- a/compiler/crates/relay-transforms/src/validations/validate_module_names/mod.rs +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -mod extract_module_name; - -use common::Diagnostic; -use common::DiagnosticsResult; -use graphql_ir::FragmentDefinition; -use graphql_ir::OperationDefinition; -use graphql_ir::Program; -use graphql_ir::Validator; -use graphql_syntax::OperationKind; -use thiserror::Error; - -pub fn validate_module_names(program: &Program) -> DiagnosticsResult<()> { - (ValidateModuleNames {}).validate_program(program) -} - -pub use extract_module_name::extract_module_name; - -pub struct ValidateModuleNames {} - -impl Validator for ValidateModuleNames { - const NAME: &'static str = "ValidateModuleNames"; - const VALIDATE_ARGUMENTS: bool = false; - const VALIDATE_DIRECTIVES: bool = true; - - fn validate_operation(&mut self, operation: &OperationDefinition) -> DiagnosticsResult<()> { - let operation_name = operation.name.item.0.to_string(); - let path = operation.name.location.source_location().path(); - let module_name = extract_module_name(path).expect("Unable to extract module name."); - let (operation_type_suffix, pluralized_string) = match operation.kind { - OperationKind::Query => ("Query", "Queries"), - OperationKind::Mutation => ("Mutation", "Mutations"), - OperationKind::Subscription => ("Subscription", "Subscriptions"), - }; - - let operation_name_ending_is_valid = operation_name.ends_with("Query") - || operation_name.ends_with("Mutation") - || operation_name.ends_with("Subscription"); - - if !operation_name.starts_with(&module_name) || !operation_name_ending_is_valid - // TODO: T71484519 re-enable this line when queries are correctly named in www - // || !operation_name.ends_with(operation_type_suffix) - { - return Err(vec![Diagnostic::error( - ValidationMessage::InvalidOperationName { - pluralized_string: pluralized_string.to_string(), - operation_type_suffix: operation_type_suffix.to_string(), - module_name, - operation_name, - }, - operation.name.location, - )]); - } - - Ok(()) - } - - fn validate_fragment(&mut self, fragment: &FragmentDefinition) -> 
DiagnosticsResult<()> { - let fragment_name = fragment.name.item.to_string(); - let path = fragment.name.location.source_location().path(); - let module_name = - extract_module_name::extract_module_name(path).expect("Unable to extract module name."); - - if !fragment_name.starts_with(&module_name) { - return Err(vec![Diagnostic::error( - ValidationMessage::InvalidFragmentName { - module_name, - fragment_name, - }, - fragment.name.location, - )]); - } - Ok(()) - } -} - -#[derive(Debug, Error)] -pub enum ValidationMessage { - #[error( - "{pluralized_string} in graphql tags must start with the module name ('{module_name}') and end with '{operation_type_suffix}'. Got '{operation_name}' instead." - )] - InvalidOperationName { - pluralized_string: String, - module_name: String, - operation_type_suffix: String, - operation_name: String, - }, - - #[error( - "Fragments in graphql tags must start with the module name ('{module_name}'). Got '{fragment_name}' instead." - )] - InvalidFragmentName { - module_name: String, - fragment_name: String, - }, -} diff --git a/compiler/crates/relay-transforms/src/validations/validate_required_arguments.rs b/compiler/crates/relay-transforms/src/validations/validate_required_arguments.rs index eb55ec94a2e46..d6d5c3af6462b 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_required_arguments.rs +++ b/compiler/crates/relay-transforms/src/validations/validate_required_arguments.rs @@ -131,12 +131,12 @@ impl ValidateRequiredArguments<'_> { && !arguments .iter() .map(|arg| arg.name.item) - .any(|x| x == def.name) + .any(|x| x == def.name.item) { return Err(vec![ Diagnostic::error( ValidationMessage::MissingRequiredArgument { - argument_name: def.name, + argument_name: def.name.item, node_name, root_name: root_name_with_location.item, type_string: self.program.schema.get_type_string(&def.type_), diff --git a/compiler/crates/relay-transforms/src/validations/validate_server_only_directives.rs 
b/compiler/crates/relay-transforms/src/validations/validate_server_only_directives.rs index ed5b03da773ac..e37e72df7b60b 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_server_only_directives.rs +++ b/compiler/crates/relay-transforms/src/validations/validate_server_only_directives.rs @@ -169,7 +169,7 @@ impl<'s> Validator for ServerOnlyDirectivesValidation<'s> { self.is_current_fragment_client_only = self.is_current_fragment_client_only && is_client_only; self.current_client_invalid_directives - .extend(client_invalid_directives.into_iter()); + .extend(client_invalid_directives); if is_client_only { let current_root_client_selection = self.current_root_client_selection; self.current_root_client_selection = Some(fragment.name.location); diff --git a/compiler/crates/relay-transforms/src/validations/validate_static_args.rs b/compiler/crates/relay-transforms/src/validations/validate_static_args.rs index 2c7ef8f53f1c1..04da0c3f479ec 100644 --- a/compiler/crates/relay-transforms/src/validations/validate_static_args.rs +++ b/compiler/crates/relay-transforms/src/validations/validate_static_args.rs @@ -82,8 +82,8 @@ impl<'a> Validator for StaticArgValidator<'a> { } } -fn validate_all_static_args<'a, 'b>( - field_to_static_args: &'b mut StaticArgCache, +fn validate_all_static_args<'a>( + field_to_static_args: &mut StaticArgCache, field_name: StringKey, schema_arguments: &'a ArgumentDefinitions, ir_arguments: &'a [IRArgument], @@ -114,7 +114,7 @@ fn find_static_argument_names(schema_arguments: &ArgumentDefinitions) -> FnvHash schema_arguments .iter() .filter(|a| has_static_directive(a)) - .map(|a| a.name) + .map(|a| a.name.item) .collect() } diff --git a/compiler/crates/relay-transforms/tests/apply_fragment_arguments.rs b/compiler/crates/relay-transforms/tests/apply_fragment_arguments.rs new file mode 100644 index 0000000000000..c938b7ce9445d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/apply_fragment_arguments.rs @@ -0,0 +1,19 @@ +/* + * 
Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::FeatureFlag; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::apply_fragment_arguments; +use relay_transforms::provided_variable_fragment_transform; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + let program = provided_variable_fragment_transform(program)?; + apply_fragment_arguments(&program, false, &FeatureFlag::Enabled, &Default::default()) + }) +} diff --git a/compiler/crates/relay-transforms/tests/apply_fragment_arguments/mod.rs b/compiler/crates/relay-transforms/tests/apply_fragment_arguments/mod.rs deleted file mode 100644 index 0607bc44ab482..0000000000000 --- a/compiler/crates/relay-transforms/tests/apply_fragment_arguments/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::FeatureFlag; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::apply_fragment_arguments; -use relay_transforms::provided_variable_fragment_transform; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let program = provided_variable_fragment_transform(program)?; - apply_fragment_arguments(&program, false, &FeatureFlag::Enabled, &Default::default()) - }) -} diff --git a/compiler/crates/relay-transforms/tests/apply_fragment_arguments_test.rs b/compiler/crates/relay-transforms/tests/apply_fragment_arguments_test.rs index 72f28bb1a75b7..6cd98cd2fec03 100644 --- a/compiler/crates/relay-transforms/tests/apply_fragment_arguments_test.rs +++ b/compiler/crates/relay-transforms/tests/apply_fragment_arguments_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<31a9fd31997eea7c6c3cf9a74ddaf459>> + * @generated SignedSource<<7a0be220ee0ccec98d9d78d2768944ba>> */ mod apply_fragment_arguments; @@ -12,93 +12,93 @@ mod apply_fragment_arguments; use apply_fragment_arguments::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn deletes_empty_fragments() { +#[tokio::test] +async fn deletes_empty_fragments() { let input = include_str!("apply_fragment_arguments/fixtures/deletes-empty-fragments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/deletes-empty-fragments.expected"); - test_fixture(transform_fixture, "deletes-empty-fragments.graphql", "apply_fragment_arguments/fixtures/deletes-empty-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "deletes-empty-fragments.graphql", "apply_fragment_arguments/fixtures/deletes-empty-fragments.expected", input, expected).await; } -#[test] -fn deletes_unreferenced_fragments() { +#[tokio::test] +async fn 
deletes_unreferenced_fragments() { let input = include_str!("apply_fragment_arguments/fixtures/deletes-unreferenced-fragments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/deletes-unreferenced-fragments.expected"); - test_fixture(transform_fixture, "deletes-unreferenced-fragments.graphql", "apply_fragment_arguments/fixtures/deletes-unreferenced-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "deletes-unreferenced-fragments.graphql", "apply_fragment_arguments/fixtures/deletes-unreferenced-fragments.expected", input, expected).await; } -#[test] -fn fragment_include_with_provided_argument() { +#[tokio::test] +async fn fragment_include_with_provided_argument() { let input = include_str!("apply_fragment_arguments/fixtures/fragment-include-with-provided-argument.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/fragment-include-with-provided-argument.expected"); - test_fixture(transform_fixture, "fragment-include-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/fragment-include-with-provided-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-include-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/fragment-include-with-provided-argument.expected", input, expected).await; } -#[test] -fn fragment_with_float_argument() { +#[tokio::test] +async fn fragment_with_float_argument() { let input = include_str!("apply_fragment_arguments/fixtures/fragment-with-float-argument.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/fragment-with-float-argument.expected"); - test_fixture(transform_fixture, "fragment-with-float-argument.graphql", "apply_fragment_arguments/fixtures/fragment-with-float-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-float-argument.graphql", "apply_fragment_arguments/fixtures/fragment-with-float-argument.expected", input, 
expected).await; } -#[test] -fn inline_fragment_with_provided_argument() { +#[tokio::test] +async fn inline_fragment_with_provided_argument() { let input = include_str!("apply_fragment_arguments/fixtures/inline-fragment-with-provided-argument.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/inline-fragment-with-provided-argument.expected"); - test_fixture(transform_fixture, "inline-fragment-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/inline-fragment-with-provided-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/inline-fragment-with-provided-argument.expected", input, expected).await; } -#[test] -fn inlines_fragment_arguments() { +#[tokio::test] +async fn inlines_fragment_arguments() { let input = include_str!("apply_fragment_arguments/fixtures/inlines-fragment-arguments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/inlines-fragment-arguments.expected"); - test_fixture(transform_fixture, "inlines-fragment-arguments.graphql", "apply_fragment_arguments/fixtures/inlines-fragment-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "inlines-fragment-arguments.graphql", "apply_fragment_arguments/fixtures/inlines-fragment-arguments.expected", input, expected).await; } -#[test] -fn inlines_literal_fragment_arguments() { +#[tokio::test] +async fn inlines_literal_fragment_arguments() { let input = include_str!("apply_fragment_arguments/fixtures/inlines-literal-fragment-arguments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/inlines-literal-fragment-arguments.expected"); - test_fixture(transform_fixture, "inlines-literal-fragment-arguments.graphql", "apply_fragment_arguments/fixtures/inlines-literal-fragment-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"inlines-literal-fragment-arguments.graphql", "apply_fragment_arguments/fixtures/inlines-literal-fragment-arguments.expected", input, expected).await; } -#[test] -fn merges_identical_fragments() { +#[tokio::test] +async fn merges_identical_fragments() { let input = include_str!("apply_fragment_arguments/fixtures/merges-identical-fragments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/merges-identical-fragments.expected"); - test_fixture(transform_fixture, "merges-identical-fragments.graphql", "apply_fragment_arguments/fixtures/merges-identical-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "merges-identical-fragments.graphql", "apply_fragment_arguments/fixtures/merges-identical-fragments.expected", input, expected).await; } -#[test] -fn merges_identical_fragments_literal_arguments() { +#[tokio::test] +async fn merges_identical_fragments_literal_arguments() { let input = include_str!("apply_fragment_arguments/fixtures/merges-identical-fragments-literal-arguments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/merges-identical-fragments-literal-arguments.expected"); - test_fixture(transform_fixture, "merges-identical-fragments-literal-arguments.graphql", "apply_fragment_arguments/fixtures/merges-identical-fragments-literal-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "merges-identical-fragments-literal-arguments.graphql", "apply_fragment_arguments/fixtures/merges-identical-fragments-literal-arguments.expected", input, expected).await; } -#[test] -fn multiple_queries_with_provided_argument() { +#[tokio::test] +async fn multiple_queries_with_provided_argument() { let input = include_str!("apply_fragment_arguments/fixtures/multiple-queries-with-provided-argument.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/multiple-queries-with-provided-argument.expected"); - test_fixture(transform_fixture, 
"multiple-queries-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/multiple-queries-with-provided-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-queries-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/multiple-queries-with-provided-argument.expected", input, expected).await; } -#[test] -fn noncyclic_fragment_with_provided_argument() { +#[tokio::test] +async fn noncyclic_fragment_with_provided_argument() { let input = include_str!("apply_fragment_arguments/fixtures/noncyclic-fragment-with-provided-argument.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/noncyclic-fragment-with-provided-argument.expected"); - test_fixture(transform_fixture, "noncyclic-fragment-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/noncyclic-fragment-with-provided-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "noncyclic-fragment-with-provided-argument.graphql", "apply_fragment_arguments/fixtures/noncyclic-fragment-with-provided-argument.expected", input, expected).await; } -#[test] -fn rejects_cyclic_fragments_invalid() { +#[tokio::test] +async fn rejects_cyclic_fragments_invalid() { let input = include_str!("apply_fragment_arguments/fixtures/rejects-cyclic-fragments.invalid.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/rejects-cyclic-fragments.invalid.expected"); - test_fixture(transform_fixture, "rejects-cyclic-fragments.invalid.graphql", "apply_fragment_arguments/fixtures/rejects-cyclic-fragments.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "rejects-cyclic-fragments.invalid.graphql", "apply_fragment_arguments/fixtures/rejects-cyclic-fragments.invalid.expected", input, expected).await; } -#[test] -fn uses_defaults_for_literal_null_arguments() { +#[tokio::test] +async fn uses_defaults_for_literal_null_arguments() { let input = 
include_str!("apply_fragment_arguments/fixtures/uses-defaults-for-literal-null-arguments.graphql"); let expected = include_str!("apply_fragment_arguments/fixtures/uses-defaults-for-literal-null-arguments.expected"); - test_fixture(transform_fixture, "uses-defaults-for-literal-null-arguments.graphql", "apply_fragment_arguments/fixtures/uses-defaults-for-literal-null-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "uses-defaults-for-literal-null-arguments.graphql", "apply_fragment_arguments/fixtures/uses-defaults-for-literal-null-arguments.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/assignable_directive.rs b/compiler/crates/relay-transforms/tests/assignable_directive.rs new file mode 100644 index 0000000000000..70a97e65b7639 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/assignable_directive.rs @@ -0,0 +1,32 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_assignable_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir_result = build(&TEST_SCHEMA, &ast.definitions); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_assignable_directive(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/assignable_directive/mod.rs b/compiler/crates/relay-transforms/tests/assignable_directive/mod.rs deleted file mode 100644 index cf32eaffdcd71..0000000000000 --- a/compiler/crates/relay-transforms/tests/assignable_directive/mod.rs +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_assignable_directive; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build(&TEST_SCHEMA, &ast.definitions); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_assignable_directive(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/assignable_directive_test.rs b/compiler/crates/relay-transforms/tests/assignable_directive_test.rs index 93492610a75ff..565c0924499bc 100644 --- a/compiler/crates/relay-transforms/tests/assignable_directive_test.rs +++ b/compiler/crates/relay-transforms/tests/assignable_directive_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod assignable_directive; @@ -12,44 +12,44 @@ mod assignable_directive; use assignable_directive::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn assignable_fragment() { +#[tokio::test] +async fn assignable_fragment() { let input = include_str!("assignable_directive/fixtures/assignable-fragment.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment.expected"); - test_fixture(transform_fixture, "assignable-fragment.graphql", "assignable_directive/fixtures/assignable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment.graphql", "assignable_directive/fixtures/assignable-fragment.expected", input, expected).await; } -#[test] -fn assignable_fragment_aliased_typename_invalid() { +#[tokio::test] +async fn assignable_fragment_aliased_typename_invalid() { let input = include_str!("assignable_directive/fixtures/assignable-fragment-aliased-typename.invalid.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment-aliased-typename.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-aliased-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-aliased-typename.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-aliased-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-aliased-typename.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_directive_on_typename_invalid() { +#[tokio::test] +async fn assignable_fragment_directive_on_typename_invalid() { let input = include_str!("assignable_directive/fixtures/assignable-fragment-directive-on-typename.invalid.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment-directive-on-typename.invalid.expected"); - test_fixture(transform_fixture, 
"assignable-fragment-directive-on-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-directive-on-typename.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-directive-on-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-directive-on-typename.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_directive_other_directives_invalid() { +#[tokio::test] +async fn assignable_fragment_directive_other_directives_invalid() { let input = include_str!("assignable_directive/fixtures/assignable-fragment-directive-other-directives.invalid.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment-directive-other-directives.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-directive-other-directives.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-directive-other-directives.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-directive-other-directives.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-directive-other-directives.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_no_typename_invalid() { +#[tokio::test] +async fn assignable_fragment_no_typename_invalid() { let input = include_str!("assignable_directive/fixtures/assignable-fragment-no-typename.invalid.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment-no-typename.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-no-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-no-typename.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-no-typename.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-no-typename.invalid.expected", input, expected).await; } -#[test] -fn 
assignable_fragment_other_fields_invalid() { +#[tokio::test] +async fn assignable_fragment_other_fields_invalid() { let input = include_str!("assignable_directive/fixtures/assignable-fragment-other-fields.invalid.graphql"); let expected = include_str!("assignable_directive/fixtures/assignable-fragment-other-fields.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-other-fields.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-other-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-other-fields.invalid.graphql", "assignable_directive/fixtures/assignable-fragment-other-fields.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/assignable_fragment_spread.rs b/compiler/crates/relay-transforms/tests/assignable_fragment_spread.rs new file mode 100644 index 0000000000000..cb3aec74837fd --- /dev/null +++ b/compiler/crates/relay-transforms/tests/assignable_fragment_spread.rs @@ -0,0 +1,20 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::DiagnosticsResult; +use fixture_tests::Fixture; +use graphql_ir::Program; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::transform_assignable_fragment_spreads_in_regular_queries; + +fn transform(program: &Program) -> DiagnosticsResult { + transform_assignable_fragment_spreads_in_regular_queries(program) +} + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, transform) +} diff --git a/compiler/crates/relay-transforms/tests/assignable_fragment_spread/mod.rs b/compiler/crates/relay-transforms/tests/assignable_fragment_spread/mod.rs deleted file mode 100644 index 853722f20376f..0000000000000 --- a/compiler/crates/relay-transforms/tests/assignable_fragment_spread/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::DiagnosticsResult; -use fixture_tests::Fixture; -use graphql_ir::Program; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_assignable_fragment_spreads_in_regular_queries; - -fn transform(program: &Program) -> DiagnosticsResult { - transform_assignable_fragment_spreads_in_regular_queries(program) -} - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, transform) -} diff --git a/compiler/crates/relay-transforms/tests/assignable_fragment_spread_test.rs b/compiler/crates/relay-transforms/tests/assignable_fragment_spread_test.rs index c258a743a0dc3..bb0b2d9e3640e 100644 --- a/compiler/crates/relay-transforms/tests/assignable_fragment_spread_test.rs +++ b/compiler/crates/relay-transforms/tests/assignable_fragment_spread_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<7e199fca5115686b17339b3e81aeaf9d>> + * @generated SignedSource<<2e19314c2dbf23eef2878bbb537fba0e>> */ mod assignable_fragment_spread; @@ -12,93 +12,93 @@ mod assignable_fragment_spread; use assignable_fragment_spread::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn abstract_assignable_fragment_spread_on_concrete_type() { +#[tokio::test] +async fn abstract_assignable_fragment_spread_on_concrete_type() { let input = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type.expected"); - test_fixture(transform_fixture, "abstract-assignable-fragment-spread-on-concrete-type.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-assignable-fragment-spread-on-concrete-type.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type.expected", input, expected).await; } -#[test] -fn abstract_assignable_fragment_spread_on_concrete_type_in_updatable_fragment() { +#[tokio::test] +async fn abstract_assignable_fragment_spread_on_concrete_type_in_updatable_fragment() { let input = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.expected"); - test_fixture(transform_fixture, "abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-concrete-type-in-updatable-fragment.expected", input, expected).await; } -#[test] -fn abstract_assignable_fragment_spread_on_different_abstract_type() { +#[tokio::test] +async fn abstract_assignable_fragment_spread_on_different_abstract_type() { let input = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-different-abstract-type.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-different-abstract-type.expected"); - test_fixture(transform_fixture, "abstract-assignable-fragment-spread-on-different-abstract-type.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-different-abstract-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-assignable-fragment-spread-on-different-abstract-type.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-different-abstract-type.expected", input, expected).await; } -#[test] -fn abstract_assignable_fragment_spread_on_matching_abstract_type() { +#[tokio::test] +async fn abstract_assignable_fragment_spread_on_matching_abstract_type() { let input = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-matching-abstract-type.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-matching-abstract-type.expected"); - test_fixture(transform_fixture, "abstract-assignable-fragment-spread-on-matching-abstract-type.graphql", "assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-matching-abstract-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-assignable-fragment-spread-on-matching-abstract-type.graphql", 
"assignable_fragment_spread/fixtures/abstract-assignable-fragment-spread-on-matching-abstract-type.expected", input, expected).await; } -#[test] -fn assignable_fragment_spread_top_level_invalid() { +#[tokio::test] +async fn assignable_fragment_spread_top_level_invalid() { let input = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-top-level.invalid.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-top-level.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-top-level.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-top-level.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-top-level.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-top-level.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_spread_with_directives_invalid() { +#[tokio::test] +async fn assignable_fragment_spread_with_directives_invalid() { let input = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-with-directives.invalid.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-with-directives.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-with-directives.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-with-directives.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-with-directives.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-with-directives.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_spread_within_inline_fragment() { +#[tokio::test] +async fn assignable_fragment_spread_within_inline_fragment() { let input = 
include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-within-inline-fragment.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-within-inline-fragment.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment.expected", input, expected).await; } -#[test] -fn assignable_fragment_spread_within_inline_fragment_and_linked_field() { +#[tokio::test] +async fn assignable_fragment_spread_within_inline_fragment_and_linked_field() { let input = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment-and-linked-field.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment-and-linked-field.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-within-inline-fragment-and-linked-field.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment-and-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-within-inline-fragment-and-linked-field.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-inline-fragment-and-linked-field.expected", input, expected).await; } -#[test] -fn assignable_fragment_spread_within_skipped_inline_fragment_invalid() { +#[tokio::test] +async fn assignable_fragment_spread_within_skipped_inline_fragment_invalid() { let input = include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-skipped-inline-fragment.invalid.graphql"); let expected = 
include_str!("assignable_fragment_spread/fixtures/assignable-fragment-spread-within-skipped-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-within-skipped-inline-fragment.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-skipped-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-within-skipped-inline-fragment.invalid.graphql", "assignable_fragment_spread/fixtures/assignable-fragment-spread-within-skipped-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn concrete_assignable_fragment_spread_on_abstract_type() { +#[tokio::test] +async fn concrete_assignable_fragment_spread_on_abstract_type() { let input = include_str!("assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-abstract-type.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-abstract-type.expected"); - test_fixture(transform_fixture, "concrete-assignable-fragment-spread-on-abstract-type.graphql", "assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-abstract-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "concrete-assignable-fragment-spread-on-abstract-type.graphql", "assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-abstract-type.expected", input, expected).await; } -#[test] -fn concrete_assignable_fragment_spread_on_matching_concrete_type() { +#[tokio::test] +async fn concrete_assignable_fragment_spread_on_matching_concrete_type() { let input = include_str!("assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-matching-concrete-type.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-matching-concrete-type.expected"); - test_fixture(transform_fixture, 
"concrete-assignable-fragment-spread-on-matching-concrete-type.graphql", "assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-matching-concrete-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "concrete-assignable-fragment-spread-on-matching-concrete-type.graphql", "assignable_fragment_spread/fixtures/concrete-assignable-fragment-spread-on-matching-concrete-type.expected", input, expected).await; } -#[test] -fn included_assignable_fragment_spread_invalid() { +#[tokio::test] +async fn included_assignable_fragment_spread_invalid() { let input = include_str!("assignable_fragment_spread/fixtures/included-assignable-fragment-spread.invalid.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/included-assignable-fragment-spread.invalid.expected"); - test_fixture(transform_fixture, "included-assignable-fragment-spread.invalid.graphql", "assignable_fragment_spread/fixtures/included-assignable-fragment-spread.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "included-assignable-fragment-spread.invalid.graphql", "assignable_fragment_spread/fixtures/included-assignable-fragment-spread.invalid.expected", input, expected).await; } -#[test] -fn skipped_assignable_fragment_spread_invalid() { +#[tokio::test] +async fn skipped_assignable_fragment_spread_invalid() { let input = include_str!("assignable_fragment_spread/fixtures/skipped-assignable-fragment-spread.invalid.graphql"); let expected = include_str!("assignable_fragment_spread/fixtures/skipped-assignable-fragment-spread.invalid.expected"); - test_fixture(transform_fixture, "skipped-assignable-fragment-spread.invalid.graphql", "assignable_fragment_spread/fixtures/skipped-assignable-fragment-spread.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "skipped-assignable-fragment-spread.invalid.graphql", "assignable_fragment_spread/fixtures/skipped-assignable-fragment-spread.invalid.expected", 
input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/catch_directive.rs b/compiler/crates/relay-transforms/tests/catch_directive.rs new file mode 100644 index 0000000000000..5ada83456eef2 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive.rs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::DiagnosticsResult; +use fixture_tests::Fixture; +use graphql_ir::Program; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::catch_directive; +use relay_transforms::fragment_alias_directive; + +fn transform(program: &Program) -> DiagnosticsResult { + catch_directive(&fragment_alias_directive(program, true, true)?, true) +} + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, transform) +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.expected b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.expected new file mode 100644 index 0000000000000..8f776a2d79582 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.expected @@ -0,0 +1,29 @@ +==================================== INPUT ==================================== +fragment MyFragmentFirst on User { + parents { + lastName + } +} + +fragment MyFragmentSecond on User { + address @catch(to: RESULT) { + street + } +} +==================================== OUTPUT =================================== +fragment MyFragmentFirst on User { + parents { + lastName + } +} + +fragment MyFragmentSecond on User { + address @catch(to: RESULT) @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "address", + # } + { + street + } +} 
diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.graphql new file mode 100644 index 0000000000000..e01545d09ff3f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked-with-linked-sibling.graphql @@ -0,0 +1,11 @@ +fragment MyFragmentFirst on User { + parents { + lastName + } +} + +fragment MyFragmentSecond on User { + address @catch(to: RESULT) { + street + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.expected b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.expected new file mode 100644 index 0000000000000..8d08156307003 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.expected @@ -0,0 +1,17 @@ +==================================== INPUT ==================================== +fragment MyFragment on User { + address @catch(to: RESULT) { + street + } +} +==================================== OUTPUT =================================== +fragment MyFragment on User { + address @catch(to: RESULT) @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "address", + # } + { + street + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.graphql new file mode 100644 index 0000000000000..5b2d19bd6f4de --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-linked.graphql @@ -0,0 +1,5 @@ +fragment MyFragment on User { + address @catch(to: RESULT) { + street + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.expected 
b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.expected new file mode 100644 index 0000000000000..f5f66d1158f35 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +fragment MyFragmentFirst on User { + parents @catch { + lastName @catch + } +} +==================================== OUTPUT =================================== +fragment MyFragmentFirst on User { + parents @catch @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "parents", + # } + { + lastName @catch @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "parents.lastName", + # } + + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.graphql new file mode 100644 index 0000000000000..6bf212f44a9d4 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-nested-catches.graphql @@ -0,0 +1,5 @@ +fragment MyFragmentFirst on User { + parents @catch { + lastName @catch + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.expected b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.expected new file mode 100644 index 0000000000000..14cbe486f6685 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.expected @@ -0,0 +1,16 @@ +==================================== INPUT ==================================== +# expected-to-throw +query Foo { + me { + id + name @catch @required(action: THROW) + } +} +==================================== ERROR 
==================================== +✖︎ @catch and @required directives cannot be on the same field + + catch-usage-on-query-with-required.invalid.graphql:5:17 + 4 │ id + 5 │ name @catch @required(action: THROW) + │ ^^^^^^^^^ + 6 │ } diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.graphql new file mode 100644 index 0000000000000..7932aad5d5ca9 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query-with-required.invalid.graphql @@ -0,0 +1,7 @@ +# expected-to-throw +query Foo { + me { + id + name @catch @required(action: THROW) + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.expected b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.expected new file mode 100644 index 0000000000000..3584c6f523ccd --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.expected @@ -0,0 +1,19 @@ +==================================== INPUT ==================================== +query Foo { + me { + id + name @catch + } +} +==================================== OUTPUT =================================== +query Foo { + me { + id + name @catch @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "me.name", + # } + + } +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.graphql new file mode 100644 index 0000000000000..e57bcb8832bfd --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage-on-query.graphql @@ -0,0 +1,6 @@ +query Foo { + me { + id + name @catch + } +} diff --git 
a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.expected b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.expected new file mode 100644 index 0000000000000..bfac48d7e5462 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.expected @@ -0,0 +1,13 @@ +==================================== INPUT ==================================== +fragment FragFoo on User { + name @catch(to: RESULT) +} +==================================== OUTPUT =================================== +fragment FragFoo on User { + name @catch(to: RESULT) @__CatchMetadataDirective + # CatchMetadataDirective { + # to: Result, + # path: "name", + # } + +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.graphql b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.graphql new file mode 100644 index 0000000000000..e16bed9ea5d9b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive/fixtures/catch-usage.graphql @@ -0,0 +1,3 @@ +fragment FragFoo on User { + name @catch(to: RESULT) +} diff --git a/compiler/crates/relay-transforms/tests/catch_directive_test.rs b/compiler/crates/relay-transforms/tests/catch_directive_test.rs new file mode 100644 index 0000000000000..5b9e54d9a5975 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/catch_directive_test.rs @@ -0,0 +1,55 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<<93e5f2ac6e6d960616d4f1a412fccc79>> + */ + +mod catch_directive; + +use catch_directive::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn catch_usage() { + let input = include_str!("catch_directive/fixtures/catch-usage.graphql"); + let expected = include_str!("catch_directive/fixtures/catch-usage.expected"); + test_fixture(transform_fixture, file!(), "catch-usage.graphql", "catch_directive/fixtures/catch-usage.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_usage_linked() { + let input = include_str!("catch_directive/fixtures/catch-usage-linked.graphql"); + let expected = include_str!("catch_directive/fixtures/catch-usage-linked.expected"); + test_fixture(transform_fixture, file!(), "catch-usage-linked.graphql", "catch_directive/fixtures/catch-usage-linked.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_usage_linked_with_linked_sibling() { + let input = include_str!("catch_directive/fixtures/catch-usage-linked-with-linked-sibling.graphql"); + let expected = include_str!("catch_directive/fixtures/catch-usage-linked-with-linked-sibling.expected"); + test_fixture(transform_fixture, file!(), "catch-usage-linked-with-linked-sibling.graphql", "catch_directive/fixtures/catch-usage-linked-with-linked-sibling.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_usage_nested_catches() { + let input = include_str!("catch_directive/fixtures/catch-usage-nested-catches.graphql"); + let expected = include_str!("catch_directive/fixtures/catch-usage-nested-catches.expected"); + test_fixture(transform_fixture, file!(), "catch-usage-nested-catches.graphql", "catch_directive/fixtures/catch-usage-nested-catches.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_usage_on_query() { + let input = include_str!("catch_directive/fixtures/catch-usage-on-query.graphql"); + let expected = 
include_str!("catch_directive/fixtures/catch-usage-on-query.expected"); + test_fixture(transform_fixture, file!(), "catch-usage-on-query.graphql", "catch_directive/fixtures/catch-usage-on-query.expected", input, expected).await; +} + +#[tokio::test] +async fn catch_usage_on_query_with_required_invalid() { + let input = include_str!("catch_directive/fixtures/catch-usage-on-query-with-required.invalid.graphql"); + let expected = include_str!("catch_directive/fixtures/catch-usage-on-query-with-required.invalid.expected"); + test_fixture(transform_fixture, file!(), "catch-usage-on-query-with-required.invalid.graphql", "catch_directive/fixtures/catch-usage-on-query-with-required.invalid.expected", input, expected).await; +} diff --git a/compiler/crates/relay-transforms/tests/client_edges.rs b/compiler/crates/relay-transforms/tests/client_edges.rs new file mode 100644 index 0000000000000..fa8ed0a690f0e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges.rs @@ -0,0 +1,77 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_edges; +use relay_transforms::relay_resolvers; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let relay_resolver_enable_interface_output_type = if fixture + .content + .contains("# relay-resolver-enable-interface-output-type") + { + FeatureFlag::Enabled + } else { + FeatureFlag::Disabled + }; + let feature_flags = Arc::new(FeatureFlags { + relay_resolver_enable_interface_output_type, + ..Default::default() + }); + let project_config: ProjectConfig = ProjectConfig { + feature_flags, + ..Default::default() + }; + let mut next_program = client_edges(&program, &project_config, &Default::default()) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + next_program = relay_resolvers(ProjectName::default(), &next_program, true) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + + 
let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected index 41724255c79db..8f97a558adbc6 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected @@ -38,7 +38,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -65,7 +65,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -102,8 +102,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -130,8 +133,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend_1 on User @__ClientEdge # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# 
identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment.expected index 5e253386f697d..1b9b5348bae65 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-inline-fragment.expected @@ -43,7 +43,7 @@ fragment Foo_node on Node { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -72,7 +72,7 @@ fragment Foo_node on Node { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -110,8 +110,11 @@ fragment RefetchableClientEdgeQuery_Foo_node_PhotoStory__author__best_friend on # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -138,8 +141,11 @@ fragment RefetchableClientEdgeQuery_Foo_node_Story__author__best_friend on User # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.expected new file mode 100644 index 0000000000000..c95e7efea66e0 --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.expected @@ -0,0 +1,69 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment Foo_user on User { + best_friend { + id + } +} + +fragment BestFriendResolverFragment_name on User { + __typename +} + +# %extensions% + +interface ClientOnlyInterface implements Node { + id: ID! +} + +# Add a concrete type so that we don't trigger an unrelated compiler error. +type BestFriend implements ClientOnlyInterface @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BestFriendResolver" fragment_name: "BestFriend__id", generated_fragment: true, inject_fragment_data: "id", import_name: "BestFriend") +} + +extend type User { + best_friend: ClientOnlyInterface @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") +} +==================================== OUTPUT =================================== +fragment BestFriendResolverFragment_name on User { + __typename +} + +fragment Foo_user on User { + ... 
@__ClientEdgeMetadataDirective + # ClientObject { + # type_name: None, + # unique_id: 0, + # model_resolvers: [ + # ClientEdgeModelResolver { + # type_name: WithLocation { + # location: :144:154, + # item: ObjectName( + # "BestFriend", + # ), + # }, + # is_live: false, + # }, + # ], + # } + { + ...BestFriendResolverFragment_name @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(529), + # import_path: "BestFriendResolver", + # import_name: None, + # field_alias: None, + # field_path: "best_friend", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + best_friend { + id + } + } +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.graphql new file mode 100644 index 0000000000000..fb8bb04a41dee --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.graphql @@ -0,0 +1,27 @@ +# relay-resolver-enable-interface-output-type + +fragment Foo_user on User { + best_friend { + id + } +} + +fragment BestFriendResolverFragment_name on User { + __typename +} + +# %extensions% + +interface ClientOnlyInterface implements Node { + id: ID! +} + +# Add a concrete type so that we don't trigger an unrelated compiler error. +type BestFriend implements ClientOnlyInterface @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BestFriendResolver" fragment_name: "BestFriend__id", generated_fragment: true, inject_fragment_data: "id", import_name: "BestFriend") +} + +extend type User { + best_friend: ClientOnlyInterface @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.expected index d7b9a7bb61baa..293bc39debbc3 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.expected @@ -16,6 +16,12 @@ interface ClientOnlyInterface implements Node { id: ID! } +# Add a concrete type so that we don't trigger an unrelated compiler error. +type BestFriend implements ClientOnlyInterface @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BestFriendResolver" fragment_name: "BestFriend__id", generated_fragment: true, inject_fragment_data: "id", import_name: "BestFriend") +} + extend type User { best_friend: ClientOnlyInterface @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") } diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.graphql index 46c9dc032e4b3..2c95d7aa987f2 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-interface.invalid.graphql @@ -15,6 +15,12 @@ interface ClientOnlyInterface implements Node { id: ID! } +# Add a concrete type so that we don't trigger an unrelated compiler error. +type BestFriend implements ClientOnlyInterface @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "BestFriendResolver" fragment_name: "BestFriend__id", generated_fragment: true, inject_fragment_data: "id", import_name: "BestFriend") +} + extend type User { best_friend: ClientOnlyInterface @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") } diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-object.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-object.expected index bf30b08e2be03..c9f63a45c04d7 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-object.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-object.expected @@ -32,11 +32,12 @@ fragment Foo_user on User { # ), # ), # unique_id: 0, + # model_resolvers: [], # } { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(518), + # field_id: FieldID(527), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.expected new file mode 100644 index 0000000000000..c1e1115b6e10d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.expected @@ -0,0 +1,74 @@ +==================================== INPUT ==================================== +fragment FeedbackFragmentType on User { + feedback_as_union { + ... on Like { + __typename + } + } +} + +# %extensions% + +type Like @__RelayResolverModel { + id: ID! 
+ __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "LikeResolver", fragment_name: "Like__id", inject_fragment_data: "id") +} + +type Heart @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "HeartResolver", fragment_name: "Heart__id", inject_fragment_data: "id") +} + +union ClientOnlyUnion = Comment | Like | Heart + +extend type User { + feedback_as_union: ClientOnlyUnion @relay_resolver(import_path: "FeedbackResolver") +} +==================================== OUTPUT =================================== +fragment FeedbackFragmentType on User { + ... @__ClientEdgeMetadataDirective + # ClientObject { + # type_name: None, + # unique_id: 0, + # model_resolvers: [ + # ClientEdgeModelResolver { + # type_name: WithLocation { + # location: :199:204, + # item: ObjectName( + # "Heart", + # ), + # }, + # is_live: false, + # }, + # ClientEdgeModelResolver { + # type_name: WithLocation { + # location: :7:11, + # item: ObjectName( + # "Like", + # ), + # }, + # is_live: false, + # }, + # ], + # } + { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(530), + # import_path: "FeedbackResolver", + # import_name: None, + # field_alias: None, + # field_path: "feedback_as_union", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + feedback_as_union { + ... on Like { + __typename + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.graphql new file mode 100644 index 0000000000000..db74cb8b7c3ff --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.graphql @@ -0,0 +1,25 @@ +fragment FeedbackFragmentType on User { + feedback_as_union { + ... 
on Like { + __typename + } + } +} + +# %extensions% + +type Like @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "LikeResolver", fragment_name: "Like__id", inject_fragment_data: "id") +} + +type Heart @__RelayResolverModel { + id: ID! + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "HeartResolver", fragment_name: "Heart__id", inject_fragment_data: "id") +} + +union ClientOnlyUnion = Comment | Like | Heart + +extend type User { + feedback_as_union: ClientOnlyUnion @relay_resolver(import_path: "FeedbackResolver") +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.expected deleted file mode 100644 index 8581f0a185b1e..0000000000000 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.expected +++ /dev/null @@ -1,27 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -fragment Foo_user on User { - best_friend { - __typename - } -} - -fragment BestFriendResolverFragment_name on ClientOnlyUnion { - __typename -} - -# %extensions% - -union ClientOnlyUnion = Comment | Feedback - -extend type User { - best_friend: ClientOnlyUnion @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") -} -==================================== ERROR ==================================== -✖︎ Client Edges that reference client-defined union types are not currently supported in Relay. 
- - client-edge-to-client-union.invalid.graphql:3:3 - 2 │ fragment Foo_user on User { - 3 │ best_friend { - │ ^^^^^^^^^^^ - 4 │ __typename diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.graphql deleted file mode 100644 index d5c0ca9a84880..0000000000000 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-to-client-union.invalid.graphql +++ /dev/null @@ -1,18 +0,0 @@ -# expected-to-throw -fragment Foo_user on User { - best_friend { - __typename - } -} - -fragment BestFriendResolverFragment_name on ClientOnlyUnion { - __typename -} - -# %extensions% - -union ClientOnlyUnion = Comment | Feedback - -extend type User { - best_friend: ClientOnlyUnion @relay_resolver(fragment_name: "BestFriendResolverFragment_name", import_path: "BestFriendResolver") -} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-variables.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-variables.expected index 52d62c03e5905..9b2b28da71f7b 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-variables.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-variables.expected @@ -30,7 +30,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -66,8 +66,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git 
a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-with-required.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-with-required.expected index 1a6166afcb5a8..179311c0f090e 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-with-required.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-with-required.expected @@ -34,7 +34,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -70,8 +70,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-within-non-client-edge.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-within-non-client-edge.expected index 34a702c7c1cf4..f2b8937b5470c 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-within-non-client-edge.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge-within-non-client-edge.expected @@ -33,7 +33,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -70,8 +70,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_nearest_neighbor__best_friend on Us # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# 
identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge.expected index 7bb90aa47d756..f47c6d59bdc2e 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/client-edge.expected @@ -30,7 +30,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -66,8 +66,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges-with-variables.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges-with-variables.expected index f49abf7c7c25f..0e48deaeba1d7 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges-with-variables.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges-with-variables.expected @@ -34,7 +34,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -57,11 +57,11 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: 
FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, - # field_path: "best_friend", + # field_path: "best_friend.best_friend", # field_arguments: [], # live: false, # output_type_info: EdgeTo, @@ -96,8 +96,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -112,7 +115,7 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -150,8 +153,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend__best_friend on User @_ # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges.expected index bed5e98690744..28e599338a951 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-client-edges.expected @@ -32,7 +32,7 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -54,11 +54,11 @@ fragment Foo_user on User { { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { 
- # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, - # field_path: "best_friend", + # field_path: "best_friend.best_friend", # field_arguments: [], # live: false, # output_type_info: EdgeTo, @@ -92,8 +92,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -107,7 +110,7 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend on User @__ClientEdgeGe { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -144,8 +147,11 @@ fragment RefetchableClientEdgeQuery_Foo_user_best_friend__best_friend on User @_ # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.expected new file mode 100644 index 0000000000000..91c214e2c9d10 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.expected @@ -0,0 +1,92 @@ +==================================== INPUT ==================================== +fragment Foo_user on ClientUser { + bff: best_friend { + bffs_bff: best_friend { + id + } + } +} + +fragment BestFriendFragment on ClientUser { + id +} + +# %extensions% + +type ClientUser { + id: ID +} + +extend type ClientUser { + best_friend: ClientUser + @relay_resolver( + fragment_name: "BestFriendFragment" + 
import_path: "BestFriendResolver" + ) +} +==================================== OUTPUT =================================== +fragment BestFriendFragment on ClientUser { + id +} + +fragment Foo_user on ClientUser { + ... @__ClientEdgeMetadataDirective + # ClientObject { + # type_name: Some( + # ObjectName( + # "ClientUser", + # ), + # ), + # unique_id: 1, + # model_resolvers: [], + # } + { + ...BestFriendFragment @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "BestFriendResolver", + # import_name: None, + # field_alias: Some( + # "bff", + # ), + # field_path: "bff", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + bff: best_friend { + ... @__ClientEdgeMetadataDirective + # ClientObject { + # type_name: Some( + # ObjectName( + # "ClientUser", + # ), + # ), + # unique_id: 0, + # model_resolvers: [], + # } + { + ...BestFriendFragment @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "BestFriendResolver", + # import_name: None, + # field_alias: Some( + # "bffs_bff", + # ), + # field_path: "bff.bffs_bff", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + bffs_bff: best_friend { + id + } + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.graphql new file mode 100644 index 0000000000000..6c64bc15b0009 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path-with-alias.graphql @@ -0,0 +1,25 @@ +fragment Foo_user on ClientUser { + bff: best_friend { + bffs_bff: best_friend { + id + } + } +} + +fragment BestFriendFragment on ClientUser { + id +} + +# %extensions% + +type ClientUser { + id: ID +} + +extend type ClientUser { + best_friend: ClientUser + 
@relay_resolver( + fragment_name: "BestFriendFragment" + import_path: "BestFriendResolver" + ) +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.expected new file mode 100644 index 0000000000000..350f646bceb43 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.expected @@ -0,0 +1,88 @@ +==================================== INPUT ==================================== +fragment Foo_user on ClientUser { + best_friend { + best_friend { + id + } + } +} + +fragment BestFriendFragment on ClientUser { + id +} + +# %extensions% + +type ClientUser { + id: ID +} + +extend type ClientUser { + best_friend: ClientUser + @relay_resolver( + fragment_name: "BestFriendFragment" + import_path: "BestFriendResolver" + ) +} +==================================== OUTPUT =================================== +fragment BestFriendFragment on ClientUser { + id +} + +fragment Foo_user on ClientUser { + ... @__ClientEdgeMetadataDirective + # ClientObject { + # type_name: Some( + # ObjectName( + # "ClientUser", + # ), + # ), + # unique_id: 1, + # model_resolvers: [], + # } + { + ...BestFriendFragment @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "BestFriendResolver", + # import_name: None, + # field_alias: None, + # field_path: "best_friend", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + best_friend { + ... 
@__ClientEdgeMetadataDirective + # ClientObject { + # type_name: Some( + # ObjectName( + # "ClientUser", + # ), + # ), + # unique_id: 0, + # model_resolvers: [], + # } + { + ...BestFriendFragment @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "BestFriendResolver", + # import_name: None, + # field_alias: None, + # field_path: "best_friend.best_friend", + # field_arguments: [], + # live: false, + # output_type_info: EdgeTo, + # fragment_data_injection_mode: None, + # } + + best_friend { + id + } + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.graphql b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.graphql new file mode 100644 index 0000000000000..15c71fdb0f99d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/nested-path.graphql @@ -0,0 +1,25 @@ +fragment Foo_user on ClientUser { + best_friend { + best_friend { + id + } + } +} + +fragment BestFriendFragment on ClientUser { + id +} + +# %extensions% + +type ClientUser { + id: ID +} + +extend type ClientUser { + best_friend: ClientUser + @relay_resolver( + fragment_name: "BestFriendFragment" + import_path: "BestFriendResolver" + ) +} diff --git a/compiler/crates/relay-transforms/tests/client_edges/fixtures/output-type.expected b/compiler/crates/relay-transforms/tests/client_edges/fixtures/output-type.expected index 9e33bac29afef..cc7a6953eb613 100644 --- a/compiler/crates/relay-transforms/tests/client_edges/fixtures/output-type.expected +++ b/compiler/crates/relay-transforms/tests/client_edges/fixtures/output-type.expected @@ -32,11 +32,12 @@ fragment Foo_user on User { # ), # ), # unique_id: 0, + # model_resolvers: [], # } { ...BestFriendResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(518), + # field_id: FieldID(527), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, @@ -45,7 +46,7 @@ 
fragment Foo_user on User { # live: false, # output_type_info: Composite( # ResolverNormalizationInfo { - # inner_type: Object(78), + # inner_type: Object(82), # plural: false, # normalization_operation: WithLocation { # location: :59:70, diff --git a/compiler/crates/relay-transforms/tests/client_edges/mod.rs b/compiler/crates/relay-transforms/tests/client_edges/mod.rs deleted file mode 100644 index f0e561a29f7fb..0000000000000 --- a/compiler/crates/relay-transforms/tests/client_edges/mod.rs +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_edges; -use relay_transforms::relay_resolvers; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let mut next_program = client_edges(&program, &Default::default()) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - next_program = relay_resolvers(&next_program, true) - .map_err(|diagnostics| 
diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/client_edges_test.rs b/compiler/crates/relay-transforms/tests/client_edges_test.rs index eb86f61825448..a291dd457b5cc 100644 --- a/compiler/crates/relay-transforms/tests/client_edges_test.rs +++ b/compiler/crates/relay-transforms/tests/client_edges_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<4c8ae6c8320ef3ba5464f80d8fff1afd>> + * @generated SignedSource<<0fcfe34726846687001fb9cf0431f725>> */ mod client_edges; @@ -12,93 +12,114 @@ mod client_edges; use client_edges::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_edge() { +#[tokio::test] +async fn client_edge() { let input = include_str!("client_edges/fixtures/client-edge.graphql"); let expected = include_str!("client_edges/fixtures/client-edge.expected"); - test_fixture(transform_fixture, "client-edge.graphql", "client_edges/fixtures/client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge.graphql", "client_edges/fixtures/client-edge.expected", input, expected).await; } -#[test] -fn client_edge_inline_fragment() { +#[tokio::test] +async fn client_edge_inline_fragment() { let input = include_str!("client_edges/fixtures/client-edge-inline-fragment.graphql"); let expected = 
include_str!("client_edges/fixtures/client-edge-inline-fragment.expected"); - test_fixture(transform_fixture, "client-edge-inline-fragment.graphql", "client_edges/fixtures/client-edge-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-inline-fragment.graphql", "client_edges/fixtures/client-edge-inline-fragment.expected", input, expected).await; } -#[test] -fn client_edge_inline_fragment_no_type_condition() { +#[tokio::test] +async fn client_edge_inline_fragment_no_type_condition() { let input = include_str!("client_edges/fixtures/client-edge-inline-fragment-no-type-condition.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected"); - test_fixture(transform_fixture, "client-edge-inline-fragment-no-type-condition.graphql", "client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-inline-fragment-no-type-condition.graphql", "client_edges/fixtures/client-edge-inline-fragment-no-type-condition.expected", input, expected).await; } -#[test] -fn client_edge_to_client_interface_invalid() { +#[tokio::test] +async fn client_edge_to_client_interface() { + let input = include_str!("client_edges/fixtures/client-edge-to-client-interface.graphql"); + let expected = include_str!("client_edges/fixtures/client-edge-to-client-interface.expected"); + test_fixture(transform_fixture, file!(), "client-edge-to-client-interface.graphql", "client_edges/fixtures/client-edge-to-client-interface.expected", input, expected).await; +} + +#[tokio::test] +async fn client_edge_to_client_interface_invalid() { let input = include_str!("client_edges/fixtures/client-edge-to-client-interface.invalid.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-to-client-interface.invalid.expected"); - test_fixture(transform_fixture, "client-edge-to-client-interface.invalid.graphql", 
"client_edges/fixtures/client-edge-to-client-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-client-interface.invalid.graphql", "client_edges/fixtures/client-edge-to-client-interface.invalid.expected", input, expected).await; } -#[test] -fn client_edge_to_client_object() { +#[tokio::test] +async fn client_edge_to_client_object() { let input = include_str!("client_edges/fixtures/client-edge-to-client-object.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-to-client-object.expected"); - test_fixture(transform_fixture, "client-edge-to-client-object.graphql", "client_edges/fixtures/client-edge-to-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-to-client-object.graphql", "client_edges/fixtures/client-edge-to-client-object.expected", input, expected).await; } -#[test] -fn client_edge_to_client_union_invalid() { - let input = include_str!("client_edges/fixtures/client-edge-to-client-union.invalid.graphql"); - let expected = include_str!("client_edges/fixtures/client-edge-to-client-union.invalid.expected"); - test_fixture(transform_fixture, "client-edge-to-client-union.invalid.graphql", "client_edges/fixtures/client-edge-to-client-union.invalid.expected", input, expected); +#[tokio::test] +async fn client_edge_to_client_union() { + let input = include_str!("client_edges/fixtures/client-edge-to-client-union.graphql"); + let expected = include_str!("client_edges/fixtures/client-edge-to-client-union.expected"); + test_fixture(transform_fixture, file!(), "client-edge-to-client-union.graphql", "client_edges/fixtures/client-edge-to-client-union.expected", input, expected).await; } -#[test] -fn client_edge_variables() { +#[tokio::test] +async fn client_edge_variables() { let input = include_str!("client_edges/fixtures/client-edge-variables.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-variables.expected"); - 
test_fixture(transform_fixture, "client-edge-variables.graphql", "client_edges/fixtures/client-edge-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-variables.graphql", "client_edges/fixtures/client-edge-variables.expected", input, expected).await; } -#[test] -fn client_edge_with_required() { +#[tokio::test] +async fn client_edge_with_required() { let input = include_str!("client_edges/fixtures/client-edge-with-required.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-with-required.expected"); - test_fixture(transform_fixture, "client-edge-with-required.graphql", "client_edges/fixtures/client-edge-with-required.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-with-required.graphql", "client_edges/fixtures/client-edge-with-required.expected", input, expected).await; } -#[test] -fn client_edge_within_non_client_edge() { +#[tokio::test] +async fn client_edge_within_non_client_edge() { let input = include_str!("client_edges/fixtures/client-edge-within-non-client-edge.graphql"); let expected = include_str!("client_edges/fixtures/client-edge-within-non-client-edge.expected"); - test_fixture(transform_fixture, "client-edge-within-non-client-edge.graphql", "client_edges/fixtures/client-edge-within-non-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-edge-within-non-client-edge.graphql", "client_edges/fixtures/client-edge-within-non-client-edge.expected", input, expected).await; } -#[test] -fn nested_client_edges() { +#[tokio::test] +async fn nested_client_edges() { let input = include_str!("client_edges/fixtures/nested-client-edges.graphql"); let expected = include_str!("client_edges/fixtures/nested-client-edges.expected"); - test_fixture(transform_fixture, "nested-client-edges.graphql", "client_edges/fixtures/nested-client-edges.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"nested-client-edges.graphql", "client_edges/fixtures/nested-client-edges.expected", input, expected).await; } -#[test] -fn nested_client_edges_with_variables() { +#[tokio::test] +async fn nested_client_edges_with_variables() { let input = include_str!("client_edges/fixtures/nested-client-edges-with-variables.graphql"); let expected = include_str!("client_edges/fixtures/nested-client-edges-with-variables.expected"); - test_fixture(transform_fixture, "nested-client-edges-with-variables.graphql", "client_edges/fixtures/nested-client-edges-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested-client-edges-with-variables.graphql", "client_edges/fixtures/nested-client-edges-with-variables.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_path() { + let input = include_str!("client_edges/fixtures/nested-path.graphql"); + let expected = include_str!("client_edges/fixtures/nested-path.expected"); + test_fixture(transform_fixture, file!(), "nested-path.graphql", "client_edges/fixtures/nested-path.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_path_with_alias() { + let input = include_str!("client_edges/fixtures/nested-path-with-alias.graphql"); + let expected = include_str!("client_edges/fixtures/nested-path-with-alias.expected"); + test_fixture(transform_fixture, file!(), "nested-path-with-alias.graphql", "client_edges/fixtures/nested-path-with-alias.expected", input, expected).await; } -#[test] -fn output_type() { +#[tokio::test] +async fn output_type() { let input = include_str!("client_edges/fixtures/output-type.graphql"); let expected = include_str!("client_edges/fixtures/output-type.expected"); - test_fixture(transform_fixture, "output-type.graphql", "client_edges/fixtures/output-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type.graphql", "client_edges/fixtures/output-type.expected", input, expected).await; } -#[test] -fn 
unexpected_waterfall_invalid() { +#[tokio::test] +async fn unexpected_waterfall_invalid() { let input = include_str!("client_edges/fixtures/unexpected-waterfall.invalid.graphql"); let expected = include_str!("client_edges/fixtures/unexpected-waterfall.invalid.expected"); - test_fixture(transform_fixture, "unexpected-waterfall.invalid.graphql", "client_edges/fixtures/unexpected-waterfall.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unexpected-waterfall.invalid.graphql", "client_edges/fixtures/unexpected-waterfall.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/client_extensions.rs b/compiler/crates/relay-transforms/tests/client_extensions.rs new file mode 100644 index 0000000000000..f8bbdd37614a6 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_extensions.rs @@ -0,0 +1,50 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_extensions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = client_extensions(&program); + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + + let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/client_extensions/mod.rs b/compiler/crates/relay-transforms/tests/client_extensions/mod.rs deleted file mode 100644 index 678dcf1b4a802..0000000000000 --- a/compiler/crates/relay-transforms/tests/client_extensions/mod.rs +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_extensions; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = client_extensions(&program); - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/client_extensions_abstract_types.rs b/compiler/crates/relay-transforms/tests/client_extensions_abstract_types.rs new file mode 100644 index 0000000000000..ce1428c75bd52 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/client_extensions_abstract_types.rs @@ -0,0 +1,50 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::client_extensions_abstract_types; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = client_extensions_abstract_types(&program); + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + + let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/client_extensions_abstract_types/mod.rs b/compiler/crates/relay-transforms/tests/client_extensions_abstract_types/mod.rs deleted file mode 100644 index 7e99e0e201dec..0000000000000 --- a/compiler/crates/relay-transforms/tests/client_extensions_abstract_types/mod.rs +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::client_extensions_abstract_types; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = client_extensions_abstract_types(&program); - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/client_extensions_abstract_types_test.rs b/compiler/crates/relay-transforms/tests/client_extensions_abstract_types_test.rs index 7bf36359d5df8..f0fcaa2c1c3c5 100644 --- a/compiler/crates/relay-transforms/tests/client_extensions_abstract_types_test.rs +++ b/compiler/crates/relay-transforms/tests/client_extensions_abstract_types_test.rs @@ -4,7 +4,7 
@@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<3d2f11178076ec66072479878618fb48>> */ mod client_extensions_abstract_types; @@ -12,30 +12,30 @@ mod client_extensions_abstract_types; use client_extensions_abstract_types::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_spread_on_client_interface() { +#[tokio::test] +async fn fragment_spread_on_client_interface() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface.expected", input, expected).await; } -#[test] -fn fragment_spread_on_client_interface_transitively() { +#[tokio::test] +async fn fragment_spread_on_client_interface_transitively() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_interface_transitively.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_interface_transitively.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_interface_transitively.expected", input, 
expected).await; } -#[test] -fn fragment_spread_on_client_union() { +#[tokio::test] +async fn fragment_spread_on_client_union() { let input = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected"); - test_fixture(transform_fixture, "fragment_spread_on_client_union.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment_spread_on_client_union.graphql", "client_extensions_abstract_types/fixtures/fragment_spread_on_client_union.expected", input, expected).await; } -#[test] -fn inline_fragment_on_client_interface() { +#[tokio::test] +async fn inline_fragment_on_client_interface() { let input = include_str!("client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.graphql"); let expected = include_str!("client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected"); - test_fixture(transform_fixture, "inline_fragment_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline_fragment_on_client_interface.graphql", "client_extensions_abstract_types/fixtures/inline_fragment_on_client_interface.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/client_extensions_test.rs b/compiler/crates/relay-transforms/tests/client_extensions_test.rs index 8ad871e9b4d4d..f5892cdbd7976 100644 --- a/compiler/crates/relay-transforms/tests/client_extensions_test.rs +++ b/compiler/crates/relay-transforms/tests/client_extensions_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<8e76da20110296ac303bd405d2001557>> + * @generated SignedSource<> */ mod client_extensions; @@ -12,51 +12,51 @@ mod client_extensions; use client_extensions::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_conditions() { +#[tokio::test] +async fn client_conditions() { let input = include_str!("client_extensions/fixtures/client-conditions.graphql"); let expected = include_str!("client_extensions/fixtures/client-conditions.expected"); - test_fixture(transform_fixture, "client-conditions.graphql", "client_extensions/fixtures/client-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-conditions.graphql", "client_extensions/fixtures/client-conditions.expected", input, expected).await; } -#[test] -fn client_fields_in_inline_fragments() { +#[tokio::test] +async fn client_fields_in_inline_fragments() { let input = include_str!("client_extensions/fixtures/client-fields-in-inline-fragments.graphql"); let expected = include_str!("client_extensions/fixtures/client-fields-in-inline-fragments.expected"); - test_fixture(transform_fixture, "client-fields-in-inline-fragments.graphql", "client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-in-inline-fragments.graphql", "client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected).await; } -#[test] -fn client_fields_of_client_type() { +#[tokio::test] +async fn client_fields_of_client_type() { let input = include_str!("client_extensions/fixtures/client-fields-of-client-type.graphql"); let expected = include_str!("client_extensions/fixtures/client-fields-of-client-type.expected"); - test_fixture(transform_fixture, "client-fields-of-client-type.graphql", "client_extensions/fixtures/client-fields-of-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-of-client-type.graphql", 
"client_extensions/fixtures/client-fields-of-client-type.expected", input, expected).await; } -#[test] -fn client_fields_on_roots() { +#[tokio::test] +async fn client_fields_on_roots() { let input = include_str!("client_extensions/fixtures/client-fields-on-roots.graphql"); let expected = include_str!("client_extensions/fixtures/client-fields-on-roots.expected"); - test_fixture(transform_fixture, "client-fields-on-roots.graphql", "client_extensions/fixtures/client-fields-on-roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-on-roots.graphql", "client_extensions/fixtures/client-fields-on-roots.expected", input, expected).await; } -#[test] -fn client_linked_fields() { +#[tokio::test] +async fn client_linked_fields() { let input = include_str!("client_extensions/fixtures/client-linked-fields.graphql"); let expected = include_str!("client_extensions/fixtures/client-linked-fields.expected"); - test_fixture(transform_fixture, "client-linked-fields.graphql", "client_extensions/fixtures/client-linked-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-linked-fields.graphql", "client_extensions/fixtures/client-linked-fields.expected", input, expected).await; } -#[test] -fn client_scalar_fields() { +#[tokio::test] +async fn client_scalar_fields() { let input = include_str!("client_extensions/fixtures/client-scalar-fields.graphql"); let expected = include_str!("client_extensions/fixtures/client-scalar-fields.expected"); - test_fixture(transform_fixture, "client-scalar-fields.graphql", "client_extensions/fixtures/client-scalar-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-scalar-fields.graphql", "client_extensions/fixtures/client-scalar-fields.expected", input, expected).await; } -#[test] -fn sibling_client_selections() { +#[tokio::test] +async fn sibling_client_selections() { let input = 
include_str!("client_extensions/fixtures/sibling-client-selections.graphql"); let expected = include_str!("client_extensions/fixtures/sibling-client-selections.expected"); - test_fixture(transform_fixture, "sibling-client-selections.graphql", "client_extensions/fixtures/sibling-client-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "sibling-client-selections.graphql", "client_extensions/fixtures/sibling-client-selections.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/declarative_connection.rs b/compiler/crates/relay-transforms/tests/declarative_connection.rs new file mode 100644 index 0000000000000..3e4388cbe21df --- /dev/null +++ b/compiler/crates/relay-transforms/tests/declarative_connection.rs @@ -0,0 +1,22 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::FeatureFlags; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::transform_declarative_connection; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + transform_declarative_connection( + program, + &ConnectionInterface::default(), + &FeatureFlags::default(), + ) + }) +} diff --git a/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.expected b/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.expected index c626d64902bf1..b3ee83759e91f 100644 --- a/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.expected +++ b/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.expected @@ -5,7 +5,7 @@ mutation CommentCreateMutation( ) { 
commentCreate(input: $input) { comment - @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + @appendNode(connections: $connections, edgeTypeName: "CommentsEdge") { id } } @@ -16,7 +16,7 @@ mutation CommentCreateMutation( $input: CommentCreateInput ) { commentCreate(input: $input) { - comment @__clientField(key: "", handle: "appendNode", filters: null, dynamicKey_UNSTABLE: null, handleArgs: {connections: $connections, edgeTypeName: "CommentEdge"}) { + comment @__clientField(key: "", handle: "appendNode", filters: null, dynamicKey_UNSTABLE: null, handleArgs: {connections: $connections, edgeTypeName: "CommentsEdge"}) { id } } diff --git a/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.graphql b/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.graphql index fc8195b448bbe..f82898dc3a7ba 100644 --- a/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.graphql +++ b/compiler/crates/relay-transforms/tests/declarative_connection/fixtures/append-node-edge-literal.graphql @@ -4,7 +4,7 @@ mutation CommentCreateMutation( ) { commentCreate(input: $input) { comment - @appendNode(connections: $connections, edgeTypeName: "CommentEdge") { + @appendNode(connections: $connections, edgeTypeName: "CommentsEdge") { id } } diff --git a/compiler/crates/relay-transforms/tests/declarative_connection/mod.rs b/compiler/crates/relay-transforms/tests/declarative_connection/mod.rs deleted file mode 100644 index a513f077875bb..0000000000000 --- a/compiler/crates/relay-transforms/tests/declarative_connection/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_declarative_connection; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - transform_declarative_connection(program, &ConnectionInterface::default()) - }) -} diff --git a/compiler/crates/relay-transforms/tests/declarative_connection_test.rs b/compiler/crates/relay-transforms/tests/declarative_connection_test.rs index 21ad0fae0ef06..242329d28e648 100644 --- a/compiler/crates/relay-transforms/tests/declarative_connection_test.rs +++ b/compiler/crates/relay-transforms/tests/declarative_connection_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<73cfaa2e0393cd88236a48e688f50dea>> + * @generated SignedSource<<838509b6ec69597e2eeb13479b5a2f7f>> */ mod declarative_connection; @@ -12,79 +12,79 @@ mod declarative_connection; use declarative_connection::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn append_edge() { +#[tokio::test] +async fn append_edge() { let input = include_str!("declarative_connection/fixtures/append-edge.graphql"); let expected = include_str!("declarative_connection/fixtures/append-edge.expected"); - test_fixture(transform_fixture, "append-edge.graphql", "declarative_connection/fixtures/append-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-edge.graphql", "declarative_connection/fixtures/append-edge.expected", input, expected).await; } -#[test] -fn append_edge_unspported_invalid() { +#[tokio::test] +async fn append_edge_unspported_invalid() { let input = include_str!("declarative_connection/fixtures/append-edge-unspported.invalid.graphql"); let expected = include_str!("declarative_connection/fixtures/append-edge-unspported.invalid.expected"); - 
test_fixture(transform_fixture, "append-edge-unspported.invalid.graphql", "declarative_connection/fixtures/append-edge-unspported.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-edge-unspported.invalid.graphql", "declarative_connection/fixtures/append-edge-unspported.invalid.expected", input, expected).await; } -#[test] -fn append_node() { +#[tokio::test] +async fn append_node() { let input = include_str!("declarative_connection/fixtures/append-node.graphql"); let expected = include_str!("declarative_connection/fixtures/append-node.expected"); - test_fixture(transform_fixture, "append-node.graphql", "declarative_connection/fixtures/append-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-node.graphql", "declarative_connection/fixtures/append-node.expected", input, expected).await; } -#[test] -fn append_node_edge_literal() { +#[tokio::test] +async fn append_node_edge_literal() { let input = include_str!("declarative_connection/fixtures/append-node-edge-literal.graphql"); let expected = include_str!("declarative_connection/fixtures/append-node-edge-literal.expected"); - test_fixture(transform_fixture, "append-node-edge-literal.graphql", "declarative_connection/fixtures/append-node-edge-literal.expected", input, expected); + test_fixture(transform_fixture, file!(), "append-node-edge-literal.graphql", "declarative_connection/fixtures/append-node-edge-literal.expected", input, expected).await; } -#[test] -fn append_node_unsupported_invalid() { +#[tokio::test] +async fn append_node_unsupported_invalid() { let input = include_str!("declarative_connection/fixtures/append-node-unsupported.invalid.graphql"); let expected = include_str!("declarative_connection/fixtures/append-node-unsupported.invalid.expected"); - test_fixture(transform_fixture, "append-node-unsupported.invalid.graphql", "declarative_connection/fixtures/append-node-unsupported.invalid.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "append-node-unsupported.invalid.graphql", "declarative_connection/fixtures/append-node-unsupported.invalid.expected", input, expected).await; } -#[test] -fn delete_edge_from_connection() { +#[tokio::test] +async fn delete_edge_from_connection() { let input = include_str!("declarative_connection/fixtures/delete-edge-from-connection.graphql"); let expected = include_str!("declarative_connection/fixtures/delete-edge-from-connection.expected"); - test_fixture(transform_fixture, "delete-edge-from-connection.graphql", "declarative_connection/fixtures/delete-edge-from-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-edge-from-connection.graphql", "declarative_connection/fixtures/delete-edge-from-connection.expected", input, expected).await; } -#[test] -fn delete_edge_from_connection_on_unsupported_type_invalid() { +#[tokio::test] +async fn delete_edge_from_connection_on_unsupported_type_invalid() { let input = include_str!("declarative_connection/fixtures/delete-edge-from-connection-on-unsupported-type.invalid.graphql"); let expected = include_str!("declarative_connection/fixtures/delete-edge-from-connection-on-unsupported-type.invalid.expected"); - test_fixture(transform_fixture, "delete-edge-from-connection-on-unsupported-type.invalid.graphql", "declarative_connection/fixtures/delete-edge-from-connection-on-unsupported-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-edge-from-connection-on-unsupported-type.invalid.graphql", "declarative_connection/fixtures/delete-edge-from-connection-on-unsupported-type.invalid.expected", input, expected).await; } -#[test] -fn delete_edge_from_connection_plural() { +#[tokio::test] +async fn delete_edge_from_connection_plural() { let input = include_str!("declarative_connection/fixtures/delete-edge-from-connection-plural.graphql"); let expected = 
include_str!("declarative_connection/fixtures/delete-edge-from-connection-plural.expected"); - test_fixture(transform_fixture, "delete-edge-from-connection-plural.graphql", "declarative_connection/fixtures/delete-edge-from-connection-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-edge-from-connection-plural.graphql", "declarative_connection/fixtures/delete-edge-from-connection-plural.expected", input, expected).await; } -#[test] -fn delete_from_store() { +#[tokio::test] +async fn delete_from_store() { let input = include_str!("declarative_connection/fixtures/delete-from-store.graphql"); let expected = include_str!("declarative_connection/fixtures/delete-from-store.expected"); - test_fixture(transform_fixture, "delete-from-store.graphql", "declarative_connection/fixtures/delete-from-store.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-from-store.graphql", "declarative_connection/fixtures/delete-from-store.expected", input, expected).await; } -#[test] -fn delete_from_store_plural() { +#[tokio::test] +async fn delete_from_store_plural() { let input = include_str!("declarative_connection/fixtures/delete-from-store-plural.graphql"); let expected = include_str!("declarative_connection/fixtures/delete-from-store-plural.expected"); - test_fixture(transform_fixture, "delete-from-store-plural.graphql", "declarative_connection/fixtures/delete-from-store-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-from-store-plural.graphql", "declarative_connection/fixtures/delete-from-store-plural.expected", input, expected).await; } -#[test] -fn delete_on_unspported_type_invalid() { +#[tokio::test] +async fn delete_on_unspported_type_invalid() { let input = include_str!("declarative_connection/fixtures/delete-on-unspported-type.invalid.graphql"); let expected = include_str!("declarative_connection/fixtures/delete-on-unspported-type.invalid.expected"); - 
test_fixture(transform_fixture, "delete-on-unspported-type.invalid.graphql", "declarative_connection/fixtures/delete-on-unspported-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "delete-on-unspported-type.invalid.graphql", "declarative_connection/fixtures/delete-on-unspported-type.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/defer_stream.rs b/compiler/crates/relay-transforms/tests/defer_stream.rs new file mode 100644 index 0000000000000..a7435de962b42 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/defer_stream.rs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_config::DeferStreamInterface; +use relay_transforms::transform_defer_stream; +use relay_transforms::unwrap_custom_directive_selection; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let defer_stream_interface = DeferStreamInterface::default(); + apply_transform_for_test(fixture, |program| { + let program = transform_defer_stream(program, &defer_stream_interface)?; + let program = unwrap_custom_directive_selection(&program, defer_stream_interface); + Ok(program) + }) +} diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected index 4b424a088dd26..0061be156e355 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.expected @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1) { + actors @stream(initialCount: 1) { name } } @@ -24,7 +24,7 @@ query QueryWithFragmentWithStream( fragment FeedbackFragment on Feedback { id - actors @stream(label: "FeedbackFragment$stream$actors", initial_count: 1) { + actors @stream(label: "FeedbackFragment$stream$actors", initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql index 23fa23b3e8e43..739db7de64c6a 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-default-label.graphql @@ -7,7 +7,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1) { + actors @stream(initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected index 37c40c910a975..7fe5f5834cdda 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected @@ -9,10 +9,10 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "actors") { + actors @stream(initialCount: 1, label: "actors") { name } - otherActors: actors @stream(initial_count: 1, label: "actors") { + otherActors: actors @stream(initialCount: 1, label: "actors") { # invalid: duplicate label name } @@ -22,7 +22,7 @@ fragment FeedbackFragment on Feedback { fragment-with-stream-duplicate-label.invalid.graphql:11:10 10 │ id - 11 │ actors @stream(initial_count: 1, label: "actors") { + 11 │ actors @stream(initialCount: 1, label: "actors") { │ ^^^^^^^ 12 │ name @@ -30,6 +30,6 @@ fragment FeedbackFragment on Feedback { fragment-with-stream-duplicate-label.invalid.graphql:14:23 13 │ } - 14 │ otherActors: actors @stream(initial_count: 1, label: "actors") { + 14 │ otherActors: actors @stream(initialCount: 1, label: "actors") { │ ^^^^^^^ 15 │ # invalid: duplicate label diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.graphql index 6f0d3488b2cb9..a05f524d1de46 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.graphql @@ -8,10 +8,10 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "actors") { + actors @stream(initialCount: 1, label: "actors") { name } - otherActors: actors @stream(initial_count: 1, label: "actors") { + otherActors: actors @stream(initialCount: 1, label: "actors") { # invalid: duplicate label name } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.expected index 07a4e1cfba531..d3db2ff831380 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.expected @@ -9,7 +9,7 @@ query QueryWithFragmentWithStream($id: ID!, $enableStream: Boolean) { fragment FeedbackFragment on Feedback { id actors - @stream(initial_count: 1, label: "StreamedActorsLabel", if: $enableStream) { + @stream(initialCount: 1, label: "StreamedActorsLabel", if: $enableStream) { name } } @@ -26,7 +26,7 @@ query QueryWithFragmentWithStream( fragment FeedbackFragment on Feedback { id - actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", if: $enableStream, initial_count: 1) { + actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", if: $enableStream, initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.graphql index 20ae2c07f0bcf..266ebda30663d 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-if-arg.graphql @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!, $enableStream: Boolean) { fragment FeedbackFragment on Feedback { id 
actors - @stream(initial_count: 1, label: "StreamedActorsLabel", if: $enableStream) { + @stream(initialCount: 1, label: "StreamedActorsLabel", if: $enableStream) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected index 4c128c9f3f139..1f8aade373550 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!, $initialCount: Int) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: $initialCount, label: "StreamedActorsLabel") { + actors @stream(initialCount: $initialCount, label: "StreamedActorsLabel") { name } } @@ -25,7 +25,7 @@ query QueryWithFragmentWithStream( fragment FeedbackFragment on Feedback { id - actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", initial_count: $initialCount) { + actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", initialCount: $initialCount) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.graphql index 9e689d6875cb5..338a8274a7ed7 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-initial-count-arg.graphql @@ -7,7 +7,7 @@ query QueryWithFragmentWithStream($id: ID!, $initialCount: Int) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: $initialCount, label: "StreamedActorsLabel") { + actors 
@stream(initialCount: $initialCount, label: "StreamedActorsLabel") { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected index e8b5acc305d89..58d629b758765 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected @@ -10,15 +10,15 @@ query QueryWithFragmentWithStream($id: ID!, $initialCount: Int) { fragment FeedbackFragment on Feedback { id actors @stream(label: "StreamedActorsLabel") { - # invalid: missing initial_count + # invalid: missing initialCount name } } ==================================== ERROR ==================================== -✖︎ Missing required argument: `initial_count` +✖︎ Missing required argument: `initialCount` fragment-with-stream-missing-initial-count-arg.invalid.graphql:11:11 10 │ id 11 │ actors @stream(label: "StreamedActorsLabel") { │ ^^^^^^ - 12 │ # invalid: missing initial_count + 12 │ # invalid: missing initialCount diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.graphql index c21bf7d7967ff..205c6284cd71c 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.graphql @@ -9,7 +9,7 @@ query QueryWithFragmentWithStream($id: ID!, $initialCount: Int) { fragment FeedbackFragment on Feedback { id actors 
@stream(label: "StreamedActorsLabel") { - # invalid: missing initial_count + # invalid: missing initialCount name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected index fe338e6a8ba90..d1b7e4edc6d6d 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected @@ -9,13 +9,13 @@ query QueryWithFragmentWithStream($id: ID!, $label: String!) { fragment UserFragment on User { id - name @stream(initial_count: 1, label: $label) + name @stream(initialCount: 1, label: $label) } ==================================== ERROR ==================================== ✖︎ Invalid use of @stream on scalar field 'name' fragment-with-stream-on-scalar-field.invalid.graphql:11:8 10 │ id - 11 │ name @stream(initial_count: 1, label: $label) + 11 │ name @stream(initialCount: 1, label: $label) │ ^^^^^^^ 12 │ } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.graphql index 5ee3aa3cc33fc..907553dc9f516 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.graphql @@ -8,5 +8,5 @@ query QueryWithFragmentWithStream($id: ID!, $label: String!) 
{ fragment UserFragment on User { id - name @stream(initial_count: 1, label: $label) + name @stream(initialCount: 1, label: $label) } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.expected index fdcdc1f94f927..18a1d123489f1 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.expected @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel", if: false) { + actors @stream(initialCount: 1, label: "StreamedActorsLabel", if: false) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.graphql index cc529d908f6db..9fb1e0e3212d6 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-statically-disabled.graphql @@ -7,7 +7,7 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel", if: false) { + actors @stream(initialCount: 1, label: "StreamedActorsLabel", if: false) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected index e63411c2c5e3f..f7e18f6a86691 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected @@ -11,9 +11,9 @@ fragment FeedbackFragment on Feedback { actors @stream( if: true - initial_count: 1 + initialCount: 1 label: "StreamedActorsLabel" - use_customized_batch: $useCustomizedBatch + useCustomizedBatch: $useCustomizedBatch ) { name } @@ -31,7 +31,7 @@ query QueryWithFragmentWithStream( fragment FeedbackFragment on Feedback { id - actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", if: true, initial_count: 1, use_customized_batch: $useCustomizedBatch) { + actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", if: true, initialCount: 1, useCustomizedBatch: $useCustomizedBatch) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.graphql index 5c0f7c15b697e..b252e2ba8dfbe 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.graphql @@ -10,9 +10,9 @@ fragment FeedbackFragment on Feedback { actors @stream( if: true - 
initial_count: 1 + initialCount: 1 label: "StreamedActorsLabel" - use_customized_batch: $useCustomizedBatch + useCustomizedBatch: $useCustomizedBatch ) { name } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected index 7c03a46f98759..de2cc3bf22754 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected @@ -9,7 +9,7 @@ query QueryWithFragmentWithStream($id: ID!, $label: String!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: $label) { + actors @stream(initialCount: 1, label: $label) { name } } @@ -18,6 +18,6 @@ fragment FeedbackFragment on Feedback { fragment-with-stream-variable-label.invalid.graphql:11:10 10 │ id - 11 │ actors @stream(initial_count: 1, label: $label) { + 11 │ actors @stream(initialCount: 1, label: $label) { │ ^^^^^^^ 12 │ name diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.graphql index 0877996f4bd7b..9d6f748019d5d 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream-variable-label.invalid.graphql @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!, $label: String!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: $label) { + actors @stream(initialCount: 1, label: $label) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.expected index 61c9e8b532e49..2c4055bc32bc2 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.expected @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } @@ -24,7 +24,7 @@ query QueryWithFragmentWithStream( fragment FeedbackFragment on Feedback { id - actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", initial_count: 1) { + actors @stream(label: "FeedbackFragment$stream$StreamedActorsLabel", initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.graphql index 8891fba1b9c8d..ceb87c5700edc 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/fragment-with-stream.graphql @@ -7,7 +7,7 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.expected index 94b95aa77dd22..99d8ff40f48c6 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.expected @@ -2,7 +2,7 @@ query QueryWithStream($id: ID!) { node(id: $id) { ... on Feedback { - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } @@ -14,7 +14,7 @@ query QueryWithStream( ) { node(id: $id) { ... on Feedback { - actors @stream(label: "QueryWithStream$stream$StreamedActorsLabel", initial_count: 1) { + actors @stream(label: "QueryWithStream$stream$StreamedActorsLabel", initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.graphql index 2b7d7173527d1..cb244a79f1267 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/query-with-stream.graphql @@ -1,7 +1,7 @@ query QueryWithStream($id: ID!) { node(id: $id) { ... 
on Feedback { - actors @stream(initial_count: 1, label: "StreamedActorsLabel") { + actors @stream(initialCount: 1, label: "StreamedActorsLabel") { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.expected b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.expected index 4d392cfd59708..e5a18b3a14b11 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.expected +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.expected @@ -3,7 +3,7 @@ fragment FeedbackFragment on Feedback { id - actor @stream(initial_count: 1) { + actor @stream(initialCount: 1) { name } } @@ -12,6 +12,6 @@ fragment FeedbackFragment on Feedback { stream.invalid.graphql:5:9 4 │ id - 5 │ actor @stream(initial_count: 1) { + 5 │ actor @stream(initialCount: 1) { │ ^^^^^^^ 6 │ name diff --git a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.graphql b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.graphql index b8e9a950fe4fc..5c640c998741a 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/defer_stream/fixtures/stream.invalid.graphql @@ -2,7 +2,7 @@ fragment FeedbackFragment on Feedback { id - actor @stream(initial_count: 1) { + actor @stream(initialCount: 1) { name } } diff --git a/compiler/crates/relay-transforms/tests/defer_stream/mod.rs b/compiler/crates/relay-transforms/tests/defer_stream/mod.rs deleted file mode 100644 index f5991116af9b3..0000000000000 --- a/compiler/crates/relay-transforms/tests/defer_stream/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_defer_stream; -use relay_transforms::unwrap_custom_directive_selection; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let program = transform_defer_stream(program)?; - let program = unwrap_custom_directive_selection(&program); - Ok(program) - }) -} diff --git a/compiler/crates/relay-transforms/tests/defer_stream_test.rs b/compiler/crates/relay-transforms/tests/defer_stream_test.rs index 7fd9f3fc8d517..a6f67cbac9099 100644 --- a/compiler/crates/relay-transforms/tests/defer_stream_test.rs +++ b/compiler/crates/relay-transforms/tests/defer_stream_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<5a6476f3b75666c15c6b256baace7519>> */ mod defer_stream; @@ -12,156 +12,156 @@ mod defer_stream; use defer_stream::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_defer() { +#[tokio::test] +async fn fragment_with_defer() { let input = include_str!("defer_stream/fixtures/fragment-with-defer.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer.expected"); - test_fixture(transform_fixture, "fragment-with-defer.graphql", "defer_stream/fixtures/fragment-with-defer.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer.graphql", "defer_stream/fixtures/fragment-with-defer.expected", input, expected).await; } -#[test] -fn fragment_with_defer_arguments() { +#[tokio::test] +async fn fragment_with_defer_arguments() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-arguments.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-arguments.expected"); - test_fixture(transform_fixture, 
"fragment-with-defer-arguments.graphql", "defer_stream/fixtures/fragment-with-defer-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-arguments.graphql", "defer_stream/fixtures/fragment-with-defer-arguments.expected", input, expected).await; } -#[test] -fn fragment_with_defer_default_label() { +#[tokio::test] +async fn fragment_with_defer_default_label() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-default-label.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-default-label.expected"); - test_fixture(transform_fixture, "fragment-with-defer-default-label.graphql", "defer_stream/fixtures/fragment-with-defer-default-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-default-label.graphql", "defer_stream/fixtures/fragment-with-defer-default-label.expected", input, expected).await; } -#[test] -fn fragment_with_defer_duplicate_label_invalid() { +#[tokio::test] +async fn fragment_with_defer_duplicate_label_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-duplicate-label.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-duplicate-label.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-defer-duplicate-label.invalid.graphql", "defer_stream/fixtures/fragment-with-defer-duplicate-label.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-duplicate-label.invalid.graphql", "defer_stream/fixtures/fragment-with-defer-duplicate-label.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_defer_if_arg() { +#[tokio::test] +async fn fragment_with_defer_if_arg() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-if-arg.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-if-arg.expected"); - test_fixture(transform_fixture, 
"fragment-with-defer-if-arg.graphql", "defer_stream/fixtures/fragment-with-defer-if-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-if-arg.graphql", "defer_stream/fixtures/fragment-with-defer-if-arg.expected", input, expected).await; } -#[test] -fn fragment_with_defer_if_false() { +#[tokio::test] +async fn fragment_with_defer_if_false() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-if-false.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-if-false.expected"); - test_fixture(transform_fixture, "fragment-with-defer-if-false.graphql", "defer_stream/fixtures/fragment-with-defer-if-false.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-if-false.graphql", "defer_stream/fixtures/fragment-with-defer-if-false.expected", input, expected).await; } -#[test] -fn fragment_with_defer_statically_disabled() { +#[tokio::test] +async fn fragment_with_defer_statically_disabled() { let input = include_str!("defer_stream/fixtures/fragment-with-defer-statically-disabled.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-defer-statically-disabled.expected"); - test_fixture(transform_fixture, "fragment-with-defer-statically-disabled.graphql", "defer_stream/fixtures/fragment-with-defer-statically-disabled.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-statically-disabled.graphql", "defer_stream/fixtures/fragment-with-defer-statically-disabled.expected", input, expected).await; } -#[test] -fn fragment_with_stream() { +#[tokio::test] +async fn fragment_with_stream() { let input = include_str!("defer_stream/fixtures/fragment-with-stream.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream.expected"); - test_fixture(transform_fixture, "fragment-with-stream.graphql", "defer_stream/fixtures/fragment-with-stream.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "fragment-with-stream.graphql", "defer_stream/fixtures/fragment-with-stream.expected", input, expected).await; } -#[test] -fn fragment_with_stream_default_label() { +#[tokio::test] +async fn fragment_with_stream_default_label() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-default-label.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-default-label.expected"); - test_fixture(transform_fixture, "fragment-with-stream-default-label.graphql", "defer_stream/fixtures/fragment-with-stream-default-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-default-label.graphql", "defer_stream/fixtures/fragment-with-stream-default-label.expected", input, expected).await; } -#[test] -fn fragment_with_stream_duplicate_label_invalid() { +#[tokio::test] +async fn fragment_with_stream_duplicate_label_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-duplicate-label.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-duplicate-label.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-duplicate-label.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_stream_if_arg() { +#[tokio::test] +async fn fragment_with_stream_if_arg() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-if-arg.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-if-arg.expected"); - test_fixture(transform_fixture, "fragment-with-stream-if-arg.graphql", "defer_stream/fixtures/fragment-with-stream-if-arg.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "fragment-with-stream-if-arg.graphql", "defer_stream/fixtures/fragment-with-stream-if-arg.expected", input, expected).await; } -#[test] -fn fragment_with_stream_initial_count_arg() { +#[tokio::test] +async fn fragment_with_stream_initial_count_arg() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-initial-count-arg.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected"); - test_fixture(transform_fixture, "fragment-with-stream-initial-count-arg.graphql", "defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-initial-count-arg.graphql", "defer_stream/fixtures/fragment-with-stream-initial-count-arg.expected", input, expected).await; } -#[test] -fn fragment_with_stream_missing_initial_count_arg_invalid() { +#[tokio::test] +async fn fragment_with_stream_missing_initial_count_arg_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-missing-initial-count-arg.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-missing-initial-count-arg.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-missing-initial-count-arg.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_stream_on_scalar_field_invalid() { +#[tokio::test] +async fn fragment_with_stream_on_scalar_field_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.graphql"); let expected = 
include_str!("defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-on-scalar-field.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-on-scalar-field.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-on-scalar-field.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_stream_statically_disabled() { +#[tokio::test] +async fn fragment_with_stream_statically_disabled() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-statically-disabled.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-statically-disabled.expected"); - test_fixture(transform_fixture, "fragment-with-stream-statically-disabled.graphql", "defer_stream/fixtures/fragment-with-stream-statically-disabled.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-statically-disabled.graphql", "defer_stream/fixtures/fragment-with-stream-statically-disabled.expected", input, expected).await; } -#[test] -fn fragment_with_stream_use_customized_batch_arg() { +#[tokio::test] +async fn fragment_with_stream_use_customized_batch_arg() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected"); - test_fixture(transform_fixture, "fragment-with-stream-use_customized_batch-arg.graphql", "defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-use_customized_batch-arg.graphql", "defer_stream/fixtures/fragment-with-stream-use_customized_batch-arg.expected", input, expected).await; } -#[test] -fn fragment_with_stream_variable_label_invalid() 
{ +#[tokio::test] +async fn fragment_with_stream_variable_label_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-stream-variable-label.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-variable-label.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-variable-label.invalid.graphql", "defer_stream/fixtures/fragment-with-stream-variable-label.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_variable_label_invalid() { +#[tokio::test] +async fn fragment_with_variable_label_invalid() { let input = include_str!("defer_stream/fixtures/fragment-with-variable-label.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/fragment-with-variable-label.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-variable-label.invalid.graphql", "defer_stream/fixtures/fragment-with-variable-label.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-variable-label.invalid.graphql", "defer_stream/fixtures/fragment-with-variable-label.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_with_defer_invalid() { +#[tokio::test] +async fn inline_fragment_with_defer_invalid() { let input = include_str!("defer_stream/fixtures/inline-fragment-with-defer.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/inline-fragment-with-defer.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-with-defer.invalid.graphql", "defer_stream/fixtures/inline-fragment-with-defer.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-with-defer.invalid.graphql", "defer_stream/fixtures/inline-fragment-with-defer.invalid.expected", input, 
expected).await; } -#[test] -fn query_with_defer() { +#[tokio::test] +async fn query_with_defer() { let input = include_str!("defer_stream/fixtures/query-with-defer.graphql"); let expected = include_str!("defer_stream/fixtures/query-with-defer.expected"); - test_fixture(transform_fixture, "query-with-defer.graphql", "defer_stream/fixtures/query-with-defer.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-defer.graphql", "defer_stream/fixtures/query-with-defer.expected", input, expected).await; } -#[test] -fn query_with_stream() { +#[tokio::test] +async fn query_with_stream() { let input = include_str!("defer_stream/fixtures/query-with-stream.graphql"); let expected = include_str!("defer_stream/fixtures/query-with-stream.expected"); - test_fixture(transform_fixture, "query-with-stream.graphql", "defer_stream/fixtures/query-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-stream.graphql", "defer_stream/fixtures/query-with-stream.expected", input, expected).await; } -#[test] -fn stream_invalid() { +#[tokio::test] +async fn stream_invalid() { let input = include_str!("defer_stream/fixtures/stream.invalid.graphql"); let expected = include_str!("defer_stream/fixtures/stream.invalid.expected"); - test_fixture(transform_fixture, "stream.invalid.graphql", "defer_stream/fixtures/stream.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream.invalid.graphql", "defer_stream/fixtures/stream.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields.rs b/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields.rs new file mode 100644 index 0000000000000..a506676d3d866 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields.rs @@ -0,0 +1,61 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::collections::HashMap; +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use intern::intern; +use relay_config::NonNodeIdFieldsConfig; +use relay_config::SchemaConfig; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::disallow_non_node_id_fields; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let schema_config = SchemaConfig { + non_node_id_fields: Some(NonNodeIdFieldsConfig { + allowed_id_types: { + let mut mappings = HashMap::new(); + + // Add types to allow here from `testschema.graphql` + mappings.insert(intern!("NonNode"), intern!("String")); + + // Add test types that should also be allowed here + mappings.insert( + intern!("UserWithAllowedCustomIDType"), + intern!("AllowedCustomIDType"), + ); + + mappings + }, + }), + ..Default::default() + }; + + disallow_non_node_id_fields(&program, &schema_config) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields/mod.rs 
b/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields/mod.rs deleted file mode 100644 index 40aec1dfc25c0..0000000000000 --- a/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields/mod.rs +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::collections::HashMap; -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use intern::intern; -use relay_config::NonNodeIdFieldsConfig; -use relay_config::SchemaConfig; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::disallow_non_node_id_fields; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let schema_config = SchemaConfig { - non_node_id_fields: Some(NonNodeIdFieldsConfig { - allowed_id_types: { - let mut mappings = HashMap::new(); - - // Add types to allow here from `testschema.graphql` - mappings.insert(intern!("NonNode"), intern!("String")); - - // Add test types that should also be allowed here - mappings.insert( - intern!("UserWithAllowedCustomIDType"), - intern!("AllowedCustomIDType"), - ); - - mappings - }, - }), - ..Default::default() - }; - - disallow_non_node_id_fields(&program, &schema_config) - .map_err(|diagnostics| 
diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields_test.rs b/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields_test.rs index b77689146777d..22e28adb7cc31 100644 --- a/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields_test.rs +++ b/compiler/crates/relay-transforms/tests/disallow_non_node_id_fields_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<2a4c5d6f2ace83c3fdcea2feebf2ea23>> */ mod disallow_non_node_id_fields; @@ -12,37 +12,37 @@ mod disallow_non_node_id_fields; use disallow_non_node_id_fields::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn disallowed_definitions_invalid() { +#[tokio::test] +async fn disallowed_definitions_invalid() { let input = include_str!("disallow_non_node_id_fields/fixtures/disallowed-definitions.invalid.graphql"); let expected = include_str!("disallow_non_node_id_fields/fixtures/disallowed-definitions.invalid.expected"); - test_fixture(transform_fixture, "disallowed-definitions.invalid.graphql", "disallow_non_node_id_fields/fixtures/disallowed-definitions.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "disallowed-definitions.invalid.graphql", "disallow_non_node_id_fields/fixtures/disallowed-definitions.invalid.expected", input, expected).await; } -#[test] -fn illegal_scalar_invalid() { +#[tokio::test] +async fn illegal_scalar_invalid() { let input = include_str!("disallow_non_node_id_fields/fixtures/illegal-scalar.invalid.graphql"); let expected = include_str!("disallow_non_node_id_fields/fixtures/illegal-scalar.invalid.expected"); - test_fixture(transform_fixture, "illegal-scalar.invalid.graphql", 
"disallow_non_node_id_fields/fixtures/illegal-scalar.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "illegal-scalar.invalid.graphql", "disallow_non_node_id_fields/fixtures/illegal-scalar.invalid.expected", input, expected).await; } -#[test] -fn invalid_id_selection_allowed() { +#[tokio::test] +async fn invalid_id_selection_allowed() { let input = include_str!("disallow_non_node_id_fields/fixtures/invalid-id-selection-allowed.graphql"); let expected = include_str!("disallow_non_node_id_fields/fixtures/invalid-id-selection-allowed.expected"); - test_fixture(transform_fixture, "invalid-id-selection-allowed.graphql", "disallow_non_node_id_fields/fixtures/invalid-id-selection-allowed.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-id-selection-allowed.graphql", "disallow_non_node_id_fields/fixtures/invalid-id-selection-allowed.expected", input, expected).await; } -#[test] -fn invalid_id_selection_disallowed_invalid() { +#[tokio::test] +async fn invalid_id_selection_disallowed_invalid() { let input = include_str!("disallow_non_node_id_fields/fixtures/invalid-id-selection-disallowed.invalid.graphql"); let expected = include_str!("disallow_non_node_id_fields/fixtures/invalid-id-selection-disallowed.invalid.expected"); - test_fixture(transform_fixture, "invalid-id-selection-disallowed.invalid.graphql", "disallow_non_node_id_fields/fixtures/invalid-id-selection-disallowed.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-id-selection-disallowed.invalid.graphql", "disallow_non_node_id_fields/fixtures/invalid-id-selection-disallowed.invalid.expected", input, expected).await; } -#[test] -fn valid_id_selection() { +#[tokio::test] +async fn valid_id_selection() { let input = include_str!("disallow_non_node_id_fields/fixtures/valid-id-selection.graphql"); let expected = include_str!("disallow_non_node_id_fields/fixtures/valid-id-selection.expected"); - 
test_fixture(transform_fixture, "valid-id-selection.graphql", "disallow_non_node_id_fields/fixtures/valid-id-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "valid-id-selection.graphql", "disallow_non_node_id_fields/fixtures/valid-id-selection.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations.rs b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations.rs new file mode 100644 index 0000000000000..c53344c0b8bc5 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations.rs @@ -0,0 +1,44 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::disallow_readtime_features_in_mutations; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + disallow_readtime_features_in_mutations( + &program, + &FeatureFlag::Disabled, + 
&FeatureFlag::Disabled, + false, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.expected new file mode 100644 index 0000000000000..0bdbbd81a4e42 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.expected @@ -0,0 +1,14 @@ +==================================== INPUT ==================================== +mutation MyMutation { + setName(name: "Alice") { + ...myFragment + } +} + +fragment myFragment on User { + name @required(action: THROW) +} + +# %extensions% +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.graphql new file mode 100644 index 0000000000000..95559422408e0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.graphql @@ -0,0 +1,11 @@ +mutation MyMutation { + setName(name: "Alice") { + ...myFragment + } +} + +fragment myFragment on User { + name @required(action: THROW) +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.expected 
b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.expected new file mode 100644 index 0000000000000..10fbfe1483c2a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +# expected-to-throw +mutation MyMutation { + some_resolver +} + +# %extensions% +type SomeType { + id: ID +} +extend type Mutation { + some_resolver: SomeType @relay_resolver +} +==================================== ERROR ==================================== +✖︎ Expected selections on field `some_resolver` of type `Mutation` + + mutation_with_linked_resolver.invalid.graphql:3:3 + 2 │ mutation MyMutation { + 3 │ some_resolver + │ ^^^^^^^^^^^^^ + 4 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.graphql new file mode 100644 index 0000000000000..51f8e0a1c356e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.graphql @@ -0,0 +1,12 @@ +# expected-to-throw +mutation MyMutation { + some_resolver +} + +# %extensions% +type SomeType { + id: ID +} +extend type Mutation { + some_resolver: SomeType @relay_resolver +} diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.expected new file mode 100644 index 0000000000000..22a16c134559b --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.expected @@ -0,0 +1,17 @@ +==================================== INPUT ==================================== +# expected-to-throw +mutation MyMutation { + setName(name: "Alice") { + name @required(action: THROW) + } +} + +# %extensions% +==================================== ERROR ==================================== +✖︎ Unexpected `@required(action: THROW)` directive in mutation response. The use of `@required(action: THROW)` is not supported in mutations. + + mutation_with_required_field.invalid.graphql:4:10 + 3 │ setName(name: "Alice") { + 4 │ name @required(action: THROW) + │ ^^^^^^^^^ + 5 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.graphql new file mode 100644 index 0000000000000..a146eef66dba3 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.graphql @@ -0,0 +1,8 @@ +# expected-to-throw +mutation MyMutation { + setName(name: "Alice") { + name @required(action: THROW) + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.expected new file mode 100644 index 0000000000000..995d490a3d127 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.expected @@ -0,0 +1,19 @@ +==================================== INPUT 
==================================== +# expected-to-throw +mutation MyMutation { + setName(name: "Alice") { + ... on User { + name @required(action: THROW) + } + } +} + +# %extensions% +==================================== ERROR ==================================== +✖︎ Unexpected `@required(action: THROW)` directive in mutation response. The use of `@required(action: THROW)` is not supported in mutations. + + mutation_with_required_field_in_inline_fragment.invalid.graphql:5:12 + 4 │ ... on User { + 5 │ name @required(action: THROW) + │ ^^^^^^^^^ + 6 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.graphql new file mode 100644 index 0000000000000..a0d303a74e49a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.graphql @@ -0,0 +1,10 @@ +# expected-to-throw +mutation MyMutation { + setName(name: "Alice") { + ... 
on User { + name @required(action: THROW) + } + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.expected new file mode 100644 index 0000000000000..1ecc88886781b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +mutation MyMutation { + setName(name: "Alice") { + name @required(action: LOG) + also_name: name @required(action: NONE) + } +} + +# %extensions% +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.graphql new file mode 100644 index 0000000000000..0660b09372fb7 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.graphql @@ -0,0 +1,8 @@ +mutation MyMutation { + setName(name: "Alice") { + name @required(action: LOG) + also_name: name @required(action: NONE) + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.expected new file mode 100644 index 0000000000000..eba30a14935fe --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +# expected-to-throw +mutation MyMutation { + some_resolver +} + +# %extensions% +extend type Mutation { + some_resolver: Int @relay_resolver +} +==================================== ERROR ==================================== +✖︎ Unexpected `@RelayResolver` field referenced in mutation response. Relay Resolver fields may not be read as part of a mutation response. + + mutation_with_scalar_resolver.invalid.graphql:3:3 + 2 │ mutation MyMutation { + 3 │ some_resolver + │ ^^^^^^^^^^^^^ + 4 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.graphql new file mode 100644 index 0000000000000..cb14f4f2bec41 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.graphql @@ -0,0 +1,9 @@ +# expected-to-throw +mutation MyMutation { + some_resolver +} + +# %extensions% +extend type Mutation { + some_resolver: Int @relay_resolver +} diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.expected b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.expected new file mode 100644 index 0000000000000..98111f199977d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.expected @@ -0,0 +1,10 @@ +==================================== INPUT ==================================== +query MyQuery { + me { + name @required(action: THROW) + 
} +} + +# %extensions% +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.graphql b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.graphql new file mode 100644 index 0000000000000..e0a92e1feb35a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations/fixtures/query_with_required_field.graphql @@ -0,0 +1,7 @@ +query MyQuery { + me { + name @required(action: THROW) + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations_test.rs b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations_test.rs new file mode 100644 index 0000000000000..d499ad40a3e0a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_readtime_features_in_mutations_test.rs @@ -0,0 +1,62 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @generated SignedSource<> + */ + +mod disallow_readtime_features_in_mutations; + +use disallow_readtime_features_in_mutations::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn fragment_with_required_spread_in_fragment() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_required_spread_in_fragment.graphql", "disallow_readtime_features_in_mutations/fixtures/fragment_with_required_spread_in_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn mutation_with_linked_resolver_invalid() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.expected"); + test_fixture(transform_fixture, file!(), "mutation_with_linked_resolver.invalid.graphql", "disallow_readtime_features_in_mutations/fixtures/mutation_with_linked_resolver.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn mutation_with_required_field_in_inline_fragment_invalid() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.expected"); + test_fixture(transform_fixture, file!(), "mutation_with_required_field_in_inline_fragment.invalid.graphql", "disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field_in_inline_fragment.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn mutation_with_required_field_invalid() { 
+ let input = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.expected"); + test_fixture(transform_fixture, file!(), "mutation_with_required_field.invalid.graphql", "disallow_readtime_features_in_mutations/fixtures/mutation_with_required_field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn mutation_with_required_log_or_none_field() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.expected"); + test_fixture(transform_fixture, file!(), "mutation_with_required_log_or_none_field.graphql", "disallow_readtime_features_in_mutations/fixtures/mutation_with_required_log_or_none_field.expected", input, expected).await; +} + +#[tokio::test] +async fn mutation_with_scalar_resolver_invalid() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.expected"); + test_fixture(transform_fixture, file!(), "mutation_with_scalar_resolver.invalid.graphql", "disallow_readtime_features_in_mutations/fixtures/mutation_with_scalar_resolver.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_field() { + let input = include_str!("disallow_readtime_features_in_mutations/fixtures/query_with_required_field.graphql"); + let expected = include_str!("disallow_readtime_features_in_mutations/fixtures/query_with_required_field.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_field.graphql", 
"disallow_readtime_features_in_mutations/fixtures/query_with_required_field.expected", input, expected).await; +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field.rs b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field.rs new file mode 100644 index 0000000000000..fa65625754730 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field.rs @@ -0,0 +1,46 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::disallow_required_on_non_null_field; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + disallow_required_on_non_null_field( + &program, + fixture + .content + .contains("# relay:disallow_required_on_non_null_fields"), + fixture + .content + .contains("# relay:experimental_emit_semantic_nullability_types"), + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + 
Ok("OK".to_owned()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.expected new file mode 100644 index 0000000000000..097312700cf5d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.expected @@ -0,0 +1,53 @@ +==================================== INPUT ==================================== +# relay:disallow_required_on_non_null_fields +# expected-to-throw +fragment MyFragment on User { + some_field @required(action: THROW) + some_other_field @required(action: THROW) + best_friend { + some_field @required(action: THROW) + some_other_field @required(action: THROW) + } +} + +# %extensions% +extend type User { + some_field: Int! + some_other_field: Int! + best_friend: User +} +==================================== ERROR ==================================== +✖︎ Unexpected `@required` directive on a non-null field. This field is already non-null and does not need the `@required` directive. + + fragment_with_multiple_required_non_null_fields.invalid.graphql:4:14 + 3 │ fragment MyFragment on User { + 4 │ some_field @required(action: THROW) + │ ^^^^^^^^^ + 5 │ some_other_field @required(action: THROW) + + +✖︎ Unexpected `@required` directive on a non-null field. This field is already non-null and does not need the `@required` directive. + + fragment_with_multiple_required_non_null_fields.invalid.graphql:5:20 + 4 │ some_field @required(action: THROW) + 5 │ some_other_field @required(action: THROW) + │ ^^^^^^^^^ + 6 │ best_friend { + + +✖︎ Unexpected `@required` directive on a non-null field. 
This field is already non-null and does not need the `@required` directive. + + fragment_with_multiple_required_non_null_fields.invalid.graphql:7:16 + 6 │ best_friend { + 7 │ some_field @required(action: THROW) + │ ^^^^^^^^^ + 8 │ some_other_field @required(action: THROW) + + +✖︎ Unexpected `@required` directive on a non-null field. This field is already non-null and does not need the `@required` directive. + + fragment_with_multiple_required_non_null_fields.invalid.graphql:8:22 + 7 │ some_field @required(action: THROW) + 8 │ some_other_field @required(action: THROW) + │ ^^^^^^^^^ + 9 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.graphql new file mode 100644 index 0000000000000..a666cf5020b4d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.graphql @@ -0,0 +1,17 @@ +# relay:disallow_required_on_non_null_fields +# expected-to-throw +fragment MyFragment on User { + some_field @required(action: THROW) + some_other_field @required(action: THROW) + best_friend { + some_field @required(action: THROW) + some_other_field @required(action: THROW) + } +} + +# %extensions% +extend type User { + some_field: Int! + some_other_field: Int! 
+ best_friend: User +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.expected new file mode 100644 index 0000000000000..d65cc991bd72c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.expected @@ -0,0 +1,19 @@ +==================================== INPUT ==================================== +# relay:disallow_required_on_non_null_fields +# expected-to-throw +fragment MyFragment on User { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int! +} +==================================== ERROR ==================================== +✖︎ Unexpected `@required` directive on a non-null field. This field is already non-null and does not need the `@required` directive. + + fragment_with_required_non_null_field.invalid.graphql:4:14 + 3 │ fragment MyFragment on User { + 4 │ some_field @required(action: THROW) + │ ^^^^^^^^^ + 5 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.graphql new file mode 100644 index 0000000000000..55cd19fe6119c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.graphql @@ -0,0 +1,10 @@ +# relay:disallow_required_on_non_null_fields +# expected-to-throw +fragment MyFragment on User { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int! 
+} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.expected new file mode 100644 index 0000000000000..285b952e8b852 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment MyFragment on User @throwOnFieldError { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int @semanticNonNull +} +==================================== ERROR ==================================== +✖︎ Unexpected `@throwOnFieldError` directive. The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled. 
+ + fragment_with_required_semantic_field.invalid.graphql:2:29 + 1 │ # expected-to-throw + 2 │ fragment MyFragment on User @throwOnFieldError { + │ ^^^^^^^^^^^^^^^^^^ + 3 │ some_field @required(action: THROW) diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.graphql new file mode 100644 index 0000000000000..8c1bdcd19d009 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.graphql @@ -0,0 +1,9 @@ +# expected-to-throw +fragment MyFragment on User @throwOnFieldError { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.expected new file mode 100644 index 0000000000000..5938d97f31d44 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +fragment MyFragment on User { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int @semanticNonNull +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.graphql 
b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.graphql new file mode 100644 index 0000000000000..c3c55156fa109 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.graphql @@ -0,0 +1,8 @@ +fragment MyFragment on User { + some_field @required(action: THROW) +} + +# %extensions% +extend type User { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.expected new file mode 100644 index 0000000000000..02ea08d519362 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.expected @@ -0,0 +1,25 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +# expected-to-throw +fragment MyFragment on User @throwOnFieldError { + some_linked_field { + some_other_field @required(action: THROW) + } +} + +# %extensions% +extend type User { + some_linked_field: Foo +} + +type Foo { + some_other_field: Int @semanticNonNull +} +==================================== ERROR ==================================== +✖︎ Unexpected `@required` directive on a `@semanticNonNull` field within a `@throwOnFieldError` fragment or operation. Such fields are already non-null and do not need the `@required` directive. 
+ + fragment_with_required_semantic_field_via_linked.invalid.graphql:5:22 + 4 │ some_linked_field { + 5 │ some_other_field @required(action: THROW) + │ ^^^^^^^^^ + 6 │ } diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.graphql new file mode 100644 index 0000000000000..a0f6b64d8c11c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.graphql @@ -0,0 +1,16 @@ +# relay:experimental_emit_semantic_nullability_types +# expected-to-throw +fragment MyFragment on User @throwOnFieldError { + some_linked_field { + some_other_field @required(action: THROW) + } +} + +# %extensions% +extend type User { + some_linked_field: Foo +} + +type Foo { + some_other_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.expected new file mode 100644 index 0000000000000..797fac45fe875 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.expected @@ -0,0 +1,19 @@ +==================================== INPUT ==================================== +# expected-to-throw +# No comment enabling the @throwOnFieldError directive +query MyQuery @throwOnFieldError { + some_field +} + +# %extensions% +extend type Query { + some_field: Int +} +==================================== ERROR ==================================== +✖︎ Unexpected `@throwOnFieldError` directive. 
The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled. + + query_with_disallowed_throw.invalid.graphql:3:15 + 2 │ # No comment enabling the @throwOnFieldError directive + 3 │ query MyQuery @throwOnFieldError { + │ ^^^^^^^^^^^^^^^^^^ + 4 │ some_field diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.graphql new file mode 100644 index 0000000000000..ced095232f45e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.graphql @@ -0,0 +1,10 @@ +# expected-to-throw +# No comment enabling the @throwOnFieldError directive +query MyQuery @throwOnFieldError { + some_field +} + +# %extensions% +extend type Query { + some_field: Int +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.expected new file mode 100644 index 0000000000000..bf1a7b845b209 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.expected @@ -0,0 +1,12 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.graphql 
b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.graphql new file mode 100644 index 0000000000000..a8f1f582bf96e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field.graphql @@ -0,0 +1,9 @@ +# relay:experimental_emit_semantic_nullability_types +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.expected new file mode 100644 index 0000000000000..1f64e6b35c5eb --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +query MyQuery { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.graphql new file mode 100644 index 0000000000000..a00789baf600e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.graphql @@ -0,0 +1,8 @@ +query MyQuery { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int +} diff --git 
a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.expected new file mode 100644 index 0000000000000..5a39fd1371f81 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +# expected-to-throw +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} +==================================== ERROR ==================================== +✖︎ Unexpected `@throwOnFieldError` directive. The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled. 
+ + query_with_required_semantic_field.invalid.graphql:2:15 + 1 │ # expected-to-throw + 2 │ query MyQuery @throwOnFieldError { + │ ^^^^^^^^^^^^^^^^^^ + 3 │ some_field @required(action: LOG) diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.graphql new file mode 100644 index 0000000000000..b87b094b94623 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.graphql @@ -0,0 +1,9 @@ +# expected-to-throw +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.expected new file mode 100644 index 0000000000000..57e09179d547d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +query MyQuery { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.graphql 
b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.graphql new file mode 100644 index 0000000000000..b709e3697d43b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.graphql @@ -0,0 +1,8 @@ +query MyQuery { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.expected new file mode 100644 index 0000000000000..9649e63ae8fba --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +# expected-to-throw +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: [Int] @semanticNonNull(levels: [1]) +} +==================================== ERROR ==================================== +✖︎ Unexpected `@throwOnFieldError` directive. The `@throwOnFieldError` directive is not supported unless experimental_emit_semantic_nullability_types is enabled. 
+ + query_with_required_semantic_plural_field.invalid.graphql:2:15 + 1 │ # expected-to-throw + 2 │ query MyQuery @throwOnFieldError { + │ ^^^^^^^^^^^^^^^^^^ + 3 │ some_field @required(action: LOG) diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.graphql new file mode 100644 index 0000000000000..9b4075396ec0c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.graphql @@ -0,0 +1,9 @@ +# expected-to-throw +query MyQuery @throwOnFieldError { + some_field @required(action: LOG) +} + +# %extensions% +extend type Query { + some_field: [Int] @semanticNonNull(levels: [1]) +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.expected new file mode 100644 index 0000000000000..3898541e7d6fb --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.expected @@ -0,0 +1,12 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +query MyQuery @throwOnFieldError { + some_field +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.graphql new file mode 
100644 index 0000000000000..7efebf4cb213e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field.graphql @@ -0,0 +1,9 @@ +# relay:experimental_emit_semantic_nullability_types +query MyQuery @throwOnFieldError { + some_field +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.expected b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.expected new file mode 100644 index 0000000000000..d7e2654ae14d2 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.expected @@ -0,0 +1,11 @@ +==================================== INPUT ==================================== +query MyQuery { + some_field +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} +==================================== OUTPUT =================================== +OK diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.graphql b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.graphql new file mode 100644 index 0000000000000..119092510e833 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.graphql @@ -0,0 +1,8 @@ +query MyQuery { + some_field +} + +# %extensions% +extend type Query { + some_field: Int @semanticNonNull +} diff --git a/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field_test.rs b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field_test.rs new file mode 
100644 index 0000000000000..21a3838c3ad89 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_required_on_non_null_field_test.rs @@ -0,0 +1,104 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @generated SignedSource<<5746353336950e077c65f751091a962a>> + */ + +mod disallow_required_on_non_null_field; + +use disallow_required_on_non_null_field::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn fragment_with_multiple_required_non_null_fields_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_multiple_required_non_null_fields.invalid.graphql", "disallow_required_on_non_null_field/fixtures/fragment_with_multiple_required_non_null_fields.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_with_required_non_null_field_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_required_non_null_field.invalid.graphql", "disallow_required_on_non_null_field/fixtures/fragment_with_required_non_null_field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_with_required_semantic_field_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.graphql"); + let expected = 
include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_required_semantic_field.invalid.graphql", "disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_with_required_semantic_field_no_explicit_errors() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_required_semantic_field_no_explicit_errors.graphql", "disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_no_explicit_errors.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_with_required_semantic_field_via_linked_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment_with_required_semantic_field_via_linked.invalid.graphql", "disallow_required_on_non_null_field/fixtures/fragment_with_required_semantic_field_via_linked.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_disallowed_throw_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.expected"); + test_fixture(transform_fixture, file!(), 
"query_with_disallowed_throw.invalid.graphql", "disallow_required_on_non_null_field/fixtures/query_with_disallowed_throw.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_field() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_field.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_field.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_field.graphql", "disallow_required_on_non_null_field/fixtures/query_with_required_field.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_field_no_explicit_errors() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_field_no_explicit_errors.graphql", "disallow_required_on_non_null_field/fixtures/query_with_required_field_no_explicit_errors.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_semantic_field_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_semantic_field.invalid.graphql", "disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_semantic_field_no_explicit_errors() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.graphql"); + let 
expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_semantic_field_no_explicit_errors.graphql", "disallow_required_on_non_null_field/fixtures/query_with_required_semantic_field_no_explicit_errors.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_required_semantic_plural_field_invalid() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.expected"); + test_fixture(transform_fixture, file!(), "query_with_required_semantic_plural_field.invalid.graphql", "disallow_required_on_non_null_field/fixtures/query_with_required_semantic_plural_field.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_semantic_field() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_semantic_field.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_semantic_field.expected"); + test_fixture(transform_fixture, file!(), "query_with_semantic_field.graphql", "disallow_required_on_non_null_field/fixtures/query_with_semantic_field.expected", input, expected).await; +} + +#[tokio::test] +async fn query_with_semantic_field_no_explicit_errors() { + let input = include_str!("disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.graphql"); + let expected = include_str!("disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.expected"); + test_fixture(transform_fixture, file!(), "query_with_semantic_field_no_explicit_errors.graphql", "disallow_required_on_non_null_field/fixtures/query_with_semantic_field_no_explicit_errors.expected", input, 
expected).await; +} diff --git a/compiler/crates/relay-transforms/tests/disallow_reserved_aliases.rs b/compiler/crates/relay-transforms/tests/disallow_reserved_aliases.rs new file mode 100644 index 0000000000000..e4d5e2c1873e5 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_reserved_aliases.rs @@ -0,0 +1,27 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema; +use relay_transforms::disallow_reserved_aliases; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let schema = get_test_schema(); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(schema, ir); + disallow_reserved_aliases(&program, &Default::default()) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/disallow_reserved_aliases/mod.rs b/compiler/crates/relay-transforms/tests/disallow_reserved_aliases/mod.rs deleted file mode 100644 index 0f5645d2ec298..0000000000000 --- a/compiler/crates/relay-transforms/tests/disallow_reserved_aliases/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema; -use relay_transforms::disallow_reserved_aliases; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let schema = get_test_schema(); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(schema, ir); - disallow_reserved_aliases(&program, &Default::default()) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/disallow_reserved_aliases_test.rs b/compiler/crates/relay-transforms/tests/disallow_reserved_aliases_test.rs index 6850ff8a420f2..273fb7fde1331 100644 --- a/compiler/crates/relay-transforms/tests/disallow_reserved_aliases_test.rs +++ b/compiler/crates/relay-transforms/tests/disallow_reserved_aliases_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<8184d04f25bd777e8ff56df2e2baff89>> + * @generated SignedSource<<8a54c4298c83b3b072fa04aaa6053eda>> */ mod disallow_reserved_aliases; @@ -12,30 +12,30 @@ mod disallow_reserved_aliases; use disallow_reserved_aliases::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn id_alias() { +#[tokio::test] +async fn id_alias() { let input = include_str!("disallow_reserved_aliases/fixtures/id-alias.graphql"); let expected = include_str!("disallow_reserved_aliases/fixtures/id-alias.expected"); - test_fixture(transform_fixture, "id-alias.graphql", "disallow_reserved_aliases/fixtures/id-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "id-alias.graphql", "disallow_reserved_aliases/fixtures/id-alias.expected", input, expected).await; } -#[test] -fn id_alias_with_errors_invalid() { +#[tokio::test] +async fn id_alias_with_errors_invalid() { let input = include_str!("disallow_reserved_aliases/fixtures/id-alias-with-errors.invalid.graphql"); let expected = include_str!("disallow_reserved_aliases/fixtures/id-alias-with-errors.invalid.expected"); - test_fixture(transform_fixture, "id-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/id-alias-with-errors.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "id-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/id-alias-with-errors.invalid.expected", input, expected).await; } -#[test] -fn relay_id_alias_with_errors_invalid() { +#[tokio::test] +async fn relay_id_alias_with_errors_invalid() { let input = include_str!("disallow_reserved_aliases/fixtures/relay_id-alias-with-errors.invalid.graphql"); let expected = include_str!("disallow_reserved_aliases/fixtures/relay_id-alias-with-errors.invalid.expected"); - test_fixture(transform_fixture, "relay_id-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/relay_id-alias-with-errors.invalid.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "relay_id-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/relay_id-alias-with-errors.invalid.expected", input, expected).await; } -#[test] -fn typename_alias_with_errors_invalid() { +#[tokio::test] +async fn typename_alias_with_errors_invalid() { let input = include_str!("disallow_reserved_aliases/fixtures/typename-alias-with-errors.invalid.graphql"); let expected = include_str!("disallow_reserved_aliases/fixtures/typename-alias-with-errors.invalid.expected"); - test_fixture(transform_fixture, "typename-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/typename-alias-with-errors.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-alias-with-errors.invalid.graphql", "disallow_reserved_aliases/fixtures/typename-alias-with-errors.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/disallow_typename_on_root.rs b/compiler/crates/relay-transforms/tests/disallow_typename_on_root.rs new file mode 100644 index 0000000000000..4a36780ed53b8 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/disallow_typename_on_root.rs @@ -0,0 +1,27 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema; +use relay_transforms::disallow_typename_on_root; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let schema = get_test_schema(); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(schema, ir); + disallow_typename_on_root(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/disallow_typename_on_root/mod.rs b/compiler/crates/relay-transforms/tests/disallow_typename_on_root/mod.rs deleted file mode 100644 index 40296f8e00fe9..0000000000000 --- a/compiler/crates/relay-transforms/tests/disallow_typename_on_root/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema; -use relay_transforms::disallow_typename_on_root; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let schema = get_test_schema(); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(schema, ir); - disallow_typename_on_root(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/disallow_typename_on_root_test.rs b/compiler/crates/relay-transforms/tests/disallow_typename_on_root_test.rs index 6b68be1b124b0..4c1f375e490a7 100644 --- a/compiler/crates/relay-transforms/tests/disallow_typename_on_root_test.rs +++ b/compiler/crates/relay-transforms/tests/disallow_typename_on_root_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<39f0a34f3acf5f9d49cd579af5dbc8f8>> + * @generated SignedSource<> */ mod disallow_typename_on_root; @@ -12,30 +12,30 @@ mod disallow_typename_on_root; use disallow_typename_on_root::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn typename_on_fragment_invalid() { +#[tokio::test] +async fn typename_on_fragment_invalid() { let input = include_str!("disallow_typename_on_root/fixtures/typename-on-fragment.invalid.graphql"); let expected = include_str!("disallow_typename_on_root/fixtures/typename-on-fragment.invalid.expected"); - test_fixture(transform_fixture, "typename-on-fragment.invalid.graphql", "disallow_typename_on_root/fixtures/typename-on-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-fragment.invalid.graphql", "disallow_typename_on_root/fixtures/typename-on-fragment.invalid.expected", input, expected).await; } -#[test] -fn typename_on_mutation_invalid() { +#[tokio::test] +async fn typename_on_mutation_invalid() { let input = include_str!("disallow_typename_on_root/fixtures/typename-on-mutation.invalid.graphql"); let expected = include_str!("disallow_typename_on_root/fixtures/typename-on-mutation.invalid.expected"); - test_fixture(transform_fixture, "typename-on-mutation.invalid.graphql", "disallow_typename_on_root/fixtures/typename-on-mutation.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-mutation.invalid.graphql", "disallow_typename_on_root/fixtures/typename-on-mutation.invalid.expected", input, expected).await; } -#[test] -fn typename_on_query_invalid() { +#[tokio::test] +async fn typename_on_query_invalid() { let input = include_str!("disallow_typename_on_root/fixtures/typename-on-query.invalid.graphql"); let expected = include_str!("disallow_typename_on_root/fixtures/typename-on-query.invalid.expected"); - test_fixture(transform_fixture, "typename-on-query.invalid.graphql", 
"disallow_typename_on_root/fixtures/typename-on-query.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-query.invalid.graphql", "disallow_typename_on_root/fixtures/typename-on-query.invalid.expected", input, expected).await; } -#[test] -fn valid() { +#[tokio::test] +async fn valid() { let input = include_str!("disallow_typename_on_root/fixtures/valid.graphql"); let expected = include_str!("disallow_typename_on_root/fixtures/valid.expected"); - test_fixture(transform_fixture, "valid.graphql", "disallow_typename_on_root/fixtures/valid.expected", input, expected); + test_fixture(transform_fixture, file!(), "valid.graphql", "disallow_typename_on_root/fixtures/valid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/flatten.rs b/compiler/crates/relay-transforms/tests/flatten.rs new file mode 100644 index 0000000000000..822bdd8abced1 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/flatten.rs @@ -0,0 +1,57 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::flatten; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(fixture.content, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let schema = get_test_schema_with_extensions( + r#" +directive @serverInlineDirective on INLINE_FRAGMENT"#, + ); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let mut context = Program::from_definitions(Arc::clone(&schema), ir); + flatten( + &mut context, + !fixture.content.contains("%for_printing%"), + false, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + + let mut printed_queries = context + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .collect::>(); + + let mut printed = context + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())) + .collect::>(); + printed.append(&mut printed_queries); + printed.sort(); + Ok(printed.join("\n\n")) +} diff --git a/compiler/crates/relay-transforms/tests/flatten/mod.rs b/compiler/crates/relay-transforms/tests/flatten/mod.rs deleted file mode 100644 index ae930c19e4e49..0000000000000 --- a/compiler/crates/relay-transforms/tests/flatten/mod.rs +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 
Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::flatten; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(fixture.content, source_location) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let schema = get_test_schema_with_extensions( - r#" -directive @serverInlineDirective on INLINE_FRAGMENT"#, - ); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - let mut context = Program::from_definitions(Arc::clone(&schema), ir); - flatten( - &mut context, - !fixture.content.contains("%for_printing%"), - false, - ) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - - let mut printed_queries = context - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .collect::>(); - - let mut printed = context - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())) - .collect::>(); - printed.append(&mut printed_queries); - printed.sort(); - Ok(printed.join("\n\n")) -} diff --git a/compiler/crates/relay-transforms/tests/flatten_test.rs b/compiler/crates/relay-transforms/tests/flatten_test.rs index 
04791f037e0c7..2afd26234b039 100644 --- a/compiler/crates/relay-transforms/tests/flatten_test.rs +++ b/compiler/crates/relay-transforms/tests/flatten_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<8a035f3972bb95c60f23754821230364>> */ mod flatten; @@ -12,93 +12,93 @@ mod flatten; use flatten::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn anonymous_inline_fragment_with_directives() { +#[tokio::test] +async fn anonymous_inline_fragment_with_directives() { let input = include_str!("flatten/fixtures/anonymous-inline-fragment-with-directives.graphql"); let expected = include_str!("flatten/fixtures/anonymous-inline-fragment-with-directives.expected"); - test_fixture(transform_fixture, "anonymous-inline-fragment-with-directives.graphql", "flatten/fixtures/anonymous-inline-fragment-with-directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "anonymous-inline-fragment-with-directives.graphql", "flatten/fixtures/anonymous-inline-fragment-with-directives.expected", input, expected).await; } -#[test] -fn flatten_multiple_conditions() { +#[tokio::test] +async fn flatten_multiple_conditions() { let input = include_str!("flatten/fixtures/flatten-multiple-conditions.graphql"); let expected = include_str!("flatten/fixtures/flatten-multiple-conditions.expected"); - test_fixture(transform_fixture, "flatten-multiple-conditions.graphql", "flatten/fixtures/flatten-multiple-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "flatten-multiple-conditions.graphql", "flatten/fixtures/flatten-multiple-conditions.expected", input, expected).await; } -#[test] -fn flatten_same_conditions() { +#[tokio::test] +async fn flatten_same_conditions() { let input = include_str!("flatten/fixtures/flatten-same-conditions.graphql"); let expected = 
include_str!("flatten/fixtures/flatten-same-conditions.expected"); - test_fixture(transform_fixture, "flatten-same-conditions.graphql", "flatten/fixtures/flatten-same-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "flatten-same-conditions.graphql", "flatten/fixtures/flatten-same-conditions.expected", input, expected).await; } -#[test] -fn flatten_transform() { +#[tokio::test] +async fn flatten_transform() { let input = include_str!("flatten/fixtures/flatten-transform.graphql"); let expected = include_str!("flatten/fixtures/flatten-transform.expected"); - test_fixture(transform_fixture, "flatten-transform.graphql", "flatten/fixtures/flatten-transform.expected", input, expected); + test_fixture(transform_fixture, file!(), "flatten-transform.graphql", "flatten/fixtures/flatten-transform.expected", input, expected).await; } -#[test] -fn flattens_inline_inside_condition() { +#[tokio::test] +async fn flattens_inline_inside_condition() { let input = include_str!("flatten/fixtures/flattens-inline-inside-condition.graphql"); let expected = include_str!("flatten/fixtures/flattens-inline-inside-condition.expected"); - test_fixture(transform_fixture, "flattens-inline-inside-condition.graphql", "flatten/fixtures/flattens-inline-inside-condition.expected", input, expected); + test_fixture(transform_fixture, file!(), "flattens-inline-inside-condition.graphql", "flatten/fixtures/flattens-inline-inside-condition.expected", input, expected).await; } -#[test] -fn flattens_inside_plural() { +#[tokio::test] +async fn flattens_inside_plural() { let input = include_str!("flatten/fixtures/flattens-inside-plural.graphql"); let expected = include_str!("flatten/fixtures/flattens-inside-plural.expected"); - test_fixture(transform_fixture, "flattens-inside-plural.graphql", "flatten/fixtures/flattens-inside-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "flattens-inside-plural.graphql", 
"flatten/fixtures/flattens-inside-plural.expected", input, expected).await; } -#[test] -fn flattens_matching_fragment_types() { +#[tokio::test] +async fn flattens_matching_fragment_types() { let input = include_str!("flatten/fixtures/flattens-matching-fragment-types.graphql"); let expected = include_str!("flatten/fixtures/flattens-matching-fragment-types.expected"); - test_fixture(transform_fixture, "flattens-matching-fragment-types.graphql", "flatten/fixtures/flattens-matching-fragment-types.expected", input, expected); + test_fixture(transform_fixture, file!(), "flattens-matching-fragment-types.graphql", "flatten/fixtures/flattens-matching-fragment-types.expected", input, expected).await; } -#[test] -fn inline_fragment_with_directives() { +#[tokio::test] +async fn inline_fragment_with_directives() { let input = include_str!("flatten/fixtures/inline-fragment-with-directives.graphql"); let expected = include_str!("flatten/fixtures/inline-fragment-with-directives.expected"); - test_fixture(transform_fixture, "inline-fragment-with-directives.graphql", "flatten/fixtures/inline-fragment-with-directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-with-directives.graphql", "flatten/fixtures/inline-fragment-with-directives.expected", input, expected).await; } -#[test] -fn inline_fragment_with_directives_text() { +#[tokio::test] +async fn inline_fragment_with_directives_text() { let input = include_str!("flatten/fixtures/inline-fragment-with-directives-text.graphql"); let expected = include_str!("flatten/fixtures/inline-fragment-with-directives-text.expected"); - test_fixture(transform_fixture, "inline-fragment-with-directives-text.graphql", "flatten/fixtures/inline-fragment-with-directives-text.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-with-directives-text.graphql", "flatten/fixtures/inline-fragment-with-directives-text.expected", input, expected).await; } -#[test] -fn 
linked_handle_field() { +#[tokio::test] +async fn linked_handle_field() { let input = include_str!("flatten/fixtures/linked-handle-field.graphql"); let expected = include_str!("flatten/fixtures/linked-handle-field.expected"); - test_fixture(transform_fixture, "linked-handle-field.graphql", "flatten/fixtures/linked-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-handle-field.graphql", "flatten/fixtures/linked-handle-field.expected", input, expected).await; } -#[test] -fn match_field() { +#[tokio::test] +async fn match_field() { let input = include_str!("flatten/fixtures/match-field.graphql"); let expected = include_str!("flatten/fixtures/match-field.expected"); - test_fixture(transform_fixture, "match-field.graphql", "flatten/fixtures/match-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field.graphql", "flatten/fixtures/match-field.expected", input, expected).await; } -#[test] -fn match_field_overlap() { +#[tokio::test] +async fn match_field_overlap() { let input = include_str!("flatten/fixtures/match-field-overlap.graphql"); let expected = include_str!("flatten/fixtures/match-field-overlap.expected"); - test_fixture(transform_fixture, "match-field-overlap.graphql", "flatten/fixtures/match-field-overlap.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field-overlap.graphql", "flatten/fixtures/match-field-overlap.expected", input, expected).await; } -#[test] -fn scalar_handle_field() { +#[tokio::test] +async fn scalar_handle_field() { let input = include_str!("flatten/fixtures/scalar-handle-field.graphql"); let expected = include_str!("flatten/fixtures/scalar-handle-field.expected"); - test_fixture(transform_fixture, "scalar-handle-field.graphql", "flatten/fixtures/scalar-handle-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-handle-field.graphql", "flatten/fixtures/scalar-handle-field.expected", input, 
expected).await; } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive.rs b/compiler/crates/relay-transforms/tests/fragment_alias_directive.rs new file mode 100644 index 0000000000000..e680daa1204f0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive.rs @@ -0,0 +1,16 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::fragment_alias_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + fragment_alias_directive(program, true, true) + }) +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.expected new file mode 100644 index 0000000000000..de3725972e15c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +# expected-to-throw +query RelayReaderNamedFragmentsTest2Query { + me { + id + ... on User @alias(as: "") { + name + } + } +} +==================================== ERROR ==================================== +✖︎ Unexpected empty string supplied for `as` argument of the @alias directive. If provided, the `as` argument of the `@alias` directive must be a non-empty string literal. + + alias_as_empty_string.invalid.graphql:5:28 + 4 │ id + 5 │ ... 
on User @alias(as: "") { + │ ^^ + 6 │ name diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.graphql new file mode 100644 index 0000000000000..8a217dec098bf --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_as_empty_string.invalid.graphql @@ -0,0 +1,9 @@ +# expected-to-throw +query RelayReaderNamedFragmentsTest2Query { + me { + id + ... on User @alias(as: "") { + name + } + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.expected new file mode 100644 index 0000000000000..17c69260ec984 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.expected @@ -0,0 +1,51 @@ +==================================== INPUT ==================================== +fragment NameRendererFragment on User { + id + nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + ...MarkdownUserNameRenderer_name + @module(name: "MarkdownUserNameRenderer.react") + } + plainNameRenderer: nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + } +} + +fragment PlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } +} + +fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { + markdown + data { + markup + } +} +==================================== OUTPUT =================================== +fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { + markdown + data { + markup + } +} + +fragment NameRendererFragment on User { + id + 
nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + ...MarkdownUserNameRenderer_name @module(name: "MarkdownUserNameRenderer.react") + } + plainNameRenderer: nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + } +} + +fragment PlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.graphql new file mode 100644 index 0000000000000..4b790f391367c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.graphql @@ -0,0 +1,25 @@ +fragment NameRendererFragment on User { + id + nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + ...MarkdownUserNameRenderer_name + @module(name: "MarkdownUserNameRenderer.react") + } + plainNameRenderer: nameRenderer @match { + ...PlainUserNameRenderer_name @module(name: "PlainUserNameRenderer.react") + } +} + +fragment PlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } +} + +fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { + markdown + data { + markup + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.expected new file mode 100644 index 0000000000000..a47348b3e7060 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.expected @@ -0,0 +1,36 @@ 
+==================================== INPUT ==================================== +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + ... on User @alias { + ...RelayReaderNamedFragmentsTest_user + } + } +} +==================================== OUTPUT =================================== +query RelayReaderNamedFragmentsTest2Query { + node { + ... on User @alias @__FragmentAliasMetadata + # FragmentAliasMetadata { + # alias: WithLocation { + # location: alias_not_required_within_aliased_refined_inline_fragment.graphql:133:139, + # item: "User", + # }, + # type_condition: Some( + # Object(70), + # ), + # non_nullable: false, + # selection_type: Object(70), + # } + { + ...RelayReaderNamedFragmentsTest_user + } + } +} + +fragment RelayReaderNamedFragmentsTest_user on User { + name +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.graphql new file mode 100644 index 0000000000000..83e5f83528a0e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.graphql @@ -0,0 +1,11 @@ +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + ... 
on User @alias { + ...RelayReaderNamedFragmentsTest_user + } + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_abstract_type.expected index 634c026d8505c..23f8e37806dcb 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_abstract_type.expected +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_abstract_type.expected @@ -11,16 +11,17 @@ query RelayReaderNamedFragmentsTest2Query { ==================================== OUTPUT =================================== query RelayReaderNamedFragmentsTest2Query { node(id: "1") { - ...RelayReaderNamedFragmentsTest_maybe_node_interface @__FragmentAliasMetadata + ...RelayReaderNamedFragmentsTest_maybe_node_interface @alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: alias_on_abstract_type.graphql:221:223, + # location: alias_on_abstract_type.graphql:225:243, # item: "aliased_fragment", # }, # type_condition: Some( # Interface(0), # ), - # selection_type: Interface(0), + # non_nullable: false, + # selection_type: Interface(5), # } } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_named_fragment.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_named_fragment.expected index 7e0221a264b6c..72b08843ec136 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_named_fragment.expected +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_named_fragment.expected @@ -11,16 +11,17 @@ query RelayReaderNamedFragmentsTest2Query { ==================================== OUTPUT =================================== query RelayReaderNamedFragmentsTest2Query { me { - 
...RelayReaderNamedFragmentsTest_user @__FragmentAliasMetadata + ...RelayReaderNamedFragmentsTest_user @alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: alias_on_named_fragment.graphql:164:166, + # location: alias_on_named_fragment.graphql:168:186, # item: "aliased_fragment", # }, # type_condition: Some( - # Object(69), + # Object(70), # ), - # selection_type: Object(69), + # non_nullable: true, + # selection_type: Object(70), # } } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.expected new file mode 100644 index 0000000000000..808f41269fc25 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.expected @@ -0,0 +1,19 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User @relay(plural: true) { + name +} + +query RelayReaderNamedFragmentsTest2Query { + nodes { + ...RelayReaderNamedFragmentsTest_user @alias + } +} +==================================== ERROR ==================================== +✖︎ Unexpected `@alias` on spread of plural fragment. @alias may not be used on fragments marked as `@relay(plural: true)`. 
+ + alias_on_spread_of_plural_fragment.invalid.graphql:8:43 + 7 │ nodes { + 8 │ ...RelayReaderNamedFragmentsTest_user @alias + │ ^^^^^^ + 9 │ } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.graphql new file mode 100644 index 0000000000000..7be9c2f92553b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.graphql @@ -0,0 +1,10 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User @relay(plural: true) { + name +} + +query RelayReaderNamedFragmentsTest2Query { + nodes { + ...RelayReaderNamedFragmentsTest_user @alias + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.expected new file mode 100644 index 0000000000000..99c757df3e221 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.expected @@ -0,0 +1,24 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # Relay is not reliable in modeling this as a discriminated union in its + # typescript/flow types. To be safe we need the user to supply an alias. + ... on User { + # This might not match! + ...RelayReaderNamedFragmentsTest_user + } + } +} +==================================== ERROR ==================================== +✖︎ Expected `@alias` directive. 
`RelayReaderNamedFragmentsTest_user` is defined on `User` which might not match this selection type of `Node`. Add `@alias` to this spread to expose the fragment reference as a nullable property. NOTE: The selection type inferred here does not include inline fragments because Relay does not always model inline fragment type refinements in its generated types. + + alias_required_within_refined_inline_fragment.invalid.graphql:12:10 + 11 │ # This might not match! + 12 │ ...RelayReaderNamedFragmentsTest_user + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 13 │ } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.graphql new file mode 100644 index 0000000000000..5016a966ffb45 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.graphql @@ -0,0 +1,15 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # Relay is not reliable in modeling this as a discriminated union in its + # typescript/flow types. To be safe we need the user to supply an alias. + ... on User { + # This might not match! 
+ ...RelayReaderNamedFragmentsTest_user + } + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment.expected index 31d32ae80fa13..5ea49d27faa71 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment.expected +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment.expected @@ -11,16 +11,17 @@ query RelayReaderNamedFragmentsTest2Query { query RelayReaderNamedFragmentsTest2Query { me { id - ... on User @__FragmentAliasMetadata + ... on User @alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: aliased_inline_fragment.graphql:81:83, + # location: aliased_inline_fragment.graphql:85:103, # item: "aliased_fragment", # }, # type_condition: Some( - # Object(69), + # Object(70), # ), - # selection_type: Object(69), + # non_nullable: true, + # selection_type: Object(70), # } { name diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected index 490c6242401d6..6d1b588483c6e 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected @@ -11,14 +11,15 @@ query RelayReaderNamedFragmentsTest2Query { query RelayReaderNamedFragmentsTest2Query { me { id - ... @__FragmentAliasMetadata + ... 
@alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: aliased_inline_fragment_without_type_condition.graphql:73:75, + # location: aliased_inline_fragment_without_type_condition.graphql:77:95, # item: "aliased_fragment", # }, # type_condition: None, - # selection_type: Object(69), + # non_nullable: true, + # selection_type: Object(70), # } { name diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected index 46e87fed00ec1..6dd556300a1e3 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected @@ -11,16 +11,17 @@ query RelayReaderNamedFragmentsTest2Query { ==================================== OUTPUT =================================== query RelayReaderNamedFragmentsTest2Query { me { - ...RelayReaderNamedFragmentsTest_user @__FragmentAliasMetadata + ...RelayReaderNamedFragmentsTest_user @alias @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { # location: default_alias_on_fragment_spread.graphql:157:163, # item: "RelayReaderNamedFragmentsTest_user", # }, # type_condition: Some( - # Object(69), + # Object(70), # ), - # selection_type: Object(69), + # non_nullable: true, + # selection_type: Object(70), # } } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected index 4b08014d7aff8..11b4fb8e771af 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected 
+++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected @@ -6,16 +6,17 @@ fragment Foo on Node { } ==================================== OUTPUT =================================== fragment Foo on Node { - ... on User @__FragmentAliasMetadata + ... on User @alias @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { # location: default_alias_on_inline_fragment.graphql:39:45, # item: "User", # }, # type_condition: Some( - # Object(69), + # Object(70), # ), - # selection_type: Object(69), + # non_nullable: false, + # selection_type: Object(70), # } { name diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.expected new file mode 100644 index 0000000000000..b5e440f5ed2a4 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.expected @@ -0,0 +1,20 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match! + ...RelayReaderNamedFragmentsTest_user + } +} +==================================== ERROR ==================================== +✖︎ Expected `@alias` directive. `RelayReaderNamedFragmentsTest_user` is defined on `User` which might not match this selection type of `Node`. Add `@alias` to this spread to expose the fragment reference as a nullable property. + + fragment_spread_into_supertype_without_alias.invalid.graphql:9:8 + 8 │ # This might not match! 
+ 9 │ ...RelayReaderNamedFragmentsTest_user + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 10 │ } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.graphql new file mode 100644 index 0000000000000..16b7206ee5445 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.graphql @@ -0,0 +1,11 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match! + ...RelayReaderNamedFragmentsTest_user + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.expected new file mode 100644 index 0000000000000..b93ac178b7f63 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match! 
+ ...RelayReaderNamedFragmentsTest_user @dangerously_unaliased_fixme + } +} +==================================== OUTPUT =================================== +query RelayReaderNamedFragmentsTest2Query { + node { + ...RelayReaderNamedFragmentsTest_user @dangerously_unaliased_fixme + } +} + +fragment RelayReaderNamedFragmentsTest_user on User { + name +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.graphql new file mode 100644 index 0000000000000..27769ab3be88a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.graphql @@ -0,0 +1,10 @@ +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match! + ...RelayReaderNamedFragmentsTest_user @dangerously_unaliased_fixme + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.expected new file mode 100644 index 0000000000000..f68a365ac9245 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match, but that's fine since our types capture the + # nullability of the individual fields + ... 
on User { + name + } + } +} +==================================== OUTPUT =================================== +query RelayReaderNamedFragmentsTest2Query { + node { + ... on User { + name + } + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.graphql new file mode 100644 index 0000000000000..1b11dcf8fe815 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.graphql @@ -0,0 +1,9 @@ +query RelayReaderNamedFragmentsTest2Query { + node { + # This might not match, but that's fine since our types capture the + # nullability of the individual fields + ... on User { + name + } + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.expected new file mode 100644 index 0000000000000..75b97e13b99d9 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.expected @@ -0,0 +1,20 @@ +==================================== INPUT ==================================== +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match! + ...RelayReaderNamedFragmentsTest_user @skip(if: $someCondition) + } +} +==================================== ERROR ==================================== +✖︎ Expected `@alias` directive. Fragment spreads with `@skip` are conditionally fetched. 
Add `@alias` to this spread to expose the fragment reference as a nullable property. + + skip_fragment_spread_without_alias.invalid.graphql:9:43 + 8 │ # This might not match! + 9 │ ...RelayReaderNamedFragmentsTest_user @skip(if: $someCondition) + │ ^^^^^ + 10 │ } diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.graphql new file mode 100644 index 0000000000000..f4dc44e0a407e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.graphql @@ -0,0 +1,11 @@ +# expected-to-throw +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match! + ...RelayReaderNamedFragmentsTest_user @skip(if: $someCondition) + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.expected new file mode 100644 index 0000000000000..78f4b476f714d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.expected @@ -0,0 +1,25 @@ +==================================== INPUT ==================================== +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match! 
+ ...RelayReaderNamedFragmentsTest_user + @skip(if: $someCondition) + @dangerously_unaliased_fixme + } +} +==================================== OUTPUT =================================== +query RelayReaderNamedFragmentsTest2Query( + $someCondition: Boolean! +) { + me { + ...RelayReaderNamedFragmentsTest_user @skip(if: $someCondition) @dangerously_unaliased_fixme + } +} + +fragment RelayReaderNamedFragmentsTest_user on User { + name +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.graphql new file mode 100644 index 0000000000000..fb9494e6d937f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.graphql @@ -0,0 +1,12 @@ +fragment RelayReaderNamedFragmentsTest_user on User { + name +} + +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match! + ...RelayReaderNamedFragmentsTest_user + @skip(if: $someCondition) + @dangerously_unaliased_fixme + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.expected b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.expected new file mode 100644 index 0000000000000..4e6027512ef21 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.expected @@ -0,0 +1,20 @@ +==================================== INPUT ==================================== +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match, but that's fine since our types capture the + # nullability of the individual fields + ... 
on User { + name + } + } +} +==================================== OUTPUT =================================== +query RelayReaderNamedFragmentsTest2Query( + $someCondition: Boolean! +) { + me { + ... on User { + name + } + } +} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.graphql b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.graphql new file mode 100644 index 0000000000000..ee054324f358f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.graphql @@ -0,0 +1,9 @@ +query RelayReaderNamedFragmentsTest2Query($someCondition: Boolean!) { + me { + # This might not match, but that's fine since our types capture the + # nullability of the individual fields + ... on User { + name + } + } +} \ No newline at end of file diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive/mod.rs b/compiler/crates/relay-transforms/tests/fragment_alias_directive/mod.rs deleted file mode 100644 index 91cb000330703..0000000000000 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::FeatureFlag; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::fragment_alias_directive; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - fragment_alias_directive(program, &FeatureFlag::Enabled) - }) -} diff --git a/compiler/crates/relay-transforms/tests/fragment_alias_directive_test.rs b/compiler/crates/relay-transforms/tests/fragment_alias_directive_test.rs index ab280e09057f7..67c63f481dc2c 100644 --- a/compiler/crates/relay-transforms/tests/fragment_alias_directive_test.rs +++ b/compiler/crates/relay-transforms/tests/fragment_alias_directive_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<181f8435539d1237e4b620d14a79c100>> + * @generated SignedSource<<9999601a445e2154561da251e0ca30b4>> */ mod fragment_alias_directive; @@ -12,51 +12,128 @@ mod fragment_alias_directive; use fragment_alias_directive::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn alias_on_abstract_type() { +#[tokio::test] +async fn alias_as_empty_string_invalid() { + let input = include_str!("fragment_alias_directive/fixtures/alias_as_empty_string.invalid.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/alias_as_empty_string.invalid.expected"); + test_fixture(transform_fixture, file!(), "alias_as_empty_string.invalid.graphql", "fragment_alias_directive/fixtures/alias_as_empty_string.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn alias_not_required_on_module_fragments() { + let input = include_str!("fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.expected"); + test_fixture(transform_fixture, file!(), 
"alias_not_required_on_module_fragments.graphql", "fragment_alias_directive/fixtures/alias_not_required_on_module_fragments.expected", input, expected).await; +} + +#[tokio::test] +async fn alias_not_required_within_aliased_refined_inline_fragment() { + let input = include_str!("fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.expected"); + test_fixture(transform_fixture, file!(), "alias_not_required_within_aliased_refined_inline_fragment.graphql", "fragment_alias_directive/fixtures/alias_not_required_within_aliased_refined_inline_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn alias_on_abstract_type() { let input = include_str!("fragment_alias_directive/fixtures/alias_on_abstract_type.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/alias_on_abstract_type.expected"); - test_fixture(transform_fixture, "alias_on_abstract_type.graphql", "fragment_alias_directive/fixtures/alias_on_abstract_type.expected", input, expected); + test_fixture(transform_fixture, file!(), "alias_on_abstract_type.graphql", "fragment_alias_directive/fixtures/alias_on_abstract_type.expected", input, expected).await; } -#[test] -fn alias_on_named_fragment() { +#[tokio::test] +async fn alias_on_named_fragment() { let input = include_str!("fragment_alias_directive/fixtures/alias_on_named_fragment.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/alias_on_named_fragment.expected"); - test_fixture(transform_fixture, "alias_on_named_fragment.graphql", "fragment_alias_directive/fixtures/alias_on_named_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "alias_on_named_fragment.graphql", "fragment_alias_directive/fixtures/alias_on_named_fragment.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment() { 
+#[tokio::test] +async fn alias_on_spread_of_plural_fragment_invalid() { + let input = include_str!("fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.expected"); + test_fixture(transform_fixture, file!(), "alias_on_spread_of_plural_fragment.invalid.graphql", "fragment_alias_directive/fixtures/alias_on_spread_of_plural_fragment.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn alias_required_within_refined_inline_fragment_invalid() { + let input = include_str!("fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.expected"); + test_fixture(transform_fixture, file!(), "alias_required_within_refined_inline_fragment.invalid.graphql", "fragment_alias_directive/fixtures/alias_required_within_refined_inline_fragment.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn aliased_inline_fragment() { let input = include_str!("fragment_alias_directive/fixtures/aliased_inline_fragment.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/aliased_inline_fragment.expected"); - test_fixture(transform_fixture, "aliased_inline_fragment.graphql", "fragment_alias_directive/fixtures/aliased_inline_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased_inline_fragment.graphql", "fragment_alias_directive/fixtures/aliased_inline_fragment.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_without_type_condition() { +#[tokio::test] +async fn aliased_inline_fragment_without_type_condition() { let input = include_str!("fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.graphql"); let expected = 
include_str!("fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected"); - test_fixture(transform_fixture, "aliased_inline_fragment_without_type_condition.graphql", "fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased_inline_fragment_without_type_condition.graphql", "fragment_alias_directive/fixtures/aliased_inline_fragment_without_type_condition.expected", input, expected).await; } -#[test] -fn default_alias_on_fragment_spread() { +#[tokio::test] +async fn default_alias_on_fragment_spread() { let input = include_str!("fragment_alias_directive/fixtures/default_alias_on_fragment_spread.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected"); - test_fixture(transform_fixture, "default_alias_on_fragment_spread.graphql", "fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "default_alias_on_fragment_spread.graphql", "fragment_alias_directive/fixtures/default_alias_on_fragment_spread.expected", input, expected).await; } -#[test] -fn default_alias_on_inline_fragment() { +#[tokio::test] +async fn default_alias_on_inline_fragment() { let input = include_str!("fragment_alias_directive/fixtures/default_alias_on_inline_fragment.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected"); - test_fixture(transform_fixture, "default_alias_on_inline_fragment.graphql", "fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "default_alias_on_inline_fragment.graphql", "fragment_alias_directive/fixtures/default_alias_on_inline_fragment.expected", input, expected).await; } -#[test] -fn default_alias_on_inline_fragment_without_type_invalid() { 
+#[tokio::test] +async fn default_alias_on_inline_fragment_without_type_invalid() { let input = include_str!("fragment_alias_directive/fixtures/default_alias_on_inline_fragment_without_type.invalid.graphql"); let expected = include_str!("fragment_alias_directive/fixtures/default_alias_on_inline_fragment_without_type.invalid.expected"); - test_fixture(transform_fixture, "default_alias_on_inline_fragment_without_type.invalid.graphql", "fragment_alias_directive/fixtures/default_alias_on_inline_fragment_without_type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "default_alias_on_inline_fragment_without_type.invalid.graphql", "fragment_alias_directive/fixtures/default_alias_on_inline_fragment_without_type.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_spread_into_supertype_without_alias_invalid() { + let input = include_str!("fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment_spread_into_supertype_without_alias.invalid.graphql", "fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_spread_into_supertype_without_alias_suppressed() { + let input = include_str!("fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.expected"); + test_fixture(transform_fixture, file!(), "fragment_spread_into_supertype_without_alias_suppressed.graphql", "fragment_alias_directive/fixtures/fragment_spread_into_supertype_without_alias_suppressed.expected", input, expected).await; +} + +#[tokio::test] +async fn 
inline_fragment_spread_into_supertype_without_alias() { + let input = include_str!("fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.expected"); + test_fixture(transform_fixture, file!(), "inline_fragment_spread_into_supertype_without_alias.graphql", "fragment_alias_directive/fixtures/inline_fragment_spread_into_supertype_without_alias.expected", input, expected).await; +} + +#[tokio::test] +async fn skip_fragment_spread_without_alias_invalid() { + let input = include_str!("fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.expected"); + test_fixture(transform_fixture, file!(), "skip_fragment_spread_without_alias.invalid.graphql", "fragment_alias_directive/fixtures/skip_fragment_spread_without_alias.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn skip_fragment_spread_without_alias_suppressed() { + let input = include_str!("fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.expected"); + test_fixture(transform_fixture, file!(), "skip_fragment_spread_without_alias_suppressed.graphql", "fragment_alias_directive/fixtures/skip_fragment_spread_without_alias_suppressed.expected", input, expected).await; +} + +#[tokio::test] +async fn skip_inline_fragment_without_alias() { + let input = include_str!("fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.graphql"); + let expected = include_str!("fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.expected"); + test_fixture(transform_fixture, file!(), "skip_inline_fragment_without_alias.graphql", 
"fragment_alias_directive/fixtures/skip_inline_fragment_without_alias.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata.rs b/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata.rs new file mode 100644 index 0000000000000..8b64bac9f068f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata.rs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::FeatureFlags; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::generate_data_driven_dependency_metadata; +use relay_transforms::transform_match; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + let flags = FeatureFlags::default(); + let program = transform_match(program, &flags, Default::default(), Default::default())?; + let program = generate_data_driven_dependency_metadata(&program); + Ok(program) + }) +} diff --git a/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata/mod.rs b/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata/mod.rs deleted file mode 100644 index 7927b3c64c280..0000000000000 --- a/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::FeatureFlags; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::generate_data_driven_dependency_metadata; -use relay_transforms::transform_match; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let flags = FeatureFlags::default(); - let program = transform_match(program, &flags, Default::default())?; - let program = generate_data_driven_dependency_metadata(&program); - Ok(program) - }) -} diff --git a/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata_test.rs b/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata_test.rs index 1a3996ddd7d8f..7bd2c81f0f1ca 100644 --- a/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata_test.rs +++ b/compiler/crates/relay-transforms/tests/generate_data_driven_dependency_metadata_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<378ad574a05b62abf204013f996c8c8f>> + * @generated SignedSource<<7ffddcb334a047ed5d5b7f8b9e045c6e>> */ mod generate_data_driven_dependency_metadata; @@ -12,65 +12,65 @@ mod generate_data_driven_dependency_metadata; use generate_data_driven_dependency_metadata::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn match_on_child_of_plural() { +#[tokio::test] +async fn match_on_child_of_plural() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.expected"); - test_fixture(transform_fixture, "match-on-child-of-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-on-child-of-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.expected", input, expected).await; } -#[test] -fn match_with_extra_args() { +#[tokio::test] +async fn match_with_extra_args() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.expected"); - test_fixture(transform_fixture, "match-with-extra-args.graphql", "generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-with-extra-args.graphql", "generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.expected", input, expected).await; } -#[test] -fn module_without_match() { +#[tokio::test] +async fn module_without_match() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/module-without-match.graphql"); let expected = 
include_str!("generate_data_driven_dependency_metadata/fixtures/module-without-match.expected"); - test_fixture(transform_fixture, "module-without-match.graphql", "generate_data_driven_dependency_metadata/fixtures/module-without-match.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-without-match.graphql", "generate_data_driven_dependency_metadata/fixtures/module-without-match.expected", input, expected).await; } -#[test] -fn query_with_direct_and_transitive_module_dep() { +#[tokio::test] +async fn query_with_direct_and_transitive_module_dep() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-direct-and-transitive-module-dep.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-direct-and-transitive-module-dep.expected"); - test_fixture(transform_fixture, "query-with-direct-and-transitive-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-direct-and-transitive-module-dep.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-direct-and-transitive-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-direct-and-transitive-module-dep.expected", input, expected).await; } -#[test] -fn query_with_direct_module_dep() { +#[tokio::test] +async fn query_with_direct_module_dep() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-direct-module-dep.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-direct-module-dep.expected"); - test_fixture(transform_fixture, "query-with-direct-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-direct-module-dep.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-direct-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-direct-module-dep.expected", input, 
expected).await; } -#[test] -fn query_with_transitive_module_dep() { +#[tokio::test] +async fn query_with_transitive_module_dep() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-transitive-module-dep.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/query-with-transitive-module-dep.expected"); - test_fixture(transform_fixture, "query-with-transitive-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-transitive-module-dep.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-transitive-module-dep.graphql", "generate_data_driven_dependency_metadata/fixtures/query-with-transitive-module-dep.expected", input, expected).await; } -#[test] -fn relay_match_on_interface() { +#[tokio::test] +async fn relay_match_on_interface() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.expected"); - test_fixture(transform_fixture, "relay-match-on-interface.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-match-on-interface.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.expected", input, expected).await; } -#[test] -fn relay_match_on_union() { +#[tokio::test] +async fn relay_match_on_union() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.expected"); - test_fixture(transform_fixture, "relay-match-on-union.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"relay-match-on-union.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.expected", input, expected).await; } -#[test] -fn relay_match_on_union_plural() { +#[tokio::test] +async fn relay_match_on_union_plural() { let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.graphql"); let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.expected"); - test_fixture(transform_fixture, "relay-match-on-union-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-match-on-union-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/generate_id_field.rs b/compiler/crates/relay-transforms/tests/generate_id_field.rs new file mode 100644 index 0000000000000..bbce9c0be4e42 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/generate_id_field.rs @@ -0,0 +1,16 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::generate_id_field; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + Ok(generate_id_field(program, &Default::default())) + }) +} diff --git a/compiler/crates/relay-transforms/tests/generate_id_field/mod.rs b/compiler/crates/relay-transforms/tests/generate_id_field/mod.rs deleted file mode 100644 index 3f9f98098b0c4..0000000000000 --- a/compiler/crates/relay-transforms/tests/generate_id_field/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. 
and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::generate_id_field; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - Ok(generate_id_field(program, &Default::default())) - }) -} diff --git a/compiler/crates/relay-transforms/tests/generate_id_field_test.rs b/compiler/crates/relay-transforms/tests/generate_id_field_test.rs index dae336431a871..322414df08620 100644 --- a/compiler/crates/relay-transforms/tests/generate_id_field_test.rs +++ b/compiler/crates/relay-transforms/tests/generate_id_field_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<63e612da0245a0cd1493300c56063de9>> + * @generated SignedSource<<0c48dc8497a4f85ba11723da2e93dfdf>> */ mod generate_id_field; @@ -12,30 +12,30 @@ mod generate_id_field; use generate_id_field::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn abstract_plural() { +#[tokio::test] +async fn abstract_plural() { let input = include_str!("generate_id_field/fixtures/abstract-plural.graphql"); let expected = include_str!("generate_id_field/fixtures/abstract-plural.expected"); - test_fixture(transform_fixture, "abstract-plural.graphql", "generate_id_field/fixtures/abstract-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "abstract-plural.graphql", "generate_id_field/fixtures/abstract-plural.expected", input, expected).await; } -#[test] -fn node_union() { +#[tokio::test] +async fn node_union() { let input = include_str!("generate_id_field/fixtures/node-union.graphql"); let expected = include_str!("generate_id_field/fixtures/node-union.expected"); - test_fixture(transform_fixture, 
"node-union.graphql", "generate_id_field/fixtures/node-union.expected", input, expected); + test_fixture(transform_fixture, file!(), "node-union.graphql", "generate_id_field/fixtures/node-union.expected", input, expected).await; } -#[test] -fn non_node_union() { +#[tokio::test] +async fn non_node_union() { let input = include_str!("generate_id_field/fixtures/non-node-union.graphql"); let expected = include_str!("generate_id_field/fixtures/non-node-union.expected"); - test_fixture(transform_fixture, "non-node-union.graphql", "generate_id_field/fixtures/non-node-union.expected", input, expected); + test_fixture(transform_fixture, file!(), "non-node-union.graphql", "generate_id_field/fixtures/non-node-union.expected", input, expected).await; } -#[test] -fn query_with_fragment_variables() { +#[tokio::test] +async fn query_with_fragment_variables() { let input = include_str!("generate_id_field/fixtures/query-with-fragment-variables.graphql"); let expected = include_str!("generate_id_field/fixtures/query-with-fragment-variables.expected"); - test_fixture(transform_fixture, "query-with-fragment-variables.graphql", "generate_id_field/fixtures/query-with-fragment-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-fragment-variables.graphql", "generate_id_field/fixtures/query-with-fragment-variables.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/generate_live_query_metadata.rs b/compiler/crates/relay-transforms/tests/generate_live_query_metadata.rs new file mode 100644 index 0000000000000..217311685cd6c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/generate_live_query_metadata.rs @@ -0,0 +1,14 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::generate_live_query_metadata; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, generate_live_query_metadata) +} diff --git a/compiler/crates/relay-transforms/tests/generate_live_query_metadata/mod.rs b/compiler/crates/relay-transforms/tests/generate_live_query_metadata/mod.rs deleted file mode 100644 index 4beab14a0b305..0000000000000 --- a/compiler/crates/relay-transforms/tests/generate_live_query_metadata/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::generate_live_query_metadata; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, generate_live_query_metadata) -} diff --git a/compiler/crates/relay-transforms/tests/generate_live_query_metadata_test.rs b/compiler/crates/relay-transforms/tests/generate_live_query_metadata_test.rs index 1ae17d930136e..27c3a630f7a6b 100644 --- a/compiler/crates/relay-transforms/tests/generate_live_query_metadata_test.rs +++ b/compiler/crates/relay-transforms/tests/generate_live_query_metadata_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<9da1c41afc4ceb2bf703a95c2c85fc79>> + * @generated SignedSource<> */ mod generate_live_query_metadata; @@ -12,16 +12,16 @@ mod generate_live_query_metadata; use generate_live_query_metadata::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn live_by_config_id() { +#[tokio::test] +async fn live_by_config_id() { let input = include_str!("generate_live_query_metadata/fixtures/live_by_config_id.graphql"); let expected = include_str!("generate_live_query_metadata/fixtures/live_by_config_id.expected"); - test_fixture(transform_fixture, "live_by_config_id.graphql", "generate_live_query_metadata/fixtures/live_by_config_id.expected", input, expected); + test_fixture(transform_fixture, file!(), "live_by_config_id.graphql", "generate_live_query_metadata/fixtures/live_by_config_id.expected", input, expected).await; } -#[test] -fn live_by_polling_interval() { +#[tokio::test] +async fn live_by_polling_interval() { let input = include_str!("generate_live_query_metadata/fixtures/live_by_polling_interval.graphql"); let expected = include_str!("generate_live_query_metadata/fixtures/live_by_polling_interval.expected"); - test_fixture(transform_fixture, "live_by_polling_interval.graphql", "generate_live_query_metadata/fixtures/live_by_polling_interval.expected", input, expected); + test_fixture(transform_fixture, file!(), "live_by_polling_interval.graphql", "generate_live_query_metadata/fixtures/live_by_polling_interval.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects.rs b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects.rs new file mode 100644 index 0000000000000..28d342d69340d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects.rs @@ -0,0 +1,22 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_config::ProjectName; +use relay_config::SchemaConfig; +use relay_transforms::generate_relay_resolvers_operations_for_nested_objects; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + generate_relay_resolvers_operations_for_nested_objects( + ProjectName::default(), + program, + &SchemaConfig::default(), + ) + }) +} diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected index f5d29acb29f36..91bc14c7263b8 100644 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected @@ -30,13 +30,7 @@ query User__pop_star_name$normalization @__SplitOperationMetadata # SplitOperationMetadata { # derived_from: None, # location: output-type-client-type.graphql:81:94, -# parent_documents: { -# OperationDefinitionName( -# OperationDefinitionName( -# "User__pop_star_name$normalization", -# ), -# ), -# }, +# parent_documents: {}, # raw_response_type_generation_mode: Some( # AllFieldsRequired, # ), diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected index a95e1223862fc..7b2a45485c51f 100644 --- 
a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected @@ -37,13 +37,7 @@ query User__pop_star_name$normalization @__SplitOperationMetadata # SplitOperationMetadata { # derived_from: None, # location: output-type-with-client-interface.graphql:195:208, -# parent_documents: { -# OperationDefinitionName( -# OperationDefinitionName( -# "User__pop_star_name$normalization", -# ), -# ), -# }, +# parent_documents: {}, # raw_response_type_generation_mode: Some( # AllFieldsRequired, # ), diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected index 5931ba69b4e7e..b091b7e66b201 100644 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected @@ -43,13 +43,7 @@ query User__pop_star_name$normalization @__SplitOperationMetadata # SplitOperationMetadata { # derived_from: None, # location: output-type-with-interface-recursion-but-lying-client-type.graphql:547:560, -# parent_documents: { -# OperationDefinitionName( -# OperationDefinitionName( -# "User__pop_star_name$normalization", -# ), -# ), -# }, +# parent_documents: {}, # raw_response_type_generation_mode: Some( # AllFieldsRequired, # ), diff --git 
a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected index 5e5229c547d4f..512132c9c86c4 100644 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected @@ -40,13 +40,7 @@ query User__pop_star_name$normalization @__SplitOperationMetadata # SplitOperationMetadata { # derived_from: None, # location: output-type-with-relay-resolver-fields.graphql:204:217, -# parent_documents: { -# OperationDefinitionName( -# OperationDefinitionName( -# "User__pop_star_name$normalization", -# ), -# ), -# }, +# parent_documents: {}, # raw_response_type_generation_mode: Some( # AllFieldsRequired, # ), diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected index 7cc440ddf2c50..c2801529553f4 100644 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected @@ -32,7 +32,7 @@ extend type User { top_level: IStageName @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) } 
==================================== ERROR ==================================== -✖︎ No types implement the client interface IStageName. For a client interface to be used as a @RelayResolver @outputType, at least one Object type must implement the interface. +✖︎ No types implement the client interface IStageName. Interfaces returned by a @RelayResolver must have at least one concrete implementation. output-type-with-unimplemented-interface.invalid.graphql:7:11 6 │ @@ -41,7 +41,7 @@ extend type User { 8 │ value: String -✖︎ No types implement the client interface IStageName. For a client interface to be used as a @RelayResolver @outputType, at least one Object type must implement the interface. +✖︎ No types implement the client interface IStageName. Interfaces returned by a @RelayResolver must have at least one concrete implementation. output-type-with-unimplemented-interface.invalid.graphql:7:11 6 │ @@ -50,7 +50,7 @@ extend type User { 8 │ value: String -✖︎ No types implement the client interface IStageName. For a client interface to be used as a @RelayResolver @outputType, at least one Object type must implement the interface. +✖︎ No types implement the client interface IStageName. Interfaces returned by a @RelayResolver must have at least one concrete implementation. output-type-with-unimplemented-interface.invalid.graphql:7:11 6 │ diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/mod.rs b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/mod.rs deleted file mode 100644 index 249d0c5cdff58..0000000000000 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_config::SchemaConfig; -use relay_transforms::generate_relay_resolvers_operations_for_nested_objects; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - generate_relay_resolvers_operations_for_nested_objects(program, &SchemaConfig::default()) - }) -} diff --git a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects_test.rs b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects_test.rs index 4bef037599540..f97d964b49279 100644 --- a/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects_test.rs +++ b/compiler/crates/relay-transforms/tests/generate_relay_resolvers_operations_for_nested_objects_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<905ebd71110679c262b41e411ebc3fe4>> + * @generated SignedSource<> */ mod generate_relay_resolvers_operations_for_nested_objects; @@ -12,121 +12,121 @@ mod generate_relay_resolvers_operations_for_nested_objects; use generate_relay_resolvers_operations_for_nested_objects::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn output_type_client_type() { +#[tokio::test] +async fn output_type_client_type() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected"); - test_fixture(transform_fixture, "output-type-client-type.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-client-type.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-client-type.expected", input, expected).await; } -#[test] -fn output_type_input_invalid() { +#[tokio::test] +async fn output_type_input_invalid() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-input.invalid.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-input.invalid.expected"); - test_fixture(transform_fixture, "output-type-input.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-input.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-input.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-input.invalid.expected", input, expected).await; } -#[test] -fn output_type_scalar() { +#[tokio::test] +async fn output_type_scalar() { let input = 
include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-scalar.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-scalar.expected"); - test_fixture(transform_fixture, "output-type-scalar.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-scalar.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-scalar.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-scalar.expected", input, expected).await; } -#[test] -fn output_type_with_arguments() { +#[tokio::test] +async fn output_type_with_arguments() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-arguments.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-arguments.expected"); - test_fixture(transform_fixture, "output-type-with-arguments.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-arguments.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-arguments.expected", input, expected).await; } -#[test] -fn output_type_with_client_interface() { +#[tokio::test] +async fn output_type_with_client_interface() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected"); - test_fixture(transform_fixture, "output-type-with-client-interface.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "output-type-with-client-interface.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface.expected", input, expected).await; } -#[test] -fn output_type_with_client_interface_and_object_recursion_invalid() { +#[tokio::test] +async fn output_type_with_client_interface_and_object_recursion_invalid() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-and-object-recursion.invalid.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-and-object-recursion.invalid.expected"); - test_fixture(transform_fixture, "output-type-with-client-interface-and-object-recursion.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-and-object-recursion.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-client-interface-and-object-recursion.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-and-object-recursion.invalid.expected", input, expected).await; } -#[test] -fn output_type_with_client_interface_recursion_invalid() { +#[tokio::test] +async fn output_type_with_client_interface_recursion_invalid() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-recursion.invalid.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-recursion.invalid.expected"); - test_fixture(transform_fixture, "output-type-with-client-interface-recursion.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-recursion.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"output-type-with-client-interface-recursion.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-client-interface-recursion.invalid.expected", input, expected).await; } -#[test] -fn output_type_with_id() { +#[tokio::test] +async fn output_type_with_id() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-id.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-id.expected"); - test_fixture(transform_fixture, "output-type-with-id.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-id.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-id.expected", input, expected).await; } -#[test] -fn output_type_with_interface_recursion_but_lying_client_type() { +#[tokio::test] +async fn output_type_with_interface_recursion_but_lying_client_type() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected"); - test_fixture(transform_fixture, "output-type-with-interface-recursion-but-lying-client-type.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-interface-recursion-but-lying-client-type.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-interface-recursion-but-lying-client-type.expected", input, expected).await; } -#[test] -fn output_type_with_nested_recursion() { 
+#[tokio::test] +async fn output_type_with_nested_recursion() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-recursion.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-recursion.expected"); - test_fixture(transform_fixture, "output-type-with-nested-recursion.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-recursion.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-nested-recursion.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-recursion.expected", input, expected).await; } -#[test] -fn output_type_with_nested_server_object() { +#[tokio::test] +async fn output_type_with_nested_server_object() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-server-object.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-server-object.expected"); - test_fixture(transform_fixture, "output-type-with-nested-server-object.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-server-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-nested-server-object.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-nested-server-object.expected", input, expected).await; } -#[test] -fn output_type_with_recursion() { +#[tokio::test] +async fn output_type_with_recursion() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-recursion.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-recursion.expected"); - 
test_fixture(transform_fixture, "output-type-with-recursion.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-recursion.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-recursion.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-recursion.expected", input, expected).await; } -#[test] -fn output_type_with_relay_resolver_fields() { +#[tokio::test] +async fn output_type_with_relay_resolver_fields() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected"); - test_fixture(transform_fixture, "output-type-with-relay-resolver-fields.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-relay-resolver-fields.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-relay-resolver-fields.expected", input, expected).await; } -#[test] -fn output_type_with_server_interface_invalid() { +#[tokio::test] +async fn output_type_with_server_interface_invalid() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-interface.invalid.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-interface.invalid.expected"); - test_fixture(transform_fixture, "output-type-with-server-interface.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"output-type-with-server-interface.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-interface.invalid.expected", input, expected).await; } -#[test] -fn output_type_with_server_object() { +#[tokio::test] +async fn output_type_with_server_object() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-object.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-object.expected"); - test_fixture(transform_fixture, "output-type-with-server-object.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-server-object.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-server-object.expected", input, expected).await; } -#[test] -fn output_type_with_type_with_id() { +#[tokio::test] +async fn output_type_with_type_with_id() { let input = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-type-with-id.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-type-with-id.expected"); - test_fixture(transform_fixture, "output-type-with-type-with-id.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-type-with-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-type-with-id.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-type-with-id.expected", input, expected).await; } -#[test] -fn output_type_with_unimplemented_interface_invalid() { +#[tokio::test] +async fn output_type_with_unimplemented_interface_invalid() { let input = 
include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.graphql"); let expected = include_str!("generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected"); - test_fixture(transform_fixture, "output-type-with-unimplemented-interface.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "output-type-with-unimplemented-interface.invalid.graphql", "generate_relay_resolvers_operations_for_nested_objects/fixtures/output-type-with-unimplemented-interface.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/generate_typename.rs b/compiler/crates/relay-transforms/tests/generate_typename.rs new file mode 100644 index 0000000000000..f767821669e88 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/generate_typename.rs @@ -0,0 +1,14 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::generate_typename; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| Ok(generate_typename(program, false))) +} diff --git a/compiler/crates/relay-transforms/tests/generate_typename/mod.rs b/compiler/crates/relay-transforms/tests/generate_typename/mod.rs deleted file mode 100644 index 396434bcb7a7b..0000000000000 --- a/compiler/crates/relay-transforms/tests/generate_typename/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::generate_typename; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| Ok(generate_typename(program, false))) -} diff --git a/compiler/crates/relay-transforms/tests/generate_typename_test.rs b/compiler/crates/relay-transforms/tests/generate_typename_test.rs index b5a8a8f0564d2..30f21bdc29250 100644 --- a/compiler/crates/relay-transforms/tests/generate_typename_test.rs +++ b/compiler/crates/relay-transforms/tests/generate_typename_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<3cb2d87b70b16df6d256ff07d9daa83d>> + * @generated SignedSource<<8459432821418914ae65bda3f9c9dec3>> */ mod generate_typename; @@ -12,16 +12,16 @@ mod generate_typename; use generate_typename::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn type_name_does_not_exist() { +#[tokio::test] +async fn type_name_does_not_exist() { let input = include_str!("generate_typename/fixtures/type-name-does-not-exist.graphql"); let expected = include_str!("generate_typename/fixtures/type-name-does-not-exist.expected"); - test_fixture(transform_fixture, "type-name-does-not-exist.graphql", "generate_typename/fixtures/type-name-does-not-exist.expected", input, expected); + test_fixture(transform_fixture, file!(), "type-name-does-not-exist.graphql", "generate_typename/fixtures/type-name-does-not-exist.expected", input, expected).await; } -#[test] -fn type_name_exists() { +#[tokio::test] +async fn type_name_exists() { let input = include_str!("generate_typename/fixtures/type-name-exists.graphql"); let expected = 
include_str!("generate_typename/fixtures/type-name-exists.expected"); - test_fixture(transform_fixture, "type-name-exists.graphql", "generate_typename/fixtures/type-name-exists.expected", input, expected); + test_fixture(transform_fixture, file!(), "type-name-exists.graphql", "generate_typename/fixtures/type-name-exists.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/inline_data_fragment.rs b/compiler/crates/relay-transforms/tests/inline_data_fragment.rs new file mode 100644 index 0000000000000..e0f5c7132c846 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/inline_data_fragment.rs @@ -0,0 +1,14 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::inline_data_fragment; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { + apply_transform_for_test(fixture, inline_data_fragment) +} diff --git a/compiler/crates/relay-transforms/tests/inline_data_fragment/mod.rs b/compiler/crates/relay-transforms/tests/inline_data_fragment/mod.rs deleted file mode 100644 index b350842aeecda..0000000000000 --- a/compiler/crates/relay-transforms/tests/inline_data_fragment/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree.
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::inline_data_fragment; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, inline_data_fragment) -} diff --git a/compiler/crates/relay-transforms/tests/inline_data_fragment_test.rs b/compiler/crates/relay-transforms/tests/inline_data_fragment_test.rs index 02b88cafb2ce1..ecc004db1c375 100644 --- a/compiler/crates/relay-transforms/tests/inline_data_fragment_test.rs +++ b/compiler/crates/relay-transforms/tests/inline_data_fragment_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<0b5686d10624fd1700d69be68ba3f437>> + * @generated SignedSource<<478562e7f3611ecb9ca2e3f7e120762c>> */ mod inline_data_fragment; @@ -12,16 +12,16 @@ mod inline_data_fragment; use inline_data_fragment::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn recursive() { +#[tokio::test] +async fn recursive() { let input = include_str!("inline_data_fragment/fixtures/recursive.graphql"); let expected = include_str!("inline_data_fragment/fixtures/recursive.expected"); - test_fixture(transform_fixture, "recursive.graphql", "inline_data_fragment/fixtures/recursive.expected", input, expected); + test_fixture(transform_fixture, file!(), "recursive.graphql", "inline_data_fragment/fixtures/recursive.expected", input, expected).await; } -#[test] -fn variables() { +#[tokio::test] +async fn variables() { let input = include_str!("inline_data_fragment/fixtures/variables.graphql"); let expected = include_str!("inline_data_fragment/fixtures/variables.expected"); - test_fixture(transform_fixture, "variables.graphql", "inline_data_fragment/fixtures/variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "variables.graphql", "inline_data_fragment/fixtures/variables.expected", input, 
expected).await; } diff --git a/compiler/crates/relay-transforms/tests/inline_fragments.rs b/compiler/crates/relay-transforms/tests/inline_fragments.rs new file mode 100644 index 0000000000000..d92024fa1e47d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/inline_fragments.rs @@ -0,0 +1,22 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::inline_fragments; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + let next_program = inline_fragments(program); + assert_eq!(next_program.fragments().count(), 0); + assert_eq!( + next_program.operations().count(), + program.operations().count() + ); + Ok(next_program) + }) +} diff --git a/compiler/crates/relay-transforms/tests/inline_fragments/mod.rs b/compiler/crates/relay-transforms/tests/inline_fragments/mod.rs deleted file mode 100644 index cd3013d2cf55a..0000000000000 --- a/compiler/crates/relay-transforms/tests/inline_fragments/mod.rs +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::inline_fragments; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let next_program = inline_fragments(program); - assert_eq!(next_program.fragments().count(), 0); - assert_eq!( - next_program.operations().count(), - program.operations().count() - ); - Ok(next_program) - }) -} diff --git a/compiler/crates/relay-transforms/tests/inline_fragments_test.rs b/compiler/crates/relay-transforms/tests/inline_fragments_test.rs index f311d32b90932..595aaa16d1fdf 100644 --- a/compiler/crates/relay-transforms/tests/inline_fragments_test.rs +++ b/compiler/crates/relay-transforms/tests/inline_fragments_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<> */ mod inline_fragments; @@ -12,16 +12,16 @@ mod inline_fragments; use inline_fragments::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn inlines_nested_fragments() { +#[tokio::test] +async fn inlines_nested_fragments() { let input = include_str!("inline_fragments/fixtures/inlines-nested-fragments.graphql"); let expected = include_str!("inline_fragments/fixtures/inlines-nested-fragments.expected"); - test_fixture(transform_fixture, "inlines-nested-fragments.graphql", "inline_fragments/fixtures/inlines-nested-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "inlines-nested-fragments.graphql", "inline_fragments/fixtures/inlines-nested-fragments.expected", input, expected).await; } -#[test] -fn inlines_with_directive() { +#[tokio::test] +async fn inlines_with_directive() { let input = include_str!("inline_fragments/fixtures/inlines-with-directive.graphql"); let expected = include_str!("inline_fragments/fixtures/inlines-with-directive.expected"); - 
test_fixture(transform_fixture, "inlines-with-directive.graphql", "inline_fragments/fixtures/inlines-with-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "inlines-with-directive.graphql", "inline_fragments/fixtures/inlines-with-directive.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/mask.rs b/compiler/crates/relay-transforms/tests/mask.rs new file mode 100644 index 0000000000000..332b1540011f5 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/mask.rs @@ -0,0 +1,50 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_fragment; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::get_test_schema; +use relay_transforms::mask; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let schema = get_test_schema(); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = &mask(&program); + + assert_eq!( + next_program.fragments().count(), + program.fragments().count() + ); + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + let mut printed = next_program + .fragments() + .map(|def| { + format!( + "{}\n{:#?}", + print_fragment(&schema, def, printer_options.clone()), + def.used_global_variables + ) + }) + .collect::>(); + printed.sort(); + + Ok(printed.join("\n\n")) +} diff --git a/compiler/crates/relay-transforms/tests/mask/mod.rs 
b/compiler/crates/relay-transforms/tests/mask/mod.rs deleted file mode 100644 index 06265174f0089..0000000000000 --- a/compiler/crates/relay-transforms/tests/mask/mod.rs +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_fragment; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema; -use relay_transforms::mask; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let schema = get_test_schema(); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = &mask(&program); - - assert_eq!( - next_program.fragments().count(), - program.fragments().count() - ); - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - let mut printed = next_program - .fragments() - .map(|def| { - format!( - "{}\n{:#?}", - print_fragment(&schema, def, printer_options.clone()), - def.used_global_variables - ) - }) - .collect::>(); - printed.sort(); - - Ok(printed.join("\n\n")) -} diff --git a/compiler/crates/relay-transforms/tests/mask_test.rs b/compiler/crates/relay-transforms/tests/mask_test.rs index c6674a1ff7993..3ed637ee080bc 100644 --- a/compiler/crates/relay-transforms/tests/mask_test.rs +++ b/compiler/crates/relay-transforms/tests/mask_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<06cfcd9a6d5345cf8fba25d3382362fa>> + * @generated SignedSource<<823286f8733a4925af9a58997f2b03a6>> */ mod mask; @@ -12,16 +12,16 @@ mod mask; use mask::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn mask_mixed_null() { +#[tokio::test] +async fn mask_mixed_null() { let input = include_str!("mask/fixtures/mask-mixed-null.graphql"); let expected = include_str!("mask/fixtures/mask-mixed-null.expected"); - test_fixture(transform_fixture, "mask-mixed-null.graphql", "mask/fixtures/mask-mixed-null.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-mixed-null.graphql", "mask/fixtures/mask-mixed-null.expected", input, expected).await; } -#[test] -fn relay_mask_transform() { +#[tokio::test] +async fn relay_mask_transform() { let input = include_str!("mask/fixtures/relay-mask-transform.graphql"); let expected = include_str!("mask/fixtures/relay-mask-transform.expected"); - test_fixture(transform_fixture, "relay-mask-transform.graphql", "mask/fixtures/relay-mask-transform.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-mask-transform.graphql", "mask/fixtures/relay-mask-transform.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/match_transform.rs b/compiler/crates/relay-transforms/tests/match_transform.rs new file mode 100644 index 0000000000000..263344ccd37ef --- /dev/null +++ b/compiler/crates/relay-transforms/tests/match_transform.rs @@ -0,0 +1,18 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::FeatureFlags; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::transform_match; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { + let flags = FeatureFlags::default(); + apply_transform_for_test(fixture, |program| { + transform_match(program, &flags, Default::default(), Default::default()) + }) +} diff --git a/compiler/crates/relay-transforms/tests/match_transform/mod.rs b/compiler/crates/relay-transforms/tests/match_transform/mod.rs deleted file mode 100644 index 234904e278d0b..0000000000000 --- a/compiler/crates/relay-transforms/tests/match_transform/mod.rs +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::FeatureFlags; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_match; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { - let flags = FeatureFlags::default(); - apply_transform_for_test(fixture, |program| { - transform_match(program, &flags, Default::default()) - }) -} diff --git a/compiler/crates/relay-transforms/tests/match_transform_local.rs b/compiler/crates/relay-transforms/tests/match_transform_local.rs new file mode 100644 index 0000000000000..47c1dff650dcc --- /dev/null +++ b/compiler/crates/relay-transforms/tests/match_transform_local.rs @@ -0,0 +1,30 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree.
+ */ + +use common::FeatureFlags; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_config::DeferStreamInterface; +use relay_config::DynamicModuleProvider; +use relay_config::ModuleImportConfig; +use relay_transforms::transform_match; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let flags = FeatureFlags::default(); + let module_import_config = ModuleImportConfig { + dynamic_module_provider: Some(DynamicModuleProvider::JSResource), + }; + let defer_stream_interface = DeferStreamInterface::default(); + apply_transform_for_test(fixture, |program| { + transform_match( + program, + &flags, + module_import_config, + defer_stream_interface, + ) + }) +} diff --git a/compiler/crates/relay-transforms/tests/match_transform_local/mod.rs b/compiler/crates/relay-transforms/tests/match_transform_local/mod.rs deleted file mode 100644 index 70946a7fe5c2f..0000000000000 --- a/compiler/crates/relay-transforms/tests/match_transform_local/mod.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::FeatureFlags; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_config::DynamicModuleProvider; -use relay_config::ModuleImportConfig; -use relay_transforms::transform_match; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let flags = FeatureFlags::default(); - let module_import_config = ModuleImportConfig { - dynamic_module_provider: Some(DynamicModuleProvider::JSResource), - }; - apply_transform_for_test(fixture, |program| { - transform_match(program, &flags, module_import_config) - }) -} diff --git a/compiler/crates/relay-transforms/tests/match_transform_local_test.rs b/compiler/crates/relay-transforms/tests/match_transform_local_test.rs index 62b32dcf5be6d..7c0ea13b1205f 100644 --- a/compiler/crates/relay-transforms/tests/match_transform_local_test.rs +++ b/compiler/crates/relay-transforms/tests/match_transform_local_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<3f821e14128e35b76bc9d17c3ec8e96f>> + * @generated SignedSource<<1a87e4dd8afcddaaad9bfe4324a07ce6>> */ mod match_transform_local; @@ -12,16 +12,16 @@ mod match_transform_local; use match_transform_local::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn module_on_field_without_js() { +#[tokio::test] +async fn module_on_field_without_js() { let input = include_str!("match_transform_local/fixtures/module-on-field-without-js.graphql"); let expected = include_str!("match_transform_local/fixtures/module-on-field-without-js.expected"); - test_fixture(transform_fixture, "module-on-field-without-js.graphql", "match_transform_local/fixtures/module-on-field-without-js.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-on-field-without-js.graphql", "match_transform_local/fixtures/module-on-field-without-js.expected", input, expected).await; } -#[test] -fn module_without_match() { +#[tokio::test] +async fn module_without_match() { let input = include_str!("match_transform_local/fixtures/module-without-match.graphql"); let expected = include_str!("match_transform_local/fixtures/module-without-match.expected"); - test_fixture(transform_fixture, "module-without-match.graphql", "match_transform_local/fixtures/module-without-match.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-without-match.graphql", "match_transform_local/fixtures/module-without-match.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/match_transform_test.rs b/compiler/crates/relay-transforms/tests/match_transform_test.rs index 26ec56af366d3..84bff75cb2b9c 100644 --- a/compiler/crates/relay-transforms/tests/match_transform_test.rs +++ b/compiler/crates/relay-transforms/tests/match_transform_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<57bebdb3f789e63e238d5c603847f33b>> */ mod match_transform; @@ -12,100 +12,100 @@ mod match_transform; use match_transform::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn direct_js_field_invalid() { +#[tokio::test] +async fn direct_js_field_invalid() { let input = include_str!("match_transform/fixtures/direct-js-field.invalid.graphql"); let expected = include_str!("match_transform/fixtures/direct-js-field.invalid.expected"); - test_fixture(transform_fixture, "direct-js-field.invalid.graphql", "match_transform/fixtures/direct-js-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "direct-js-field.invalid.graphql", "match_transform/fixtures/direct-js-field.invalid.expected", input, expected).await; } -#[test] -fn invalid_match_type() { +#[tokio::test] +async fn invalid_match_type() { let input = include_str!("match_transform/fixtures/invalid-match-type.graphql"); let expected = include_str!("match_transform/fixtures/invalid-match-type.expected"); - test_fixture(transform_fixture, "invalid-match-type.graphql", "match_transform/fixtures/invalid-match-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-match-type.graphql", "match_transform/fixtures/invalid-match-type.expected", input, expected).await; } -#[test] -fn invalid_no_matches() { +#[tokio::test] +async fn invalid_no_matches() { let input = include_str!("match_transform/fixtures/invalid-no-matches.graphql"); let expected = include_str!("match_transform/fixtures/invalid-no-matches.expected"); - test_fixture(transform_fixture, "invalid-no-matches.graphql", "match_transform/fixtures/invalid-no-matches.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-no-matches.graphql", "match_transform/fixtures/invalid-no-matches.expected", input, expected).await; } -#[test] -fn invalid_non_empty_selections() { +#[tokio::test] +async fn 
invalid_non_empty_selections() { let input = include_str!("match_transform/fixtures/invalid-non-empty-selections.graphql"); let expected = include_str!("match_transform/fixtures/invalid-non-empty-selections.expected"); - test_fixture(transform_fixture, "invalid-non-empty-selections.graphql", "match_transform/fixtures/invalid-non-empty-selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-non-empty-selections.graphql", "match_transform/fixtures/invalid-non-empty-selections.expected", input, expected).await; } -#[test] -fn match_on_child_of_plural() { +#[tokio::test] +async fn match_on_child_of_plural() { let input = include_str!("match_transform/fixtures/match-on-child-of-plural.graphql"); let expected = include_str!("match_transform/fixtures/match-on-child-of-plural.expected"); - test_fixture(transform_fixture, "match-on-child-of-plural.graphql", "match_transform/fixtures/match-on-child-of-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-on-child-of-plural.graphql", "match_transform/fixtures/match-on-child-of-plural.expected", input, expected).await; } -#[test] -fn match_with_explicit_support_arg_invalid() { +#[tokio::test] +async fn match_with_explicit_support_arg_invalid() { let input = include_str!("match_transform/fixtures/match-with-explicit-support-arg.invalid.graphql"); let expected = include_str!("match_transform/fixtures/match-with-explicit-support-arg.invalid.expected"); - test_fixture(transform_fixture, "match-with-explicit-support-arg.invalid.graphql", "match_transform/fixtures/match-with-explicit-support-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-with-explicit-support-arg.invalid.graphql", "match_transform/fixtures/match-with-explicit-support-arg.invalid.expected", input, expected).await; } -#[test] -fn match_with_extra_args() { +#[tokio::test] +async fn match_with_extra_args() { let input = 
include_str!("match_transform/fixtures/match-with-extra-args.graphql"); let expected = include_str!("match_transform/fixtures/match-with-extra-args.expected"); - test_fixture(transform_fixture, "match-with-extra-args.graphql", "match_transform/fixtures/match-with-extra-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-with-extra-args.graphql", "match_transform/fixtures/match-with-extra-args.expected", input, expected).await; } -#[test] -fn module_on_inline_data_invalid() { +#[tokio::test] +async fn module_on_inline_data_invalid() { let input = include_str!("match_transform/fixtures/module-on-inline-data.invalid.graphql"); let expected = include_str!("match_transform/fixtures/module-on-inline-data.invalid.expected"); - test_fixture(transform_fixture, "module-on-inline-data.invalid.graphql", "match_transform/fixtures/module-on-inline-data.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-on-inline-data.invalid.graphql", "match_transform/fixtures/module-on-inline-data.invalid.expected", input, expected).await; } -#[test] -fn module_with_other_directives_invalid() { +#[tokio::test] +async fn module_with_other_directives_invalid() { let input = include_str!("match_transform/fixtures/module-with-other-directives.invalid.graphql"); let expected = include_str!("match_transform/fixtures/module-with-other-directives.invalid.expected"); - test_fixture(transform_fixture, "module-with-other-directives.invalid.graphql", "match_transform/fixtures/module-with-other-directives.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-with-other-directives.invalid.graphql", "match_transform/fixtures/module-with-other-directives.invalid.expected", input, expected).await; } -#[test] -fn module_without_match() { +#[tokio::test] +async fn module_without_match() { let input = include_str!("match_transform/fixtures/module-without-match.graphql"); let expected = 
include_str!("match_transform/fixtures/module-without-match.expected"); - test_fixture(transform_fixture, "module-without-match.graphql", "match_transform/fixtures/module-without-match.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-without-match.graphql", "match_transform/fixtures/module-without-match.expected", input, expected).await; } -#[test] -fn no_server_supported_arg_invalid() { +#[tokio::test] +async fn no_server_supported_arg_invalid() { let input = include_str!("match_transform/fixtures/no-server-supported-arg.invalid.graphql"); let expected = include_str!("match_transform/fixtures/no-server-supported-arg.invalid.expected"); - test_fixture(transform_fixture, "no-server-supported-arg.invalid.graphql", "match_transform/fixtures/no-server-supported-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-server-supported-arg.invalid.graphql", "match_transform/fixtures/no-server-supported-arg.invalid.expected", input, expected).await; } -#[test] -fn relay_match_on_interface() { +#[tokio::test] +async fn relay_match_on_interface() { let input = include_str!("match_transform/fixtures/relay-match-on-interface.graphql"); let expected = include_str!("match_transform/fixtures/relay-match-on-interface.expected"); - test_fixture(transform_fixture, "relay-match-on-interface.graphql", "match_transform/fixtures/relay-match-on-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-match-on-interface.graphql", "match_transform/fixtures/relay-match-on-interface.expected", input, expected).await; } -#[test] -fn relay_match_on_union() { +#[tokio::test] +async fn relay_match_on_union() { let input = include_str!("match_transform/fixtures/relay-match-on-union.graphql"); let expected = include_str!("match_transform/fixtures/relay-match-on-union.expected"); - test_fixture(transform_fixture, "relay-match-on-union.graphql", "match_transform/fixtures/relay-match-on-union.expected", 
input, expected); + test_fixture(transform_fixture, file!(), "relay-match-on-union.graphql", "match_transform/fixtures/relay-match-on-union.expected", input, expected).await; } -#[test] -fn relay_match_on_union_plural() { +#[tokio::test] +async fn relay_match_on_union_plural() { let input = include_str!("match_transform/fixtures/relay-match-on-union-plural.graphql"); let expected = include_str!("match_transform/fixtures/relay-match-on-union-plural.expected"); - test_fixture(transform_fixture, "relay-match-on-union-plural.graphql", "match_transform/fixtures/relay-match-on-union-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-match-on-union-plural.graphql", "match_transform/fixtures/relay-match-on-union-plural.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform.rs b/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform.rs new file mode 100644 index 0000000000000..c32af8b98d375 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform.rs @@ -0,0 +1,16 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::provided_variable_fragment_transform; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + provided_variable_fragment_transform(program) + }) +} diff --git a/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform/mod.rs b/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform/mod.rs deleted file mode 100644 index e3c60eaaf930c..0000000000000 --- a/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::provided_variable_fragment_transform; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - provided_variable_fragment_transform(program) - }) -} diff --git a/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform_test.rs b/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform_test.rs index b6c8551bdd1e7..16b25e17d3e43 100644 --- a/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform_test.rs +++ b/compiler/crates/relay-transforms/tests/provided_variable_fragment_transform_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod provided_variable_fragment_transform; @@ -12,51 +12,51 @@ mod provided_variable_fragment_transform; use provided_variable_fragment_transform::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn anon_fragment_spread() { +#[tokio::test] +async fn anon_fragment_spread() { let input = include_str!("provided_variable_fragment_transform/fixtures/anon_fragment_spread.graphql"); let expected = include_str!("provided_variable_fragment_transform/fixtures/anon_fragment_spread.expected"); - test_fixture(transform_fixture, "anon_fragment_spread.graphql", "provided_variable_fragment_transform/fixtures/anon_fragment_spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "anon_fragment_spread.graphql", "provided_variable_fragment_transform/fixtures/anon_fragment_spread.expected", input, expected).await; } -#[test] -fn conflict_warn_infrequent_definitions() { +#[tokio::test] +async fn conflict_warn_infrequent_definitions() { let input = include_str!("provided_variable_fragment_transform/fixtures/conflict_warn_infrequent_definitions.graphql"); let expected = include_str!("provided_variable_fragment_transform/fixtures/conflict_warn_infrequent_definitions.expected"); - test_fixture(transform_fixture, "conflict_warn_infrequent_definitions.graphql", "provided_variable_fragment_transform/fixtures/conflict_warn_infrequent_definitions.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflict_warn_infrequent_definitions.graphql", "provided_variable_fragment_transform/fixtures/conflict_warn_infrequent_definitions.expected", input, expected).await; } -#[test] -fn conflicting_modules_invalid() { +#[tokio::test] +async fn conflicting_modules_invalid() { let input = include_str!("provided_variable_fragment_transform/fixtures/conflicting_modules_invalid.graphql"); let expected = 
include_str!("provided_variable_fragment_transform/fixtures/conflicting_modules_invalid.expected"); - test_fixture(transform_fixture, "conflicting_modules_invalid.graphql", "provided_variable_fragment_transform/fixtures/conflicting_modules_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting_modules_invalid.graphql", "provided_variable_fragment_transform/fixtures/conflicting_modules_invalid.expected", input, expected).await; } -#[test] -fn conflicting_types_invalid() { +#[tokio::test] +async fn conflicting_types_invalid() { let input = include_str!("provided_variable_fragment_transform/fixtures/conflicting_types_invalid.graphql"); let expected = include_str!("provided_variable_fragment_transform/fixtures/conflicting_types_invalid.expected"); - test_fixture(transform_fixture, "conflicting_types_invalid.graphql", "provided_variable_fragment_transform/fixtures/conflicting_types_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting_types_invalid.graphql", "provided_variable_fragment_transform/fixtures/conflicting_types_invalid.expected", input, expected).await; } -#[test] -fn multiple_fragments() { +#[tokio::test] +async fn multiple_fragments() { let input = include_str!("provided_variable_fragment_transform/fixtures/multiple_fragments.graphql"); let expected = include_str!("provided_variable_fragment_transform/fixtures/multiple_fragments.expected"); - test_fixture(transform_fixture, "multiple_fragments.graphql", "provided_variable_fragment_transform/fixtures/multiple_fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple_fragments.graphql", "provided_variable_fragment_transform/fixtures/multiple_fragments.expected", input, expected).await; } -#[test] -fn rename_provided_variables() { +#[tokio::test] +async fn rename_provided_variables() { let input = include_str!("provided_variable_fragment_transform/fixtures/rename_provided_variables.graphql"); let 
expected = include_str!("provided_variable_fragment_transform/fixtures/rename_provided_variables.expected"); - test_fixture(transform_fixture, "rename_provided_variables.graphql", "provided_variable_fragment_transform/fixtures/rename_provided_variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "rename_provided_variables.graphql", "provided_variable_fragment_transform/fixtures/rename_provided_variables.expected", input, expected).await; } -#[test] -fn single_fragment() { +#[tokio::test] +async fn single_fragment() { let input = include_str!("provided_variable_fragment_transform/fixtures/single_fragment.graphql"); let expected = include_str!("provided_variable_fragment_transform/fixtures/single_fragment.expected"); - test_fixture(transform_fixture, "single_fragment.graphql", "provided_variable_fragment_transform/fixtures/single_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "single_fragment.graphql", "provided_variable_fragment_transform/fixtures/single_fragment.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment.rs b/compiler/crates/relay-transforms/tests/refetchable_fragment.rs new file mode 100644 index 0000000000000..0d3d7988f2f62 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment.rs @@ -0,0 +1,25 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_config::DeferStreamInterface; +use relay_transforms::transform_connections; +use relay_transforms::transform_refetchable_fragment; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + let program = transform_connections( + program, + &ConnectionInterface::default(), + &DeferStreamInterface::default(), + ); + let base_fragments = Default::default(); + transform_refetchable_fragment(&program, &Default::default(), &base_fragments, false) + }) +} diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected index 4926ff054656e..3b47429b478f6 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected @@ -43,8 +43,11 @@ fragment RefetchableFragment on Actor @refetchable(queryName: "RefetchableFragme # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-without-id.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-without-id.expected index 24fcc9c8c238e..e9cc99a3138ca 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-without-id.expected +++ 
b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-interface-without-id.expected @@ -26,8 +26,11 @@ fragment RefetchableFragment on AllConcreteTypesImplementNode @refetchable(query # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected index 41b3809f57666..96227bdc60b02 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected @@ -42,8 +42,11 @@ fragment RefetchableFragment on Node @refetchable(queryName: "RefetchableFragmen # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface.expected index 56c23e5b6356e..719edd1457366 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-node-interface.expected @@ -43,8 +43,11 @@ fragment RefetchableFragment on Node @refetchable(queryName: "RefetchableFragmen # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# 
identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected index 9dd87caa451ef..672e971ecbd52 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected @@ -41,8 +41,11 @@ fragment RefetchableFragment on NonNodeStory @refetchable(queryName: "Refetchabl # path: [ # "fetch__NonNodeStory", # ], -# identifier_field: Some( -# "fetch_id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "fetch_id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected index 3575ea631bc28..3e7a1933617d2 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected @@ -41,8 +41,11 @@ fragment RefetchableFragment on User @refetchable(queryName: "RefetchableFragmen # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected 
b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected index 90fe9a85f5572..1b53815a0f20a 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected @@ -41,8 +41,11 @@ fragment RefetchableFragment on User @refetchable(queryName: "RefetchableFragmen # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected index 5c5d20f3a1dd9..27799c67a20d0 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected @@ -68,7 +68,7 @@ fragment RefetchableFragment on Query @refetchable(queryName: "RefetchableFragme # "RefetchableFragmentQuery", # ), # path: [], -# identifier_field: None, +# identifier_info: None, # } { node(id: $id) { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query.expected index d0f990fdaac31..7153d53d31403 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-query.expected @@ -44,7 +44,7 @@ fragment RefetchableFragment on Query @refetchable(queryName: 
"RefetchableFragme # "RefetchableFragmentQuery", # ), # path: [], -# identifier_field: None, +# identifier_info: None, # } { node(id: $id) { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-viewer.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-viewer.expected index 09f0c9d6ac7f7..37dd9ffe8e551 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-viewer.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-on-viewer.expected @@ -42,7 +42,7 @@ fragment RefetchableFragment on Viewer @refetchable(queryName: "RefetchableFragm # path: [ # "viewer", # ], -# identifier_field: None, +# identifier_info: None, # } { actor { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected index 125a1ff64e197..deb5551f00753 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected @@ -53,8 +53,11 @@ fragment RefetchableFragment on User @refetchable(queryName: "RefetchableFragmen # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-query.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-query.expected index 6ed24b57b60ad..360dd792018e8 100644 --- 
a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-query.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-query.expected @@ -54,7 +54,7 @@ fragment RefetchableFragment on Query @refetchable(queryName: "RefetchableFragme # "RefetchableFragmentQuery", # ), # path: [], -# identifier_field: None, +# identifier_info: None, # } { node(id: $id) { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected index fc3f6e3900796..1205eb4c93672 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected @@ -54,7 +54,7 @@ fragment RefetchableFragment on Viewer @refetchable(queryName: "RefetchableFragm # path: [ # "viewer", # ], -# identifier_field: None, +# identifier_info: None, # } { actor { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected index 1b6b932b5b44d..c880926364b69 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected @@ -78,8 +78,11 @@ fragment PaginationFragment on Node @refetchable(queryName: "RefetchableFragment # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: 
"id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected index be80f33782b6b..4d7f7a261c324 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected @@ -68,8 +68,11 @@ fragment PaginationFragment on Node @refetchable(queryName: "RefetchableFragment # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -77,7 +80,7 @@ fragment PaginationFragment on Node @refetchable(queryName: "RefetchableFragment ... 
on User { name friends(after: $cursor, first: $count) @__clientField(key: "PaginationFragment_friends", handle: "connection", filters: null, dynamicKey_UNSTABLE: null) { - edges @stream(label: "PaginationFragment_friends", initial_count: 1) { + edges @stream(label: "PaginationFragment_friends", initialCount: 1) { node { id } diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected index 37eb65f619c1e..a6264c1c6394b 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected @@ -68,8 +68,11 @@ fragment PaginationFragment on Node @refetchable(queryName: "RefetchableFragment # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected index cd1ca7de5cb43..30357a05c64a1 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected @@ -40,8 +40,11 @@ fragment RefetchableFragment on RefetchableInterface @refetchable(queryName: "Re # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: 
"id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected index e847c8de938fb..6c5666e3fb179 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected @@ -39,8 +39,11 @@ fragment RefetchableFragmentFoo on RefetchableInterfaceFoo @refetchable(queryNam # path: [ # "node", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected index 84d3aef63c5b2..2995e4f9dd26d 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected @@ -44,8 +44,11 @@ fragment RefetchableFragment on RefetchableInterface @refetchable(queryName: "Re # path: [ # "fetch__RefetchableInterface", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface.expected 
b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface.expected index 111bc31ee4b08..35264388f9240 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface.expected +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment/fixtures/refetchable-interface.expected @@ -68,8 +68,11 @@ fragment RefetchableFragment on RefetchableInterface @refetchable(queryName: "Re # path: [ # "fetch__RefetchableInterface", # ], -# identifier_field: Some( -# "id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "id", +# identifier_query_variable_name: "id", +# }, # ), # } { @@ -85,8 +88,11 @@ fragment RefetchableFragment2 on RefetchableInterface2 @refetchable(queryName: " # path: [ # "fetch__RefetchableInterface2", # ], -# identifier_field: Some( -# "not_id", +# identifier_info: Some( +# RefetchableIdentifierInfo { +# identifier_field: "not_id", +# identifier_query_variable_name: "id", +# }, # ), # } { diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment/mod.rs b/compiler/crates/relay-transforms/tests/refetchable_fragment/mod.rs deleted file mode 100644 index e80c437cc1547..0000000000000 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_connections; -use relay_transforms::transform_refetchable_fragment; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let program = transform_connections(program, &ConnectionInterface::default()); - let base_fragments = Default::default(); - transform_refetchable_fragment(&program, &Default::default(), &base_fragments, false) - }) -} diff --git a/compiler/crates/relay-transforms/tests/refetchable_fragment_test.rs b/compiler/crates/relay-transforms/tests/refetchable_fragment_test.rs index 1a4cd55cfc1b5..f8da9737b5468 100644 --- a/compiler/crates/relay-transforms/tests/refetchable_fragment_test.rs +++ b/compiler/crates/relay-transforms/tests/refetchable_fragment_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<5d6010d9b0356775854a925e90881577>> + * @generated SignedSource<> */ mod refetchable_fragment; @@ -12,191 +12,191 @@ mod refetchable_fragment; use refetchable_fragment::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_on_interface_which_implementations_implement_node() { +#[tokio::test] +async fn fragment_on_interface_which_implementations_implement_node() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected"); - test_fixture(transform_fixture, "fragment-on-interface-which-implementations-implement-node.graphql", "refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-interface-which-implementations-implement-node.graphql", "refetchable_fragment/fixtures/fragment-on-interface-which-implementations-implement-node.expected", input, expected).await; } -#[test] -fn fragment_on_interface_which_implementations_not_implement_node_invalid() { +#[tokio::test] +async fn fragment_on_interface_which_implementations_not_implement_node_invalid() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implementations-not-implement-node.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-which-implementations-not-implement-node.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-interface-which-implementations-not-implement-node.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-interface-which-implementations-not-implement-node.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-interface-which-implementations-not-implement-node.invalid.graphql", 
"refetchable_fragment/fixtures/fragment-on-interface-which-implementations-not-implement-node.invalid.expected", input, expected).await; } -#[test] -fn fragment_on_interface_without_id() { +#[tokio::test] +async fn fragment_on_interface_without_id() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-interface-without-id.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-interface-without-id.expected"); - test_fixture(transform_fixture, "fragment-on-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-interface-without-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-interface-without-id.expected", input, expected).await; } -#[test] -fn fragment_on_node_interface() { +#[tokio::test] +async fn fragment_on_node_interface() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface.expected", input, expected).await; } -#[test] -fn fragment_on_node_interface_without_id() { +#[tokio::test] +async fn fragment_on_node_interface_without_id() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface-without-id.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected"); - test_fixture(transform_fixture, "fragment-on-node-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "fragment-on-node-interface-without-id.graphql", "refetchable_fragment/fixtures/fragment-on-node-interface-without-id.expected", input, expected).await; } -#[test] -fn fragment_on_node_with_id_argument_used_invalid() { +#[tokio::test] +async fn fragment_on_node_with_id_argument_used_invalid() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-node-with-id-argument-used.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-node-with-id-argument-used.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-node-with-id-argument-used.invalid.expected", input, expected).await; } -#[test] -fn fragment_on_non_node_fetchable_type() { +#[tokio::test] +async fn fragment_on_non_node_fetchable_type() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected"); - test_fixture(transform_fixture, "fragment-on-non-node-fetchable-type.graphql", "refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-non-node-fetchable-type.graphql", "refetchable_fragment/fixtures/fragment-on-non-node-fetchable-type.expected", input, expected).await; } -#[test] -fn fragment_on_object_implementing_node_interface() { +#[tokio::test] +async fn fragment_on_object_implementing_node_interface() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.graphql"); let expected = 
include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected"); - test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface.expected", input, expected).await; } -#[test] -fn fragment_on_object_implementing_node_interface_with_alias_id() { +#[tokio::test] +async fn fragment_on_object_implementing_node_interface_with_alias_id() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected"); - test_fixture(transform_fixture, "fragment-on-object-implementing-node-interface-with-alias-id.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-object-implementing-node-interface-with-alias-id.graphql", "refetchable_fragment/fixtures/fragment-on-object-implementing-node-interface-with-alias-id.expected", input, expected).await; } -#[test] -fn fragment_on_query() { +#[tokio::test] +async fn fragment_on_query() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-query.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query.expected"); - test_fixture(transform_fixture, "fragment-on-query.graphql", "refetchable_fragment/fixtures/fragment-on-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query.graphql", "refetchable_fragment/fixtures/fragment-on-query.expected", input, expected).await; } -#[test] -fn 
fragment_on_query_with_cycle() { +#[tokio::test] +async fn fragment_on_query_with_cycle() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-query-with-cycle.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected"); - test_fixture(transform_fixture, "fragment-on-query-with-cycle.graphql", "refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query-with-cycle.graphql", "refetchable_fragment/fixtures/fragment-on-query-with-cycle.expected", input, expected).await; } -#[test] -fn fragment_on_query_without_query_name_invalid() { +#[tokio::test] +async fn fragment_on_query_without_query_name_invalid() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-query-without-query-name.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-query-without-query-name.invalid.graphql", "refetchable_fragment/fixtures/fragment-on-query-without-query-name.invalid.expected", input, expected).await; } -#[test] -fn fragment_on_viewer() { +#[tokio::test] +async fn fragment_on_viewer() { let input = include_str!("refetchable_fragment/fixtures/fragment-on-viewer.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-on-viewer.expected"); - test_fixture(transform_fixture, "fragment-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-on-viewer.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-on-viewer.expected", input, expected).await; } -#[test] -fn 
fragment_with_args_on_object_implementing_node_interface() { +#[tokio::test] +async fn fragment_with_args_on_object_implementing_node_interface() { let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected"); - test_fixture(transform_fixture, "fragment-with-args-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-args-on-object-implementing-node-interface.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-object-implementing-node-interface.expected", input, expected).await; } -#[test] -fn fragment_with_args_on_query() { +#[tokio::test] +async fn fragment_with_args_on_query() { let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-query.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-query.expected"); - test_fixture(transform_fixture, "fragment-with-args-on-query.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-args-on-query.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-query.expected", input, expected).await; } -#[test] -fn fragment_with_args_on_viewer() { +#[tokio::test] +async fn fragment_with_args_on_viewer() { let input = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-viewer.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected"); - test_fixture(transform_fixture, "fragment-with-args-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"fragment-with-args-on-viewer.graphql", "refetchable_fragment/fixtures/fragment-with-args-on-viewer.expected", input, expected).await; } -#[test] -fn fragment_with_relay_plural_invalid() { +#[tokio::test] +async fn fragment_with_relay_plural_invalid() { let input = include_str!("refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-relay-plural.invalid.graphql", "refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-relay-plural.invalid.graphql", "refetchable_fragment/fixtures/fragment-with-relay-plural.invalid.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection() { +#[tokio::test] +async fn refetchable_fragment_with_connection() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_bidirectional() { +#[tokio::test] +async fn refetchable_fragment_with_connection_bidirectional() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected"); - test_fixture(transform_fixture, 
"refetchable-fragment-with-connection-bidirectional.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-bidirectional.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-bidirectional.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_literal_count_invalid() { +#[tokio::test] +async fn refetchable_fragment_with_connection_literal_count_invalid() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-literal-count.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-literal-count.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-literal-count.invalid.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_no_cursor_invalid() { +#[tokio::test] +async fn refetchable_fragment_with_connection_no_cursor_invalid() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-no-cursor.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"refetchable-fragment-with-connection-no-cursor.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-no-cursor.invalid.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_unstable_path_invalid() { +#[tokio::test] +async fn refetchable_fragment_with_connection_unstable_path_invalid() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-unstable-path.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-unstable-path.invalid.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-unstable-path.invalid.expected", input, expected).await; } -#[test] -fn refetchable_fragment_with_connection_with_stream() { +#[tokio::test] +async fn refetchable_fragment_with_connection_with_stream() { let input = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected"); - test_fixture(transform_fixture, "refetchable-fragment-with-connection-with-stream.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment-with-connection-with-stream.graphql", "refetchable_fragment/fixtures/refetchable-fragment-with-connection-with-stream.expected", input, expected).await; } -#[test] -fn refetchable_interface() { +#[tokio::test] +async fn refetchable_interface() { let input = 
include_str!("refetchable_fragment/fixtures/refetchable-interface.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-interface.expected"); - test_fixture(transform_fixture, "refetchable-interface.graphql", "refetchable_fragment/fixtures/refetchable-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-interface.graphql", "refetchable_fragment/fixtures/refetchable-interface.expected", input, expected).await; } -#[test] -fn refetchable_interface_all_implementing_types_impl_node() { +#[tokio::test] +async fn refetchable_interface_all_implementing_types_impl_node() { let input = include_str!("refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected"); - test_fixture(transform_fixture, "refetchable-interface-all-implementing-types-impl-node.graphql", "refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-interface-all-implementing-types-impl-node.graphql", "refetchable_fragment/fixtures/refetchable-interface-all-implementing-types-impl-node.expected", input, expected).await; } -#[test] -fn refetchable_interface_but_no_implementing_types() { +#[tokio::test] +async fn refetchable_interface_but_no_implementing_types() { let input = include_str!("refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected"); - test_fixture(transform_fixture, "refetchable-interface-but-no-implementing-types.graphql", "refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"refetchable-interface-but-no-implementing-types.graphql", "refetchable_fragment/fixtures/refetchable-interface-but-no-implementing-types.expected", input, expected).await; } -#[test] -fn refetchable_interface_some_types_impl_node() { +#[tokio::test] +async fn refetchable_interface_some_types_impl_node() { let input = include_str!("refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.graphql"); let expected = include_str!("refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected"); - test_fixture(transform_fixture, "refetchable-interface-some-types-impl-node.graphql", "refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-interface-some-types-impl-node.graphql", "refetchable_fragment/fixtures/refetchable-interface-some-types-impl-node.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/relay_actor_change.rs b/compiler/crates/relay-transforms/tests/relay_actor_change.rs new file mode 100644 index 0000000000000..d0c4246742c85 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_actor_change.rs @@ -0,0 +1,17 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::FeatureFlag; +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::relay_actor_change_transform; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + relay_actor_change_transform(program, &FeatureFlag::Enabled) + }) +} diff --git a/compiler/crates/relay-transforms/tests/relay_actor_change/mod.rs b/compiler/crates/relay-transforms/tests/relay_actor_change/mod.rs deleted file mode 100644 index 9af7e26d8e01c..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_actor_change/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::FeatureFlag; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::relay_actor_change_transform; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - relay_actor_change_transform(program, &FeatureFlag::Enabled) - }) -} diff --git a/compiler/crates/relay-transforms/tests/relay_actor_change_test.rs b/compiler/crates/relay-transforms/tests/relay_actor_change_test.rs index 0b5f0e569127d..b62bbfe569a61 100644 --- a/compiler/crates/relay-transforms/tests/relay_actor_change_test.rs +++ b/compiler/crates/relay-transforms/tests/relay_actor_change_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<0ea689df4225c0f14523b6e1d62520dd>> + * @generated SignedSource<> */ mod relay_actor_change; @@ -12,37 +12,37 @@ mod relay_actor_change; use relay_actor_change::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn actor_change_invalid() { +#[tokio::test] +async fn actor_change_invalid() { let input = include_str!("relay_actor_change/fixtures/actor-change.invalid.graphql"); let expected = include_str!("relay_actor_change/fixtures/actor-change.invalid.expected"); - test_fixture(transform_fixture, "actor-change.invalid.graphql", "relay_actor_change/fixtures/actor-change.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change.invalid.graphql", "relay_actor_change/fixtures/actor-change.invalid.expected", input, expected).await; } -#[test] -fn actor_change_no_viewer_invalid() { +#[tokio::test] +async fn actor_change_no_viewer_invalid() { let input = include_str!("relay_actor_change/fixtures/actor-change-no-viewer.invalid.graphql"); let expected = include_str!("relay_actor_change/fixtures/actor-change-no-viewer.invalid.expected"); - test_fixture(transform_fixture, "actor-change-no-viewer.invalid.graphql", "relay_actor_change/fixtures/actor-change-no-viewer.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change-no-viewer.invalid.graphql", "relay_actor_change/fixtures/actor-change-no-viewer.invalid.expected", input, expected).await; } -#[test] -fn actor_change_pluarl_invalid() { +#[tokio::test] +async fn actor_change_pluarl_invalid() { let input = include_str!("relay_actor_change/fixtures/actor-change-pluarl.invalid.graphql"); let expected = include_str!("relay_actor_change/fixtures/actor-change-pluarl.invalid.expected"); - test_fixture(transform_fixture, "actor-change-pluarl.invalid.graphql", "relay_actor_change/fixtures/actor-change-pluarl.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"actor-change-pluarl.invalid.graphql", "relay_actor_change/fixtures/actor-change-pluarl.invalid.expected", input, expected).await; } -#[test] -fn actor_change_wrong_viewer_invalid() { +#[tokio::test] +async fn actor_change_wrong_viewer_invalid() { let input = include_str!("relay_actor_change/fixtures/actor-change-wrong-viewer.invalid.graphql"); let expected = include_str!("relay_actor_change/fixtures/actor-change-wrong-viewer.invalid.expected"); - test_fixture(transform_fixture, "actor-change-wrong-viewer.invalid.graphql", "relay_actor_change/fixtures/actor-change-wrong-viewer.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change-wrong-viewer.invalid.graphql", "relay_actor_change/fixtures/actor-change-wrong-viewer.invalid.expected", input, expected).await; } -#[test] -fn simple_query() { +#[tokio::test] +async fn simple_query() { let input = include_str!("relay_actor_change/fixtures/simple-query.graphql"); let expected = include_str!("relay_actor_change/fixtures/simple-query.expected"); - test_fixture(transform_fixture, "simple-query.graphql", "relay_actor_change/fixtures/simple-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple-query.graphql", "relay_actor_change/fixtures/simple-query.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.expected deleted file mode 100644 index 66596302bfcd4..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.expected +++ /dev/null @@ -1,22 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -query QueryWithRelayClientComponent { - neverNode { - ... 
on FakeNode { - id - } - ...ClientFragmentOnNonNodeUnion @relay_client_component - } -} - -fragment ClientFragmentOnNonNodeUnion on NonNode { - name -} -==================================== ERROR ==================================== -✖︎ @relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. If the fragment's type is a union type, all members of that union must implement Node. - - fragment-on-all-non-node-union.invalid.graphql:11:10 - 10 │ - 11 │ fragment ClientFragmentOnNonNodeUnion on NonNode { - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - 12 │ name diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.graphql deleted file mode 100644 index d34ce00f3094b..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.graphql +++ /dev/null @@ -1,13 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent { - neverNode { - ... on FakeNode { - id - } - ...ClientFragmentOnNonNodeUnion @relay_client_component - } -} - -fragment ClientFragmentOnNonNodeUnion on NonNode { - name -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.expected deleted file mode 100644 index 824ce48732866..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.expected +++ /dev/null @@ -1,22 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -query QueryWithRelayClientComponent($id: ID!) 
{ - nonNodeStory(id: $id) { - id - ...ClientFragmentOnFakeNode @relay_client_component - } -} - -fragment ClientFragmentOnFakeNode on NonNodeStory { - message { - text - } -} -==================================== ERROR ==================================== -✖︎ @relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. If the fragment's type is a union type, all members of that union must implement Node. - - fragment-on-fake-node-type.invalid.graphql:9:10 - 8 │ - 9 │ fragment ClientFragmentOnFakeNode on NonNodeStory { - │ ^^^^^^^^^^^^^^^^^^^^^^^^ - 10 │ message { diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.graphql deleted file mode 100644 index 24708fb64c2c0..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-fake-node-type.invalid.graphql +++ /dev/null @@ -1,13 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent($id: ID!) 
{ - nonNodeStory(id: $id) { - id - ...ClientFragmentOnFakeNode @relay_client_component - } -} - -fragment ClientFragmentOnFakeNode on NonNodeStory { - message { - text - } -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.expected deleted file mode 100644 index 910072499aa5b..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.expected +++ /dev/null @@ -1,21 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -query QueryWithRelayClientComponent { - me { - tasks { - ...ClientFragmentOnNonNode @relay_client_component - } - } -} - -fragment ClientFragmentOnNonNode on Task { - title -} -==================================== ERROR ==================================== -✖︎ @relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. If the fragment's type is a union type, all members of that union must implement Node. 
- - fragment-on-non-node-type.invalid.graphql:10:10 - 9 │ - 10 │ fragment ClientFragmentOnNonNode on Task { - │ ^^^^^^^^^^^^^^^^^^^^^^^ - 11 │ title diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.graphql deleted file mode 100644 index 38e10963327af..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-non-node-type.invalid.graphql +++ /dev/null @@ -1,12 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent { - me { - tasks { - ...ClientFragmentOnNonNode @relay_client_component - } - } -} - -fragment ClientFragmentOnNonNode on Task { - title -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.expected deleted file mode 100644 index 08c4ee6465a8b..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.expected +++ /dev/null @@ -1,38 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -query QueryWithRelayClientComponent { - maybeNode { - ...ClientFragment_one @relay_client_component - ...ClientFragment_two @relay_client_component - ...ClientFragment_three @relay_client_component - } -} - -fragment ClientFragment_one on NonNode { - name -} -fragment ClientFragment_two on FakeNode { - id -} -fragment ClientFragment_three on Story { - author { - name - } -} -==================================== ERROR ==================================== -✖︎ @relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. 
If the fragment's type is a union type, all members of that union must implement Node. - - fragment-on-some-non-node-union.invalid.graphql:10:10 - 9 │ - 10 │ fragment ClientFragment_one on NonNode { - │ ^^^^^^^^^^^^^^^^^^ - 11 │ name - - -✖︎ @relay_client_component can only be used on fragments on Viewer or Query, or whose type implements the Node interface. If the fragment's type is a union type, all members of that union must implement Node. - - fragment-on-some-non-node-union.invalid.graphql:13:10 - 12 │ } - 13 │ fragment ClientFragment_two on FakeNode { - │ ^^^^^^^^^^^^^^^^^^ - 14 │ id diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.graphql deleted file mode 100644 index 0019310249d46..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.graphql +++ /dev/null @@ -1,20 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent { - maybeNode { - ...ClientFragment_one @relay_client_component - ...ClientFragment_two @relay_client_component - ...ClientFragment_three @relay_client_component - } -} - -fragment ClientFragment_one on NonNode { - name -} -fragment ClientFragment_two on FakeNode { - id -} -fragment ClientFragment_three on Story { - author { - name - } -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.expected deleted file mode 100644 index ea9670b905e38..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.expected +++ /dev/null @@ -1,24 +0,0 @@ -==================================== INPUT ==================================== -# 
expected-to-throw -query QueryWithRelayClientComponent { - me { - id - ...ClientFragmentWithArguments @relay_client_component @arguments(scale: 1.5) - } -} - -fragment ClientFragmentWithArguments on User @argumentDefinitions(scale: {type: "Float"}) { - id - name - profile_picture(scale: $scale) { - uri - } -} -==================================== ERROR ==================================== -✖︎ @relay_client_component is not compatible with @arguments. - - fragment-with-arguments.invalid.graphql:5:71 - 4 │ id - 5 │ ...ClientFragmentWithArguments @relay_client_component @arguments(scale: 1.5) - │ ^^^^^ - 6 │ } diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.graphql deleted file mode 100644 index adc22ee44a424..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-arguments.invalid.graphql +++ /dev/null @@ -1,15 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent { - me { - id - ...ClientFragmentWithArguments @relay_client_component @arguments(scale: 1.5) - } -} - -fragment ClientFragmentWithArguments on User @argumentDefinitions(scale: {type: "Float"}) { - id - name - profile_picture(scale: $scale) { - uri - } -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.expected deleted file mode 100644 index 2a64563b1126f..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.expected +++ /dev/null @@ -1,21 +0,0 @@ -==================================== INPUT ==================================== -# expected-to-throw -query 
QueryWithRelayClientComponent { - me { - id - ...ClientFragment @relay_client_component @defer @relay(mask: false) - } -} - -fragment ClientFragment on User { - id - name -} -==================================== ERROR ==================================== -✖︎ @relay_client_component is not compatible with these directives: `defer`, `relay` - - fragment-with-incompatible-directives.invalid.graphql:5:8 - 4 │ id - 5 │ ...ClientFragment @relay_client_component @defer @relay(mask: false) - │ ^^^^^^^^^^^^^^ - 6 │ } diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.graphql deleted file mode 100644 index ed4f3a8c6ec0a..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.graphql +++ /dev/null @@ -1,12 +0,0 @@ -# expected-to-throw -query QueryWithRelayClientComponent { - me { - id - ...ClientFragment @relay_client_component @defer @relay(mask: false) - } -} - -fragment ClientFragment on User { - id - name -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.expected deleted file mode 100644 index b5c522493d58d..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.expected +++ /dev/null @@ -1,47 +0,0 @@ -==================================== INPUT ==================================== -fragment ServerComponentFragment on Query { - ...ClientFragment @relay_client_component -} - -fragment ClientFragment on Query { - viewer { - isFbEmployee - } -} -==================================== OUTPUT 
=================================== -query ClientFragment$normalization @__SplitOperationMetadata -# SplitOperationMetadata { -# derived_from: Some( -# FragmentDefinitionName( -# "ClientFragment", -# ), -# ), -# location: fragment-with-relay-client-component-on-query.graphql:100:114, -# parent_documents: { -# FragmentDefinitionName( -# FragmentDefinitionName( -# "ServerComponentFragment", -# ), -# ), -# }, -# raw_response_type_generation_mode: None, -# } - { - ...ClientFragment -} - -fragment ClientFragment on Query { - viewer { - isFbEmployee - } -} - -fragment ServerComponentFragment on Query @__RelayClientComponentMetadata -# RelayClientComponentMetadata { -# split_operation_filenames: [ -# "ClientFragment$normalization.graphql", -# ], -# } - { - ...ClientFragment @relay_client_component_server(module_id: "ClientFragment$normalization.graphql") -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.graphql deleted file mode 100644 index 98f06730607df..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-query.graphql +++ /dev/null @@ -1,9 +0,0 @@ -fragment ServerComponentFragment on Query { - ...ClientFragment @relay_client_component -} - -fragment ClientFragment on Query { - viewer { - isFbEmployee - } -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.expected deleted file mode 100644 index b8a32b309b3e2..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.expected +++ /dev/null @@ -1,43 
+0,0 @@ -==================================== INPUT ==================================== -fragment ServerComponentFragment on Viewer { - ...ClientFragment @relay_client_component -} - -fragment ClientFragment on Viewer { - isFbEmployee -} -==================================== OUTPUT =================================== -query ClientFragment$normalization @__SplitOperationMetadata -# SplitOperationMetadata { -# derived_from: Some( -# FragmentDefinitionName( -# "ClientFragment", -# ), -# ), -# location: fragment-with-relay-client-component-on-viewer.graphql:101:115, -# parent_documents: { -# FragmentDefinitionName( -# FragmentDefinitionName( -# "ServerComponentFragment", -# ), -# ), -# }, -# raw_response_type_generation_mode: None, -# } - { - ...ClientFragment -} - -fragment ClientFragment on Viewer { - isFbEmployee -} - -fragment ServerComponentFragment on Viewer @__RelayClientComponentMetadata -# RelayClientComponentMetadata { -# split_operation_filenames: [ -# "ClientFragment$normalization.graphql", -# ], -# } - { - ...ClientFragment @relay_client_component_server(module_id: "ClientFragment$normalization.graphql") -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.graphql deleted file mode 100644 index f77de0c37b19c..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.graphql +++ /dev/null @@ -1,7 +0,0 @@ -fragment ServerComponentFragment on Viewer { - ...ClientFragment @relay_client_component -} - -fragment ClientFragment on Viewer { - isFbEmployee -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.expected 
b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.expected deleted file mode 100644 index ac4cfab05eb6e..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.expected +++ /dev/null @@ -1,47 +0,0 @@ -==================================== INPUT ==================================== -fragment ServerComponentFragment on User { - id - ...ClientFragment @relay_client_component -} - -fragment ClientFragment on User { - id - name -} -==================================== OUTPUT =================================== -query ClientFragment$normalization @__SplitOperationMetadata -# SplitOperationMetadata { -# derived_from: Some( -# FragmentDefinitionName( -# "ClientFragment", -# ), -# ), -# location: fragment-with-relay-client-component.graphql:104:118, -# parent_documents: { -# FragmentDefinitionName( -# FragmentDefinitionName( -# "ServerComponentFragment", -# ), -# ), -# }, -# raw_response_type_generation_mode: None, -# } - { - ...ClientFragment -} - -fragment ClientFragment on User { - id - name -} - -fragment ServerComponentFragment on User @__RelayClientComponentMetadata -# RelayClientComponentMetadata { -# split_operation_filenames: [ -# "ClientFragment$normalization.graphql", -# ], -# } - { - id - ...ClientFragment @relay_client_component_server(module_id: "ClientFragment$normalization.graphql") -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.graphql deleted file mode 100644 index a7a1cbd32a8f1..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/fragment-with-relay-client-component.graphql +++ /dev/null @@ -1,9 +0,0 @@ -fragment ServerComponentFragment on User { - id - ...ClientFragment @relay_client_component 
-} - -fragment ClientFragment on User { - id - name -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.expected b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.expected deleted file mode 100644 index f32a1e1cf110c..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.expected +++ /dev/null @@ -1,49 +0,0 @@ -==================================== INPUT ==================================== -query QueryWithRelayClientComponent { - me { - ...ClientFragment @relay_client_component - } -} - -fragment ClientFragment on User { - id - name -} -==================================== OUTPUT =================================== -query ClientFragment$normalization @__SplitOperationMetadata -# SplitOperationMetadata { -# derived_from: Some( -# FragmentDefinitionName( -# "ClientFragment", -# ), -# ), -# location: query-with-relay-client-component.graphql:107:121, -# parent_documents: { -# OperationDefinitionName( -# OperationDefinitionName( -# "QueryWithRelayClientComponent", -# ), -# ), -# }, -# raw_response_type_generation_mode: None, -# } - { - ...ClientFragment -} - -query QueryWithRelayClientComponent @__RelayClientComponentMetadata -# RelayClientComponentMetadata { -# split_operation_filenames: [ -# "ClientFragment$normalization.graphql", -# ], -# } - { - me { - ...ClientFragment @relay_client_component_server(module_id: "ClientFragment$normalization.graphql") - } -} - -fragment ClientFragment on User { - id - name -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.graphql b/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.graphql deleted file mode 100644 index 243d60e3ea125..0000000000000 --- 
a/compiler/crates/relay-transforms/tests/relay_client_component/fixtures/query-with-relay-client-component.graphql +++ /dev/null @@ -1,10 +0,0 @@ -query QueryWithRelayClientComponent { - me { - ...ClientFragment @relay_client_component - } -} - -fragment ClientFragment on User { - id - name -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component/mod.rs b/compiler/crates/relay-transforms/tests/relay_client_component/mod.rs deleted file mode 100644 index 76ec3f1bada0b..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::FeatureFlags; -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::relay_client_component; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let flags = FeatureFlags::default(); - apply_transform_for_test(fixture, |program| relay_client_component(program, &flags)) -} diff --git a/compiler/crates/relay-transforms/tests/relay_client_component_test.rs b/compiler/crates/relay-transforms/tests/relay_client_component_test.rs deleted file mode 100644 index 816d6ea242b88..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_client_component_test.rs +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @generated SignedSource<> - */ - -mod relay_client_component; - -use relay_client_component::transform_fixture; -use fixture_tests::test_fixture; - -#[test] -fn fragment_on_all_non_node_union_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-all-non-node-union.invalid.graphql", "relay_client_component/fixtures/fragment-on-all-non-node-union.invalid.expected", input, expected); -} - -#[test] -fn fragment_on_fake_node_type_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-on-fake-node-type.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-on-fake-node-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-fake-node-type.invalid.graphql", "relay_client_component/fixtures/fragment-on-fake-node-type.invalid.expected", input, expected); -} - -#[test] -fn fragment_on_non_node_type_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-on-non-node-type.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-on-non-node-type.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-non-node-type.invalid.graphql", "relay_client_component/fixtures/fragment-on-non-node-type.invalid.expected", input, expected); -} - -#[test] -fn fragment_on_some_non_node_union_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.expected"); - test_fixture(transform_fixture, "fragment-on-some-non-node-union.invalid.graphql", "relay_client_component/fixtures/fragment-on-some-non-node-union.invalid.expected", input, expected); -} - 
-#[test] -fn fragment_with_arguments_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-with-arguments.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-with-arguments.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-arguments.invalid.graphql", "relay_client_component/fixtures/fragment-with-arguments.invalid.expected", input, expected); -} - -#[test] -fn fragment_with_incompatible_directives_invalid() { - let input = include_str!("relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-incompatible-directives.invalid.graphql", "relay_client_component/fixtures/fragment-with-incompatible-directives.invalid.expected", input, expected); -} - -#[test] -fn fragment_with_relay_client_component() { - let input = include_str!("relay_client_component/fixtures/fragment-with-relay-client-component.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-with-relay-client-component.expected"); - test_fixture(transform_fixture, "fragment-with-relay-client-component.graphql", "relay_client_component/fixtures/fragment-with-relay-client-component.expected", input, expected); -} - -#[test] -fn fragment_with_relay_client_component_on_query() { - let input = include_str!("relay_client_component/fixtures/fragment-with-relay-client-component-on-query.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-with-relay-client-component-on-query.expected"); - test_fixture(transform_fixture, "fragment-with-relay-client-component-on-query.graphql", "relay_client_component/fixtures/fragment-with-relay-client-component-on-query.expected", input, expected); -} - -#[test] -fn fragment_with_relay_client_component_on_viewer() { - let input = 
include_str!("relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.graphql"); - let expected = include_str!("relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.expected"); - test_fixture(transform_fixture, "fragment-with-relay-client-component-on-viewer.graphql", "relay_client_component/fixtures/fragment-with-relay-client-component-on-viewer.expected", input, expected); -} - -#[test] -fn query_with_relay_client_component() { - let input = include_str!("relay_client_component/fixtures/query-with-relay-client-component.graphql"); - let expected = include_str!("relay_client_component/fixtures/query-with-relay-client-component.expected"); - test_fixture(transform_fixture, "query-with-relay-client-component.graphql", "relay_client_component/fixtures/query-with-relay-client-component.expected", input, expected); -} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers.rs b/compiler/crates/relay-transforms/tests/relay_resolvers.rs new file mode 100644 index 0000000000000..dd343d6bafba0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers.rs @@ -0,0 +1,90 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema_with_located_extensions; +use relay_transforms::fragment_alias_directive; +use relay_transforms::relay_resolvers; +use relay_transforms::validate_resolver_fragments; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let graphql_location = SourceLocationKey::embedded(fixture.file_name, 0); + let extension_location = SourceLocationKey::embedded(fixture.file_name, 1); + + let ast = parse_executable(base, graphql_location).unwrap(); + let schema = get_test_schema_with_located_extensions(extensions, extension_location); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + + validate_resolver_fragments(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(base, extensions, &diagnostics))?; + + // Run `fragment_alias_directive` first because we want to ensure we + // correctly generate paths for named inline fragment spreads. 
+ let next_program = fragment_alias_directive(&program, true, true) + .and_then(|program| relay_resolvers(ProjectName::default(), &program, true)) + .map_err(|diagnostics| diagnostics_to_sorted_string(base, extensions, &diagnostics))?; + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} + +pub fn diagnostics_to_sorted_string( + source: &str, + extensions: &str, + diagnostics: &[Diagnostic], +) -> String { + let printer = DiagnosticPrinter::new(|source_location| match source_location { + SourceLocationKey::Embedded { index, .. } => { + Some(TextSource::from_whole_document(match index { + 0 => source, + 1 => extensions, + _ => panic!("Expected index to be 0 or 1"), + })) + } + SourceLocationKey::Standalone { .. 
} => None, + SourceLocationKey::Generated => None, + }); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/field-alias.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/field-alias.expected index c67061845c3e9..ea2491a0c867c 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/field-alias.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/field-alias.expected @@ -22,7 +22,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: Some( diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/missing-fragment-name.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/missing-fragment-name.expected index 583126e86a61b..2ac9cab0db850 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/missing-fragment-name.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/missing-fragment-name.expected @@ -12,7 +12,7 @@ extend type User { fragment Foo_user on User { __id @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/multiple-relay-resolvers.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/multiple-relay-resolvers.expected index 0bcd333705f3d..18fbf389c0a27 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/multiple-relay-resolvers.expected +++ 
b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/multiple-relay-resolvers.expected @@ -28,7 +28,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, @@ -41,7 +41,7 @@ fragment Foo_user on User { ...HobbitNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(518), + # field_id: FieldID(527), # import_path: "HobbitNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/nested-relay-resolver.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/nested-relay-resolver.expected index bcbea93f32955..02598545d003d 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/nested-relay-resolver.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/nested-relay-resolver.expected @@ -28,7 +28,7 @@ extend type User { fragment Foo_user on User { ...HobbitNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(518), + # field_id: FieldID(527), # import_path: "HobbitNameResolver", # import_name: None, # field_alias: None, @@ -45,7 +45,7 @@ fragment HobbitNameResolverFragment_name on User { name ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected index 9582060adad17..ad9fd2d567065 100644 --- 
a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected @@ -26,7 +26,7 @@ fragment BestFriendResolverFragment on User { fragment Foo_user on User { ...BestFriendResolverFragment @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "BestFriendResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected index 4c8ce635404c4..e08fd8e6b183d 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected @@ -17,7 +17,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-model.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-model.expected index b966f5d99b148..2ff5b7d873a8d 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-model.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-model.expected @@ -22,7 +22,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: 
FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-named-import.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-named-import.expected index 6c603b684ac62..1d5ab392f6d2b 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-named-import.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-named-import.expected @@ -22,7 +22,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: Some( # "pop_star_name", diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-required.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-required.expected index 700c94d618d26..013f7a33d693c 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-required.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-required.expected @@ -22,7 +22,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.expected new file mode 100644 index 0000000000000..73a505a9ca8e2 --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.expected @@ -0,0 +1,76 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + pop_star_name(field_arg: 2) + big_pop_star_name: pop_star_name(field_arg: 3) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int): String @relay_resolver(import_path: "PopStarNameResolver") +} +==================================== OUTPUT =================================== +fragment Foo_user on User { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "PopStarNameResolver", + # import_name: None, + # field_alias: None, + # field_path: "pop_star_name", + # field_arguments: [ + # Argument { + # name: WithLocation { + # location: relay-resolver-scalar-field-arguments-with-alias.graphql:44:53, + # item: ArgumentName( + # "field_arg", + # ), + # }, + # value: WithLocation { + # location: relay-resolver-scalar-field-arguments-with-alias.graphql:55:56, + # item: Constant( + # Int( + # 2, + # ), + # ), + # }, + # }, + # ], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "PopStarNameResolver", + # import_name: None, + # field_alias: Some( + # "big_pop_star_name", + # ), + # field_path: "big_pop_star_name", + # field_arguments: [ + # Argument { + # name: WithLocation { + # location: relay-resolver-scalar-field-arguments-with-alias.graphql:93:102, + # item: ArgumentName( + # "field_arg", + # ), + # }, + # value: WithLocation { + # location: relay-resolver-scalar-field-arguments-with-alias.graphql:104:105, + # item: Constant( + # Int( + # 3, + # ), + # ), + # }, + # }, + # ], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + +} diff --git 
a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.graphql new file mode 100644 index 0000000000000..a4f1d6441cca1 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.graphql @@ -0,0 +1,10 @@ +fragment Foo_user on User { + pop_star_name(field_arg: 2) + big_pop_star_name: pop_star_name(field_arg: 3) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int): String @relay_resolver(import_path: "PopStarNameResolver") +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.expected new file mode 100644 index 0000000000000..64391111173f7 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.expected @@ -0,0 +1,43 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + pop_star_name(field_arg: 2) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int): String @relay_resolver(import_path: "PopStarNameResolver") +} +==================================== OUTPUT =================================== +fragment Foo_user on User { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "PopStarNameResolver", + # import_name: None, + # field_alias: None, + # field_path: "pop_star_name", + # field_arguments: [ + # Argument { + # name: WithLocation { + # location: relay-resolver-scalar-field-arguments.graphql:44:53, + # item: ArgumentName( + # "field_arg", + # ), + # }, + # value: WithLocation { + # location: 
relay-resolver-scalar-field-arguments.graphql:55:56, + # item: Constant( + # Int( + # 2, + # ), + # ), + # }, + # }, + # ], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.graphql new file mode 100644 index 0000000000000..b9400730a988c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.graphql @@ -0,0 +1,9 @@ +fragment Foo_user on User { + pop_star_name(field_arg: 2) +} + +# %extensions% + +extend type User { + pop_star_name(field_arg: Int): String @relay_resolver(import_path: "PopStarNameResolver") +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected index 1f1151fc776ed..64c4dc853068d 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected @@ -19,21 +19,22 @@ extend type User { } ==================================== OUTPUT =================================== fragment Foo_user on Node { - ... on User @__FragmentAliasMetadata + ... 
on User @alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: relay-resolver-within-named-inline-fragment.graphql:49:51, + # location: relay-resolver-within-named-inline-fragment.graphql:53:71, # item: "aliased_fragment", # }, # type_condition: Some( - # Object(69), + # Object(70), # ), - # selection_type: Object(69), + # non_nullable: false, + # selection_type: Object(70), # } { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver.expected b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver.expected index 5feb036641d31..4de12b6bcee12 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver.expected +++ b/compiler/crates/relay-transforms/tests/relay_resolvers/fixtures/relay-resolver.expected @@ -22,7 +22,7 @@ extend type User { fragment Foo_user on User { ...PopStarNameResolverFragment_name @__RelayResolverMetadata # RelayResolverMetadata { - # field_id: FieldID(517), + # field_id: FieldID(526), # import_path: "PopStarNameResolver", # import_name: None, # field_alias: None, diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers/mod.rs b/compiler/crates/relay-transforms/tests/relay_resolvers/mod.rs deleted file mode 100644 index ca33b4aa9d9c6..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_resolvers/mod.rs +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::Diagnostic; -use common::FeatureFlag; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_located_extensions; -use relay_transforms::fragment_alias_directive; -use relay_transforms::relay_resolvers; -use relay_transforms::validate_resolver_fragments; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let graphql_location = SourceLocationKey::embedded(fixture.file_name, 0); - let extension_location = SourceLocationKey::embedded(fixture.file_name, 1); - - let ast = parse_executable(base, graphql_location).unwrap(); - let schema = get_test_schema_with_located_extensions(extensions, extension_location); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - - validate_resolver_fragments(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(base, extensions, &diagnostics))?; - - // Run `fragment_alias_directive` first because we want to ensure we - // correctly generate paths for named inline fragment spreads. 
- let next_program = fragment_alias_directive(&program, &FeatureFlag::Enabled) - .and_then(|program| relay_resolvers(&program, true)) - .map_err(|diagnostics| diagnostics_to_sorted_string(base, extensions, &diagnostics))?; - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} - -pub fn diagnostics_to_sorted_string( - source: &str, - extensions: &str, - diagnostics: &[Diagnostic], -) -> String { - let printer = DiagnosticPrinter::new(|source_location| match source_location { - SourceLocationKey::Embedded { index, .. } => { - Some(TextSource::from_whole_document(match index { - 0 => source, - 1 => extensions, - _ => panic!("Expected index to be 0 or 1"), - })) - } - SourceLocationKey::Standalone { .. } => None, - SourceLocationKey::Generated => None, - }); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types.rs b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types.rs new file mode 100644 index 0000000000000..23ae0f4957e84 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types.rs @@ -0,0 +1,69 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::FeatureFlag; +use common::FeatureFlags; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::relay_resolvers; +use relay_transforms::relay_resolvers_abstract_types; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let relay_resolver_enable_interface_output_type = if fixture + .content + .contains("# relay-resolver-enable-interface-output-type") + { + FeatureFlag::Enabled + } else { + FeatureFlag::Disabled + }; + let feature_flags = FeatureFlags { + relay_resolver_enable_interface_output_type, + ..Default::default() + }; + let program_pass_one = relay_resolvers_abstract_types(&program, &feature_flags) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let program_pass_two = relay_resolvers(ProjectName::default(), &program_pass_one, true) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + let mut printed = program_pass_two + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + 
.chain( + program_pass_two + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.expected new file mode 100644 index 0000000000000..4e6d8658174da --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.expected @@ -0,0 +1,41 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query clientFieldOnAbstractTypeWithoutResolver { + cat { + ... on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query clientFieldOnAbstractTypeWithoutResolver { + cat { + ... 
on Tabby { + parent { + description + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.graphql new file mode 100644 index 0000000000000..9bd06766944a0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.graphql @@ -0,0 +1,30 @@ +# relay-resolver-enable-interface-output-type + +query clientFieldOnAbstractTypeWithoutResolver { + cat { + ... on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.expected new file mode 100644 index 0000000000000..29eeda975b497 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.expected @@ -0,0 +1,63 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment conditionOnInlineFragmentWithoutTypeOnInterfaceFragment on Cat { + ... 
@include(if: $has_owner) { + name + description + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String + name: String +} +==================================== OUTPUT =================================== +fragment conditionOnInlineFragmentWithoutTypeOnInterfaceFragment on Cat { + ... @include(if: $has_owner) { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(529), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "name", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + name + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.graphql new file mode 100644 index 0000000000000..332ffb4a59127 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.graphql @@ -0,0 +1,25 @@ +# relay-resolver-enable-interface-output-type + +fragment conditionOnInlineFragmentWithoutTypeOnInterfaceFragment on Cat { + ... 
@include(if: $has_owner) { + name + description + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String + name: String +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.expected new file mode 100644 index 0000000000000..49678734ab33f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.expected @@ -0,0 +1,33 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment conditionOnSelectionOnInterfaceWithoutResolverFragment on Cat { + description @include(if: $has_owner) + name +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment conditionOnSelectionOnInterfaceWithoutResolverFragment on Cat { + description @include(if: $has_owner) + name +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.graphql new file mode 100644 index 0000000000000..97ee79ce8456f --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.graphql @@ -0,0 +1,27 @@ +# relay-resolver-enable-interface-output-type + +fragment conditionOnSelectionOnInterfaceWithoutResolverFragment on Cat { + description @include(if: $has_owner) + name +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.expected new file mode 100644 index 0000000000000..354d2786e52e0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.expected @@ -0,0 +1,84 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment conditionsOnNestedSelectionsOnInterfaceFragment on Cat { + name @include(if: $has_owner) + description @include(if: $has_owner) + favorite_toy { + brand @include(if: $happy_cat) + } +} + +# %extensions% + +interface Cat { + description: String + favorite_toy: Toy + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + favorite_toy: Toy + name: String +} + +type Persian implements Cat { + description: String + favorite_toy: Toy + name: String +} + +interface Toy { + brand: String +} + +type Yarn implements Toy { + brand: String @relay_resolver(import_path: "YarnResolver") +} + +type Ball implements Toy { + brand: String +} +==================================== OUTPUT 
=================================== +fragment conditionsOnNestedSelectionsOnInterfaceFragment on Cat { + name @include(if: $has_owner) + ... on Tabby @include(if: $has_owner) { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(529), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian @include(if: $has_owner) { + description + } + favorite_toy { + ... on Yarn @include(if: $happy_cat) { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(536), + # import_path: "YarnResolver", + # import_name: None, + # field_alias: None, + # field_path: "favorite_toy.brand", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Ball @include(if: $happy_cat) { + brand + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.graphql new file mode 100644 index 0000000000000..a65e2a63b5f83 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.graphql @@ -0,0 +1,41 @@ +# relay-resolver-enable-interface-output-type + +fragment conditionsOnNestedSelectionsOnInterfaceFragment on Cat { + name @include(if: $has_owner) + description @include(if: $has_owner) + favorite_toy { + brand @include(if: $happy_cat) + } +} + +# %extensions% + +interface Cat { + description: String + favorite_toy: Toy + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + favorite_toy: Toy + name: String +} + +type 
Persian implements Cat { + description: String + favorite_toy: Toy + name: String +} + +interface Toy { + brand: String +} + +type Yarn implements Toy { + brand: String @relay_resolver(import_path: "YarnResolver") +} + +type Ball implements Toy { + brand: String +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.expected new file mode 100644 index 0000000000000..1739e4b89a9ca --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.expected @@ -0,0 +1,67 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment conditionsOnSelectionsOnInterfaceFragment on Cat { + description @include(if: $has_owner) + name +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment conditionsOnSelectionsOnInterfaceFragment on Cat { + ... on Tabby @include(if: $has_owner) { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian @include(if: $has_owner) { + description + } + ... 
on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(529), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "name", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + name + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.graphql new file mode 100644 index 0000000000000..71c27bfbea887 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.graphql @@ -0,0 +1,27 @@ +# relay-resolver-enable-interface-output-type + +fragment conditionsOnSelectionsOnInterfaceFragment on Cat { + description @include(if: $has_owner) + name +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.expected new file mode 100644 index 0000000000000..72ad482199960 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.expected @@ -0,0 +1,49 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeQuery { + cat { + description + } +} + +# %extensions% 
+ +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query edgeToAbstractTypeQuery { + cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "cat.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.graphql new file mode 100644 index 0000000000000..4e7782b9e4796 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.graphql @@ -0,0 +1,25 @@ +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeQuery { + cat { + description + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.expected new file mode 100644 index 0000000000000..edcfa2d2f3535 --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.expected @@ -0,0 +1,30 @@ +==================================== INPUT ==================================== +query edgeToAbstractTypeDisabledQuery { + cat { + description + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query edgeToAbstractTypeDisabledQuery { + cat { + description + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.graphql new file mode 100644 index 0000000000000..a356db6ff4156 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.graphql @@ -0,0 +1,23 @@ +query edgeToAbstractTypeDisabledQuery { + cat { + description + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.expected new file mode 100644 index 0000000000000..e8678b239fcf8 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.expected @@ -0,0 +1,65 @@ 
+==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeWithInlineFragmentQuery { + cat { + ... { + name + description + } + ... on Tabby { + name + } + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String + parent: Cat +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query edgeToAbstractTypeWithInlineFragmentQuery { + cat { + ... { + name + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "cat.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } + ... on Tabby { + name + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.graphql new file mode 100644 index 0000000000000..133a907bb25cb --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.graphql @@ -0,0 +1,35 @@ +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeWithInlineFragmentQuery { + cat { + ... { + name + description + } + ... 
on Tabby { + name + } + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String + parent: Cat +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.expected new file mode 100644 index 0000000000000..77de2e2b761e2 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.expected @@ -0,0 +1,58 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeWithInlineFragmentOnAbstractTypeQuery { + cat { + name + ... on Cat { + description + } + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query edgeToAbstractTypeWithInlineFragmentOnAbstractTypeQuery { + cat { + name + ... on Cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "cat.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... 
on Persian { + description + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.graphql new file mode 100644 index 0000000000000..d5d36cf8618bb --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.graphql @@ -0,0 +1,31 @@ +# relay-resolver-enable-interface-output-type + +query edgeToAbstractTypeWithInlineFragmentOnAbstractTypeQuery { + cat { + name + ... on Cat { + description + } + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.expected new file mode 100644 index 0000000000000..cb8704c8f77a5 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.expected @@ -0,0 +1,34 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment extendServerDefinedConcreteTypeFragment on FeedUnit { + tracking + ... 
on PhotoStory { + description + } +} + +# %extensions% + +extend type PhotoStory { + description: String @relay_resolver(import_path: "TabbyResolver") +} +==================================== OUTPUT =================================== +fragment extendServerDefinedConcreteTypeFragment on FeedUnit { + tracking + ... on PhotoStory { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.graphql new file mode 100644 index 0000000000000..d6b631818ec1c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.graphql @@ -0,0 +1,14 @@ +# relay-resolver-enable-interface-output-type + +fragment extendServerDefinedConcreteTypeFragment on FeedUnit { + tracking + ... 
on PhotoStory { + description + } +} + +# %extensions% + +extend type PhotoStory { + description: String @relay_resolver(import_path: "TabbyResolver") +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.expected new file mode 100644 index 0000000000000..ff927e92ff09b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +fragment fragmentOnAbstractTypeDisabledFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} +==================================== OUTPUT =================================== +fragment fragmentOnAbstractTypeDisabledFragment on Cat { + description +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.graphql new file mode 100644 index 0000000000000..95d86527bf216 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.graphql @@ -0,0 +1,17 @@ +fragment fragmentOnAbstractTypeDisabledFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} diff --git 
a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.expected new file mode 100644 index 0000000000000..9698f4b8a03ae --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.expected @@ -0,0 +1,55 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment fragmentOnAbstractTypeEnabledFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +type Siberian implements Cat { + description: String +} + +type Aegean implements Cat { + description: String +} +==================================== OUTPUT =================================== +fragment fragmentOnAbstractTypeEnabledFragment on Cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + ... on Siberian { + description + } + ... 
on Aegean { + description + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.graphql new file mode 100644 index 0000000000000..9005f20de28c0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.graphql @@ -0,0 +1,27 @@ +# relay-resolver-enable-interface-output-type + +fragment fragmentOnAbstractTypeEnabledFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +type Siberian implements Cat { + description: String +} + +type Aegean implements Cat { + description: String +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.expected new file mode 100644 index 0000000000000..98e3b2442d9d2 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.expected @@ -0,0 +1,37 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment inlineFragmentNoTypeWithoutResolverSelectionsOnInterfaceFragment on Cat { + ... 
{ + description + name + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment inlineFragmentNoTypeWithoutResolverSelectionsOnInterfaceFragment on Cat { + ... { + description + name + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.graphql new file mode 100644 index 0000000000000..604d5a327ad58 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.graphql @@ -0,0 +1,29 @@ +# relay-resolver-enable-interface-output-type + +fragment inlineFragmentNoTypeWithoutResolverSelectionsOnInterfaceFragment on Cat { + ... 
{ + description + name + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.expected new file mode 100644 index 0000000000000..a70509c3c835f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.expected @@ -0,0 +1,54 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment inlineFragmentWithoutTypeConditionOnInterfaceFragment on Cat { + ... { + description + name + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment inlineFragmentWithoutTypeConditionOnInterfaceFragment on Cat { + ... { + name + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... 
on Persian { + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.graphql new file mode 100644 index 0000000000000..1b961ea562773 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.graphql @@ -0,0 +1,29 @@ +# relay-resolver-enable-interface-output-type + +fragment inlineFragmentWithoutTypeConditionOnInterfaceFragment on Cat { + ... { + description + name + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.expected new file mode 100644 index 0000000000000..bb318c835958a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.expected @@ -0,0 +1,54 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment nestedAbstractTypeFragment on Cat { + ... 
on Tabby { + parent { + description + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment nestedAbstractTypeFragment on Cat { + ... on Tabby { + parent { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "parent.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.graphql new file mode 100644 index 0000000000000..41025b023c024 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.graphql @@ -0,0 +1,28 @@ +# relay-resolver-enable-interface-output-type + +fragment nestedAbstractTypeFragment on Cat { + ... 
on Tabby { + parent { + description + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.expected new file mode 100644 index 0000000000000..c2fb713742ece --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.expected @@ -0,0 +1,58 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query nestedAbstractTypeQuery { + cat { + ... on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +query nestedAbstractTypeQuery { + cat { + ... on Tabby { + parent { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "cat.parent.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... 
on Persian { + description + } + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.graphql new file mode 100644 index 0000000000000..6143eb323d6a0 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.graphql @@ -0,0 +1,30 @@ +# relay-resolver-enable-interface-output-type + +query nestedAbstractTypeQuery { + cat { + ... on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.expected new file mode 100644 index 0000000000000..c420e7a983dfe --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.expected @@ -0,0 +1,77 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment nestedAbstractTypeSelectionOnInlineFragmentWithoutTypeFragment on Cat { + ... { + description + ... 
on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment nestedAbstractTypeSelectionOnInlineFragmentWithoutTypeFragment on Cat { + ... { + ... on Tabby { + parent { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "parent.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } + } + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.graphql new file mode 100644 index 0000000000000..7b514f36dd4ff --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.graphql @@ -0,0 +1,31 @@ +# relay-resolver-enable-interface-output-type + +fragment nestedAbstractTypeSelectionOnInlineFragmentWithoutTypeFragment on Cat { + ... 
{ + description + ... on Tabby { + parent { + description + } + } + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.expected new file mode 100644 index 0000000000000..87275c184602b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.expected @@ -0,0 +1,54 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment nestedConditionOnInlineFragmentOnInterfaceFragment on Cat { + ... @include(if: $has_owner) { + description + name @include(if: $has_name) + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment nestedConditionOnInlineFragmentOnInterfaceFragment on Cat { + ... @include(if: $has_owner) { + name @include(if: $has_name) + ... 
on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(528), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.graphql new file mode 100644 index 0000000000000..57b289d864e7a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.graphql @@ -0,0 +1,29 @@ +# relay-resolver-enable-interface-output-type + +fragment nestedConditionOnInlineFragmentOnInterfaceFragment on Cat { + ... 
@include(if: $has_owner) { + description + name @include(if: $has_name) + } +} + +# %extensions% + +interface Cat { + description: String + name: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + name: String +} + +type Persian implements Cat { + description: String + name: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.expected new file mode 100644 index 0000000000000..36476d4ce1115 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.expected @@ -0,0 +1,85 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query nestedFragmentSpreadOnAbstractTypeQuery { + cat { + ... on Tabby { + parent { + ...nestedFragmentSpreadOnAbstractTypeFragment + } + } + description + } +} + +fragment nestedFragmentSpreadOnAbstractTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment nestedFragmentSpreadOnAbstractTypeFragment on Cat { + ... 
on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } +} + +query nestedFragmentSpreadOnAbstractTypeQuery { + cat { + ... on Tabby { + parent { + ...nestedFragmentSpreadOnAbstractTypeFragment + } + } + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "cat.description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.graphql new file mode 100644 index 0000000000000..38d74c577ec43 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.graphql @@ -0,0 +1,35 @@ +# relay-resolver-enable-interface-output-type + +query nestedFragmentSpreadOnAbstractTypeQuery { + cat { + ... 
on Tabby { + parent { + ...nestedFragmentSpreadOnAbstractTypeFragment + } + } + description + } +} + +fragment nestedFragmentSpreadOnAbstractTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + parent: Cat +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.expected new file mode 100644 index 0000000000000..0a60e7e0046ec --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.expected @@ -0,0 +1,46 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment pluralFragmentOnAbstractTypeFragment on Cat @relay(plural: true) { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +fragment pluralFragmentOnAbstractTypeFragment on Cat @relay(plural: true) { + ... 
on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(529), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.graphql new file mode 100644 index 0000000000000..208c8029f0732 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.graphql @@ -0,0 +1,24 @@ +# relay-resolver-enable-interface-output-type + +fragment pluralFragmentOnAbstractTypeFragment on Cat @relay(plural: true) { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String +} + +type Tabby implements Cat @__RelayResolverModel { + id: ID! + description: String @relay_resolver(import_path: "TabbyResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "TabbyResolver", fragment_name: "Tabby__id", inject_fragment_data: "id") +} + +type Persian implements Cat @__RelayResolverModel { + id: ID! 
+ description: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "PersianResolver", fragment_name: "Persian__id", inject_fragment_data: "id") +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.expected new file mode 100644 index 0000000000000..8ffc7d3112f07 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.expected @@ -0,0 +1,36 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment resolverFieldOnClientInterfaceFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + description: String +} + +type Persian implements Cat { + description: String +} +==================================== OUTPUT =================================== +fragment resolverFieldOnClientInterfaceFragment on Cat { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "CatResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.graphql new file mode 100644 index 0000000000000..0c25c706b0d17 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.graphql @@ 
-0,0 +1,19 @@ +# relay-resolver-enable-interface-output-type + +fragment resolverFieldOnClientInterfaceFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + description: String +} + +type Persian implements Cat { + description: String +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.expected new file mode 100644 index 0000000000000..7520e2df93b82 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.expected @@ -0,0 +1,46 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment resolverFieldOnClientTypeImplementingServerInterfaceFragment on FeedUnit { + tracking +} + +# %extensions% + +type NewsStory implements FeedUnit { + # Interface fields + actor: Actor + actorCount: Int + feedback: Feedback + id: ID! + message: Text + tracking: String @relay_resolver(import_path: "TrackingResolver", live: true) + actor_key: ID! +} +==================================== OUTPUT =================================== +fragment resolverFieldOnClientTypeImplementingServerInterfaceFragment on FeedUnit { + ... on NonNodeStory { + tracking + } + ... on PhotoStory { + tracking + } + ... on Story { + tracking + } + ... 
on NewsStory { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(531), + # import_path: "TrackingResolver", + # import_name: None, + # field_alias: None, + # field_path: "tracking", + # field_arguments: [], + # live: true, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.graphql new file mode 100644 index 0000000000000..3ce26bb6c03da --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.graphql @@ -0,0 +1,18 @@ +# relay-resolver-enable-interface-output-type + +fragment resolverFieldOnClientTypeImplementingServerInterfaceFragment on FeedUnit { + tracking +} + +# %extensions% + +type NewsStory implements FeedUnit { + # Interface fields + actor: Actor + actorCount: Int + feedback: Feedback + id: ID! + message: Text + tracking: String @relay_resolver(import_path: "TrackingResolver", live: true) + actor_key: ID! 
+} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.expected new file mode 100644 index 0000000000000..d77a43e9c236f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.expected @@ -0,0 +1,16 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query selectionsOnNodeQuery { + node(id: 4) { + __typename + } +} + +# %extensions% +==================================== OUTPUT =================================== +query selectionsOnNodeQuery { + node(id: 4) { + __typename + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.graphql new file mode 100644 index 0000000000000..4be723c40abb4 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node.graphql @@ -0,0 +1,9 @@ +# relay-resolver-enable-interface-output-type + +query selectionsOnNodeQuery { + node(id: 4) { + __typename + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.expected new file mode 100644 index 0000000000000..c77bec40739f7 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.expected @@ -0,0 +1,22 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query 
selectionsOnNodeQuery { + node(id: 4) { + __typename + id + } +} + +# %extensions% + +type ClientType implements Node { + id: ID! +} +==================================== OUTPUT =================================== +query selectionsOnNodeQuery { + node(id: 4) { + __typename + id + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.graphql new file mode 100644 index 0000000000000..77da961b48847 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.graphql @@ -0,0 +1,14 @@ +# relay-resolver-enable-interface-output-type + +query selectionsOnNodeQuery { + node(id: 4) { + __typename + id + } +} + +# %extensions% + +type ClientType implements Node { + id: ID! +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.expected new file mode 100644 index 0000000000000..6e33de986f08f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.expected @@ -0,0 +1,34 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment serverFieldOnAbstractTypeFragment on FeedUnit { # abstract + actor { # abstract + birthdate { # concrete + day + month + year + } + ... on Page { + body { + text + } + } + } +} + +# %extensions% +==================================== OUTPUT =================================== +fragment serverFieldOnAbstractTypeFragment on FeedUnit { + actor { + birthdate { + day + month + year + } + ... 
on Page { + body { + text + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.graphql new file mode 100644 index 0000000000000..920bd496853cd --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.graphql @@ -0,0 +1,18 @@ +# relay-resolver-enable-interface-output-type + +fragment serverFieldOnAbstractTypeFragment on FeedUnit { # abstract + actor { # abstract + birthdate { # concrete + day + month + year + } + ... on Page { + body { + text + } + } + } +} + +# %extensions% diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.expected new file mode 100644 index 0000000000000..864a2425dd786 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.expected @@ -0,0 +1,60 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment spreadFragmentIntoInterfaceOnConcreteTypeFragment on Cat { + description +} + +query Query { + tabby { + ...spreadFragmentIntoInterfaceOnConcreteTypeFragment + is_orange + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + is_orange: Boolean +} + +type Persian implements Cat { + description: String +} + +extend type Query { + tabby: Tabby +} +==================================== OUTPUT =================================== +fragment 
spreadFragmentIntoInterfaceOnConcreteTypeFragment on Cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } +} + +query Query { + tabby { + ...spreadFragmentIntoInterfaceOnConcreteTypeFragment + is_orange + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.graphql new file mode 100644 index 0000000000000..d0e4979584b01 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.graphql @@ -0,0 +1,31 @@ +# relay-resolver-enable-interface-output-type + +fragment spreadFragmentIntoInterfaceOnConcreteTypeFragment on Cat { + description +} + +query Query { + tabby { + ...spreadFragmentIntoInterfaceOnConcreteTypeFragment + is_orange + } +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") + is_orange: Boolean +} + +type Persian implements Cat { + description: String +} + +extend type Query { + tabby: Tabby +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.expected new file mode 100644 index 0000000000000..2286565ac744d --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.expected @@ -0,0 +1,57 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +query spreadFragmentOnAbstractTypeQuery { + cat { + ...spreadFragmentOnAbstractTypeFragment + } +} + +fragment spreadFragmentOnAbstractTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} +==================================== OUTPUT =================================== +fragment spreadFragmentOnAbstractTypeFragment on Cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(527), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... 
on Persian { + description + } +} + +query spreadFragmentOnAbstractTypeQuery { + cat { + ...spreadFragmentOnAbstractTypeFragment + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.graphql new file mode 100644 index 0000000000000..b6cac926d77b5 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.graphql @@ -0,0 +1,29 @@ +# relay-resolver-enable-interface-output-type + +query spreadFragmentOnAbstractTypeQuery { + cat { + ...spreadFragmentOnAbstractTypeFragment + } +} + +fragment spreadFragmentOnAbstractTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String +} + +type Tabby implements Cat { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian implements Cat { + description: String +} + +extend type Query { + cat: Cat +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.expected b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.expected new file mode 100644 index 0000000000000..2c7e509942f9d --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.expected @@ -0,0 +1,44 @@ +==================================== INPUT ==================================== +# relay-resolver-enable-interface-output-type + +fragment spreadFragmentOnAbstractTypeFragment on Cat { + ... on Tabby { + description + } + ... 
on Persian { + description + } +} + +# %extensions% + +union Cat = Tabby | Persian + +type Tabby { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian { + description: String +} +==================================== OUTPUT =================================== +fragment spreadFragmentOnAbstractTypeFragment on Cat { + ... on Tabby { + __id @__RelayResolverMetadata + # RelayResolverMetadata { + # field_id: FieldID(526), + # import_path: "TabbyResolver", + # import_name: None, + # field_alias: None, + # field_path: "description", + # field_arguments: [], + # live: false, + # output_type_info: Legacy, + # fragment_data_injection_mode: None, + # } + + } + ... on Persian { + description + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.graphql b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.graphql new file mode 100644 index 0000000000000..5c1a4b033e03b --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types/fixtures/union_types_are_skipped.graphql @@ -0,0 +1,22 @@ +# relay-resolver-enable-interface-output-type + +fragment spreadFragmentOnAbstractTypeFragment on Cat { + ... on Tabby { + description + } + ... on Persian { + description + } +} + +# %extensions% + +union Cat = Tabby | Persian + +type Tabby { + description: String @relay_resolver(import_path: "TabbyResolver") +} + +type Persian { + description: String +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types_test.rs b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types_test.rs new file mode 100644 index 0000000000000..775bd7f576af8 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_abstract_types_test.rs @@ -0,0 +1,209 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @generated SignedSource<> + */ + +mod relay_resolvers_abstract_types; + +use relay_resolvers_abstract_types::transform_fixture; +use fixture_tests::test_fixture; + +#[tokio::test] +async fn client_field_on_abstract_type_without_resolver() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.expected"); + test_fixture(transform_fixture, file!(), "client_field_on_abstract_type_without_resolver.graphql", "relay_resolvers_abstract_types/fixtures/client_field_on_abstract_type_without_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn condition_on_inline_fragment_without_type_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.expected"); + test_fixture(transform_fixture, file!(), "condition_on_inline_fragment_without_type_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/condition_on_inline_fragment_without_type_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn condition_on_selection_on_interface_without_resolver() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.expected"); + test_fixture(transform_fixture, file!(), "condition_on_selection_on_interface_without_resolver.graphql", 
"relay_resolvers_abstract_types/fixtures/condition_on_selection_on_interface_without_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn conditions_on_nested_selections_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.expected"); + test_fixture(transform_fixture, file!(), "conditions_on_nested_selections_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/conditions_on_nested_selections_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn conditions_on_selections_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.expected"); + test_fixture(transform_fixture, file!(), "conditions_on_selections_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/conditions_on_selections_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn edge_to_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "edge_to_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/edge_to_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn edge_to_abstract_type_disabled() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.expected"); + test_fixture(transform_fixture, file!(), 
"edge_to_abstract_type_disabled.graphql", "relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn edge_to_abstract_type_with_inline_fragment() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.expected"); + test_fixture(transform_fixture, file!(), "edge_to_abstract_type_with_inline_fragment.graphql", "relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn edge_to_abstract_type_with_inline_fragment_on_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "edge_to_abstract_type_with_inline_fragment_on_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/edge_to_abstract_type_with_inline_fragment_on_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn extend_server_defined_concrete_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "extend_server_defined_concrete_type.graphql", "relay_resolvers_abstract_types/fixtures/extend_server_defined_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_on_abstract_type_disabled() { + let input = 
include_str!("relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.expected"); + test_fixture(transform_fixture, file!(), "fragment_on_abstract_type_disabled.graphql", "relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_on_abstract_type_enabled() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.expected"); + test_fixture(transform_fixture, file!(), "fragment_on_abstract_type_enabled.graphql", "relay_resolvers_abstract_types/fixtures/fragment_on_abstract_type_enabled.expected", input, expected).await; +} + +#[tokio::test] +async fn inline_fragment_no_type_without_resolver_selections_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.expected"); + test_fixture(transform_fixture, file!(), "inline_fragment_no_type_without_resolver_selections_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/inline_fragment_no_type_without_resolver_selections_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn inline_fragment_without_type_condition_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.expected"); + test_fixture(transform_fixture, file!(), 
"inline_fragment_without_type_condition_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/inline_fragment_without_type_condition_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_abstract_type_fragment() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.expected"); + test_fixture(transform_fixture, file!(), "nested_abstract_type_fragment.graphql", "relay_resolvers_abstract_types/fixtures/nested_abstract_type_fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_abstract_type_query() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.expected"); + test_fixture(transform_fixture, file!(), "nested_abstract_type_query.graphql", "relay_resolvers_abstract_types/fixtures/nested_abstract_type_query.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_abstract_type_selection_on_inline_fragment_without_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.expected"); + test_fixture(transform_fixture, file!(), "nested_abstract_type_selection_on_inline_fragment_without_type.graphql", "relay_resolvers_abstract_types/fixtures/nested_abstract_type_selection_on_inline_fragment_without_type.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_condition_on_inline_fragment_on_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.graphql"); + let expected 
= include_str!("relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.expected"); + test_fixture(transform_fixture, file!(), "nested_condition_on_inline_fragment_on_interface.graphql", "relay_resolvers_abstract_types/fixtures/nested_condition_on_inline_fragment_on_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn nested_fragment_spread_on_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "nested_fragment_spread_on_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/nested_fragment_spread_on_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn plural_fragment_on_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "plural_fragment_on_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/plural_fragment_on_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_field_on_client_interface() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.expected"); + test_fixture(transform_fixture, file!(), "resolver_field_on_client_interface.graphql", "relay_resolvers_abstract_types/fixtures/resolver_field_on_client_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_field_on_client_type_implementing_server_interface() { + let input = 
include_str!("relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.expected"); + test_fixture(transform_fixture, file!(), "resolver_field_on_client_type_implementing_server_interface.graphql", "relay_resolvers_abstract_types/fixtures/resolver_field_on_client_type_implementing_server_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn selections_on_node() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/selections_on_node.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/selections_on_node.expected"); + test_fixture(transform_fixture, file!(), "selections_on_node.graphql", "relay_resolvers_abstract_types/fixtures/selections_on_node.expected", input, expected).await; +} + +#[tokio::test] +async fn selections_on_node_with_client_concrete_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "selections_on_node_with_client_concrete_type.graphql", "relay_resolvers_abstract_types/fixtures/selections_on_node_with_client_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn server_field_on_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "server_field_on_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/server_field_on_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn 
spread_fragment_into_interface_on_concrete_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.expected"); + test_fixture(transform_fixture, file!(), "spread_fragment_into_interface_on_concrete_type.graphql", "relay_resolvers_abstract_types/fixtures/spread_fragment_into_interface_on_concrete_type.expected", input, expected).await; +} + +#[tokio::test] +async fn spread_fragment_on_abstract_type() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.expected"); + test_fixture(transform_fixture, file!(), "spread_fragment_on_abstract_type.graphql", "relay_resolvers_abstract_types/fixtures/spread_fragment_on_abstract_type.expected", input, expected).await; +} + +#[tokio::test] +async fn union_types_are_skipped() { + let input = include_str!("relay_resolvers_abstract_types/fixtures/union_types_are_skipped.graphql"); + let expected = include_str!("relay_resolvers_abstract_types/fixtures/union_types_are_skipped.expected"); + test_fixture(transform_fixture, file!(), "union_types_are_skipped.graphql", "relay_resolvers_abstract_types/fixtures/union_types_are_skipped.expected", input, expected).await; +} diff --git a/compiler/crates/relay-transforms/tests/relay_resolvers_test.rs b/compiler/crates/relay-transforms/tests/relay_resolvers_test.rs index 8a377f8690978..6dd04e8002099 100644 --- a/compiler/crates/relay-transforms/tests/relay_resolvers_test.rs +++ b/compiler/crates/relay-transforms/tests/relay_resolvers_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<1d04afc08ee7547cc923bc7dab37713c>> + * @generated SignedSource<<5ed09766c9f906b6da4f1eb1e6082c33>> */ mod relay_resolvers; @@ -12,121 +12,135 @@ mod relay_resolvers; use relay_resolvers::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn field_alias() { +#[tokio::test] +async fn field_alias() { let input = include_str!("relay_resolvers/fixtures/field-alias.graphql"); let expected = include_str!("relay_resolvers/fixtures/field-alias.expected"); - test_fixture(transform_fixture, "field-alias.graphql", "relay_resolvers/fixtures/field-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "field-alias.graphql", "relay_resolvers/fixtures/field-alias.expected", input, expected).await; } -#[test] -fn fragment_spread_usage_invalid() { +#[tokio::test] +async fn fragment_spread_usage_invalid() { let input = include_str!("relay_resolvers/fixtures/fragment-spread-usage.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/fragment-spread-usage.invalid.expected"); - test_fixture(transform_fixture, "fragment-spread-usage.invalid.graphql", "relay_resolvers/fixtures/fragment-spread-usage.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-spread-usage.invalid.graphql", "relay_resolvers/fixtures/fragment-spread-usage.invalid.expected", input, expected).await; } -#[test] -fn missing_fragment_invalid() { +#[tokio::test] +async fn missing_fragment_invalid() { let input = include_str!("relay_resolvers/fixtures/missing-fragment.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/missing-fragment.invalid.expected"); - test_fixture(transform_fixture, "missing-fragment.invalid.graphql", "relay_resolvers/fixtures/missing-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-fragment.invalid.graphql", "relay_resolvers/fixtures/missing-fragment.invalid.expected", input, expected).await; } -#[test] -fn 
missing_fragment_name() { +#[tokio::test] +async fn missing_fragment_name() { let input = include_str!("relay_resolvers/fixtures/missing-fragment-name.graphql"); let expected = include_str!("relay_resolvers/fixtures/missing-fragment-name.expected"); - test_fixture(transform_fixture, "missing-fragment-name.graphql", "relay_resolvers/fixtures/missing-fragment-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-fragment-name.graphql", "relay_resolvers/fixtures/missing-fragment-name.expected", input, expected).await; } -#[test] -fn missing_import_path_invalid() { +#[tokio::test] +async fn missing_import_path_invalid() { let input = include_str!("relay_resolvers/fixtures/missing-import-path.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/missing-import-path.invalid.expected"); - test_fixture(transform_fixture, "missing-import-path.invalid.graphql", "relay_resolvers/fixtures/missing-import-path.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-import-path.invalid.graphql", "relay_resolvers/fixtures/missing-import-path.invalid.expected", input, expected).await; } -#[test] -fn multiple_relay_resolvers() { +#[tokio::test] +async fn multiple_relay_resolvers() { let input = include_str!("relay_resolvers/fixtures/multiple-relay-resolvers.graphql"); let expected = include_str!("relay_resolvers/fixtures/multiple-relay-resolvers.expected"); - test_fixture(transform_fixture, "multiple-relay-resolvers.graphql", "relay_resolvers/fixtures/multiple-relay-resolvers.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-relay-resolvers.graphql", "relay_resolvers/fixtures/multiple-relay-resolvers.expected", input, expected).await; } -#[test] -fn nested_relay_resolver() { +#[tokio::test] +async fn nested_relay_resolver() { let input = include_str!("relay_resolvers/fixtures/nested-relay-resolver.graphql"); let expected = 
include_str!("relay_resolvers/fixtures/nested-relay-resolver.expected"); - test_fixture(transform_fixture, "nested-relay-resolver.graphql", "relay_resolvers/fixtures/nested-relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested-relay-resolver.graphql", "relay_resolvers/fixtures/nested-relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn relay_resolver() { let input = include_str!("relay_resolvers/fixtures/relay-resolver.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver.expected"); - test_fixture(transform_fixture, "relay-resolver.graphql", "relay_resolvers/fixtures/relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver.graphql", "relay_resolvers/fixtures/relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_backing_client_edge() { +#[tokio::test] +async fn relay_resolver_backing_client_edge() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-backing-client-edge.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-backing-client-edge.graphql", "relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-backing-client-edge.graphql", "relay_resolvers/fixtures/relay-resolver-backing-client-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_field_and_fragment_arguments() { +#[tokio::test] +async fn relay_resolver_field_and_fragment_arguments() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected"); - test_fixture(transform_fixture, 
"relay-resolver-field-and-fragment-arguments.graphql", "relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-field-and-fragment-arguments.graphql", "relay_resolvers/fixtures/relay-resolver-field-and-fragment-arguments.expected", input, expected).await; } -#[test] -fn relay_resolver_model() { +#[tokio::test] +async fn relay_resolver_model() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-model.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-model.expected"); - test_fixture(transform_fixture, "relay-resolver-model.graphql", "relay_resolvers/fixtures/relay-resolver-model.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-model.graphql", "relay_resolvers/fixtures/relay-resolver-model.expected", input, expected).await; } -#[test] -fn relay_resolver_named_import() { +#[tokio::test] +async fn relay_resolver_named_import() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-named-import.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-named-import.expected"); - test_fixture(transform_fixture, "relay-resolver-named-import.graphql", "relay_resolvers/fixtures/relay-resolver-named-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-named-import.graphql", "relay_resolvers/fixtures/relay-resolver-named-import.expected", input, expected).await; } -#[test] -fn relay_resolver_required() { +#[tokio::test] +async fn relay_resolver_required() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-required.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-required.expected"); - test_fixture(transform_fixture, "relay-resolver-required.graphql", "relay_resolvers/fixtures/relay-resolver-required.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"relay-resolver-required.graphql", "relay_resolvers/fixtures/relay-resolver-required.expected", input, expected).await; } -#[test] -fn relay_resolver_with_global_vars_directive_invalid() { +#[tokio::test] +async fn relay_resolver_scalar_field_arguments() { + let input = include_str!("relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.graphql"); + let expected = include_str!("relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-scalar-field-arguments.graphql", "relay_resolvers/fixtures/relay-resolver-scalar-field-arguments.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_scalar_field_arguments_with_alias() { + let input = include_str!("relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.graphql"); + let expected = include_str!("relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-scalar-field-arguments-with-alias.graphql", "relay_resolvers/fixtures/relay-resolver-scalar-field-arguments-with-alias.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_global_vars_directive_invalid() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-with-global-vars-directive.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-with-global-vars-directive.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-global-vars-directive.invalid.graphql", "relay_resolvers/fixtures/relay-resolver-with-global-vars-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-global-vars-directive.invalid.graphql", "relay_resolvers/fixtures/relay-resolver-with-global-vars-directive.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_with_global_vars_invalid() { +#[tokio::test] +async fn 
relay_resolver_with_global_vars_invalid() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-with-global-vars.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-with-global-vars.invalid.expected"); - test_fixture(transform_fixture, "relay-resolver-with-global-vars.invalid.graphql", "relay_resolvers/fixtures/relay-resolver-with-global-vars.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-global-vars.invalid.graphql", "relay_resolvers/fixtures/relay-resolver-with-global-vars.invalid.expected", input, expected).await; } -#[test] -fn relay_resolver_within_named_inline_fragment() { +#[tokio::test] +async fn relay_resolver_within_named_inline_fragment() { let input = include_str!("relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.graphql"); let expected = include_str!("relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected"); - test_fixture(transform_fixture, "relay-resolver-within-named-inline-fragment.graphql", "relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-within-named-inline-fragment.graphql", "relay_resolvers/fixtures/relay-resolver-within-named-inline-fragment.expected", input, expected).await; } -#[test] -fn unexpected_directive_invalid() { +#[tokio::test] +async fn unexpected_directive_invalid() { let input = include_str!("relay_resolvers/fixtures/unexpected-directive.invalid.graphql"); let expected = include_str!("relay_resolvers/fixtures/unexpected-directive.invalid.expected"); - test_fixture(transform_fixture, "unexpected-directive.invalid.graphql", "relay_resolvers/fixtures/unexpected-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unexpected-directive.invalid.graphql", "relay_resolvers/fixtures/unexpected-directive.invalid.expected", input, expected).await; } 
diff --git a/compiler/crates/relay-transforms/tests/relay_test_operation.rs b/compiler/crates/relay-transforms/tests/relay_test_operation.rs new file mode 100644 index 0000000000000..5645356d8effe --- /dev/null +++ b/compiler/crates/relay-transforms/tests/relay_test_operation.rs @@ -0,0 +1,58 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use regex::Regex; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::generate_test_operation_metadata; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let test_path_regex = Some(Regex::new(r#"^test"#).unwrap()); + + let next_program = generate_test_operation_metadata(&program, &test_path_regex) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + + let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + 
.map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/relay_test_operation/mod.rs b/compiler/crates/relay-transforms/tests/relay_test_operation/mod.rs deleted file mode 100644 index f45e895266216..0000000000000 --- a/compiler/crates/relay-transforms/tests/relay_test_operation/mod.rs +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use regex::Regex; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::generate_test_operation_metadata; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let test_path_regex = Some(Regex::new(r#"^test"#).unwrap()); - - let next_program = generate_test_operation_metadata(&program, &test_path_regex) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let printer_options = PrinterOptions 
{ - debug_directive_data: true, - ..Default::default() - }; - - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/relay_test_operation_test.rs b/compiler/crates/relay-transforms/tests/relay_test_operation_test.rs index 2352c30b69ba4..c00e5c351b941 100644 --- a/compiler/crates/relay-transforms/tests/relay_test_operation_test.rs +++ b/compiler/crates/relay-transforms/tests/relay_test_operation_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<8fc748d04078d4e5388f4005efcca2ad>> + * @generated SignedSource<<98280a3ab84e4e03b31bbd484423c775>> */ mod relay_test_operation; @@ -12,30 +12,30 @@ mod relay_test_operation; use relay_test_operation::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn prod_query_invalid() { +#[tokio::test] +async fn prod_query_invalid() { let input = include_str!("relay_test_operation/fixtures/prod_query.invalid.graphql"); let expected = include_str!("relay_test_operation/fixtures/prod_query.invalid.expected"); - test_fixture(transform_fixture, "prod_query.invalid.graphql", "relay_test_operation/fixtures/prod_query.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "prod_query.invalid.graphql", "relay_test_operation/fixtures/prod_query.invalid.expected", input, expected).await; } -#[test] -fn test_client_fields_query() { +#[tokio::test] +async fn test_client_fields_query() { let input = include_str!("relay_test_operation/fixtures/test_client_fields_query.graphql"); let expected = 
include_str!("relay_test_operation/fixtures/test_client_fields_query.expected"); - test_fixture(transform_fixture, "test_client_fields_query.graphql", "relay_test_operation/fixtures/test_client_fields_query.expected", input, expected); + test_fixture(transform_fixture, file!(), "test_client_fields_query.graphql", "relay_test_operation/fixtures/test_client_fields_query.expected", input, expected).await; } -#[test] -fn test_query_with_enums() { +#[tokio::test] +async fn test_query_with_enums() { let input = include_str!("relay_test_operation/fixtures/test_query_with_enums.graphql"); let expected = include_str!("relay_test_operation/fixtures/test_query_with_enums.expected"); - test_fixture(transform_fixture, "test_query_with_enums.graphql", "relay_test_operation/fixtures/test_query_with_enums.expected", input, expected); + test_fixture(transform_fixture, file!(), "test_query_with_enums.graphql", "relay_test_operation/fixtures/test_query_with_enums.expected", input, expected).await; } -#[test] -fn test_simple_query() { +#[tokio::test] +async fn test_simple_query() { let input = include_str!("relay_test_operation/fixtures/test_simple_query.graphql"); let expected = include_str!("relay_test_operation/fixtures/test_simple_query.expected"); - test_fixture(transform_fixture, "test_simple_query.graphql", "relay_test_operation/fixtures/test_simple_query.expected", input, expected); + test_fixture(transform_fixture, file!(), "test_simple_query.graphql", "relay_test_operation/fixtures/test_simple_query.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/required_directive.rs b/compiler/crates/relay-transforms/tests/required_directive.rs new file mode 100644 index 0000000000000..888148aace653 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/required_directive.rs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::DiagnosticsResult; +use fixture_tests::Fixture; +use graphql_ir::Program; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::fragment_alias_directive; +use relay_transforms::required_directive; + +fn transform(program: &Program) -> DiagnosticsResult { + required_directive(&fragment_alias_directive(program, true, false)?) +} + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, transform) +} diff --git a/compiler/crates/relay-transforms/tests/required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected b/compiler/crates/relay-transforms/tests/required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected index e387c71a9108f..a1874c4dbf877 100644 --- a/compiler/crates/relay-transforms/tests/required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected +++ b/compiler/crates/relay-transforms/tests/required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected @@ -6,19 +6,20 @@ fragment Foo on Node { name } ==================================== OUTPUT =================================== -fragment Foo on Node @__childrenCanBubbleNull { - ... on MaybeNodeInterface @__FragmentAliasMetadata +fragment Foo on Node { + ... 
on MaybeNodeInterface @alias(as: "aliased_fragment") @__FragmentAliasMetadata # FragmentAliasMetadata { # alias: WithLocation { - # location: conflicting-required-status-across-aliased-inline-fragments.graphql:60:62, + # location: conflicting-required-status-across-aliased-inline-fragments.graphql:64:82, # item: "aliased_fragment", # }, # type_condition: Some( # Interface(0), # ), + # non_nullable: false, # selection_type: Interface(0), # } - { + @__childrenCanBubbleNull { name @required(action: NONE) @__RequiredMetadataDirective # RequiredMetadataDirective { # action: None, diff --git a/compiler/crates/relay-transforms/tests/required_directive/mod.rs b/compiler/crates/relay-transforms/tests/required_directive/mod.rs deleted file mode 100644 index 0315fab29aa65..0000000000000 --- a/compiler/crates/relay-transforms/tests/required_directive/mod.rs +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use common::DiagnosticsResult; -use common::FeatureFlag; -use fixture_tests::Fixture; -use graphql_ir::Program; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::fragment_alias_directive; -use relay_transforms::required_directive; - -fn transform(program: &Program) -> DiagnosticsResult { - required_directive(&fragment_alias_directive(program, &FeatureFlag::Enabled)?) 
-} - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, transform) -} diff --git a/compiler/crates/relay-transforms/tests/required_directive_test.rs b/compiler/crates/relay-transforms/tests/required_directive_test.rs index d394bc755d418..d1d56710b92cb 100644 --- a/compiler/crates/relay-transforms/tests/required_directive_test.rs +++ b/compiler/crates/relay-transforms/tests/required_directive_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<854adab7d4107de9ea7820c842e3868b>> */ mod required_directive; @@ -12,219 +12,219 @@ mod required_directive; use required_directive::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn action_argument_omitted_invalid() { +#[tokio::test] +async fn action_argument_omitted_invalid() { let input = include_str!("required_directive/fixtures/action-argument-omitted.invalid.graphql"); let expected = include_str!("required_directive/fixtures/action-argument-omitted.invalid.expected"); - test_fixture(transform_fixture, "action-argument-omitted.invalid.graphql", "required_directive/fixtures/action-argument-omitted.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "action-argument-omitted.invalid.graphql", "required_directive/fixtures/action-argument-omitted.invalid.expected", input, expected).await; } -#[test] -fn conflicting_required_status_across_aliased_inline_fragments() { +#[tokio::test] +async fn conflicting_required_status_across_aliased_inline_fragments() { let input = include_str!("required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.graphql"); let expected = include_str!("required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected"); - test_fixture(transform_fixture, 
"conflicting-required-status-across-aliased-inline-fragments.graphql", "required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "conflicting-required-status-across-aliased-inline-fragments.graphql", "required_directive/fixtures/conflicting-required-status-across-aliased-inline-fragments.expected", input, expected).await; } -#[test] -fn duplicate_field_different_actions_invalid() { +#[tokio::test] +async fn duplicate_field_different_actions_invalid() { let input = include_str!("required_directive/fixtures/duplicate-field-different-actions.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field-different-actions.invalid.expected"); - test_fixture(transform_fixture, "duplicate-field-different-actions.invalid.graphql", "required_directive/fixtures/duplicate-field-different-actions.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field-different-actions.invalid.graphql", "required_directive/fixtures/duplicate-field-different-actions.invalid.expected", input, expected).await; } -#[test] -fn duplicate_field_include_directive_invalid() { +#[tokio::test] +async fn duplicate_field_include_directive_invalid() { let input = include_str!("required_directive/fixtures/duplicate-field-include-directive.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field-include-directive.invalid.expected"); - test_fixture(transform_fixture, "duplicate-field-include-directive.invalid.graphql", "required_directive/fixtures/duplicate-field-include-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field-include-directive.invalid.graphql", "required_directive/fixtures/duplicate-field-include-directive.invalid.expected", input, expected).await; } -#[test] -fn duplicate_field_invalid() { +#[tokio::test] +async fn 
duplicate_field_invalid() { let input = include_str!("required_directive/fixtures/duplicate-field.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field.invalid.expected"); - test_fixture(transform_fixture, "duplicate-field.invalid.graphql", "required_directive/fixtures/duplicate-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field.invalid.graphql", "required_directive/fixtures/duplicate-field.invalid.expected", input, expected).await; } -#[test] -fn duplicate_field_nullable_parent_invalid() { +#[tokio::test] +async fn duplicate_field_nullable_parent_invalid() { let input = include_str!("required_directive/fixtures/duplicate-field-nullable-parent.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field-nullable-parent.invalid.expected"); - test_fixture(transform_fixture, "duplicate-field-nullable-parent.invalid.graphql", "required_directive/fixtures/duplicate-field-nullable-parent.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field-nullable-parent.invalid.graphql", "required_directive/fixtures/duplicate-field-nullable-parent.invalid.expected", input, expected).await; } -#[test] -fn duplicate_field_nullable_parent_missing_first_invalid() { +#[tokio::test] +async fn duplicate_field_nullable_parent_missing_first_invalid() { let input = include_str!("required_directive/fixtures/duplicate-field-nullable-parent-missing-first.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field-nullable-parent-missing-first.invalid.expected"); - test_fixture(transform_fixture, "duplicate-field-nullable-parent-missing-first.invalid.graphql", "required_directive/fixtures/duplicate-field-nullable-parent-missing-first.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field-nullable-parent-missing-first.invalid.graphql", 
"required_directive/fixtures/duplicate-field-nullable-parent-missing-first.invalid.expected", input, expected).await; } -#[test] -fn duplicate_field_ussage_alias() { +#[tokio::test] +async fn duplicate_field_ussage_alias() { let input = include_str!("required_directive/fixtures/duplicate-field-ussage-alias.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-field-ussage-alias.expected"); - test_fixture(transform_fixture, "duplicate-field-ussage-alias.graphql", "required_directive/fixtures/duplicate-field-ussage-alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-field-ussage-alias.graphql", "required_directive/fixtures/duplicate-field-ussage-alias.expected", input, expected).await; } -#[test] -fn duplicate_linked_field_different_actions_invalid() { +#[tokio::test] +async fn duplicate_linked_field_different_actions_invalid() { let input = include_str!("required_directive/fixtures/duplicate-linked-field-different-actions.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-linked-field-different-actions.invalid.expected"); - test_fixture(transform_fixture, "duplicate-linked-field-different-actions.invalid.graphql", "required_directive/fixtures/duplicate-linked-field-different-actions.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-linked-field-different-actions.invalid.graphql", "required_directive/fixtures/duplicate-linked-field-different-actions.invalid.expected", input, expected).await; } -#[test] -fn duplicate_linked_field_nullable_parent_invalid() { +#[tokio::test] +async fn duplicate_linked_field_nullable_parent_invalid() { let input = include_str!("required_directive/fixtures/duplicate-linked-field-nullable-parent.invalid.graphql"); let expected = include_str!("required_directive/fixtures/duplicate-linked-field-nullable-parent.invalid.expected"); - test_fixture(transform_fixture, 
"duplicate-linked-field-nullable-parent.invalid.graphql", "required_directive/fixtures/duplicate-linked-field-nullable-parent.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "duplicate-linked-field-nullable-parent.invalid.graphql", "required_directive/fixtures/duplicate-linked-field-nullable-parent.invalid.expected", input, expected).await; } -#[test] -fn fragments_are_isolated() { +#[tokio::test] +async fn fragments_are_isolated() { let input = include_str!("required_directive/fixtures/fragments-are-isolated.graphql"); let expected = include_str!("required_directive/fixtures/fragments-are-isolated.expected"); - test_fixture(transform_fixture, "fragments-are-isolated.graphql", "required_directive/fixtures/fragments-are-isolated.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragments-are-isolated.graphql", "required_directive/fixtures/fragments-are-isolated.expected", input, expected).await; } -#[test] -fn inline_directive_invalid() { +#[tokio::test] +async fn inline_directive_invalid() { let input = include_str!("required_directive/fixtures/inline-directive.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-directive.invalid.expected"); - test_fixture(transform_fixture, "inline-directive.invalid.graphql", "required_directive/fixtures/inline-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-directive.invalid.graphql", "required_directive/fixtures/inline-directive.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_concrete_in_interface_invalid() { +#[tokio::test] +async fn inline_fragment_on_concrete_in_interface_invalid() { let input = include_str!("required_directive/fixtures/inline-fragment-on-concrete-in-interface.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-concrete-in-interface.invalid.expected"); - test_fixture(transform_fixture, 
"inline-fragment-on-concrete-in-interface.invalid.graphql", "required_directive/fixtures/inline-fragment-on-concrete-in-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-concrete-in-interface.invalid.graphql", "required_directive/fixtures/inline-fragment-on-concrete-in-interface.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_interface_in_concrete_invalid() { +#[tokio::test] +async fn inline_fragment_on_interface_in_concrete_invalid() { let input = include_str!("required_directive/fixtures/inline-fragment-on-interface-in-concrete.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-interface-in-concrete.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-on-interface-in-concrete.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface-in-concrete.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-interface-in-concrete.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface-in-concrete.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_interface_invalid() { +#[tokio::test] +async fn inline_fragment_on_interface_invalid() { let input = include_str!("required_directive/fixtures/inline-fragment-on-interface.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-interface.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-on-interface.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-interface.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_interface_with_linked_field_invalid() { +#[tokio::test] +async fn 
inline_fragment_on_interface_with_linked_field_invalid() { let input = include_str!("required_directive/fixtures/inline-fragment-on-interface-with-linked-field.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-interface-with-linked-field.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-on-interface-with-linked-field.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface-with-linked-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-interface-with-linked-field.invalid.graphql", "required_directive/fixtures/inline-fragment-on-interface-with-linked-field.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_on_interface_within_linked_field() { +#[tokio::test] +async fn inline_fragment_on_interface_within_linked_field() { let input = include_str!("required_directive/fixtures/inline-fragment-on-interface-within-linked-field.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-interface-within-linked-field.expected"); - test_fixture(transform_fixture, "inline-fragment-on-interface-within-linked-field.graphql", "required_directive/fixtures/inline-fragment-on-interface-within-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-interface-within-linked-field.graphql", "required_directive/fixtures/inline-fragment-on-interface-within-linked-field.expected", input, expected).await; } -#[test] -fn inline_fragment_on_union_invalid() { +#[tokio::test] +async fn inline_fragment_on_union_invalid() { let input = include_str!("required_directive/fixtures/inline-fragment-on-union.invalid.graphql"); let expected = include_str!("required_directive/fixtures/inline-fragment-on-union.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-on-union.invalid.graphql", 
"required_directive/fixtures/inline-fragment-on-union.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-on-union.invalid.graphql", "required_directive/fixtures/inline-fragment-on-union.invalid.expected", input, expected).await; } -#[test] -fn linked_field_log() { +#[tokio::test] +async fn linked_field_log() { let input = include_str!("required_directive/fixtures/linked-field-log.graphql"); let expected = include_str!("required_directive/fixtures/linked-field-log.expected"); - test_fixture(transform_fixture, "linked-field-log.graphql", "required_directive/fixtures/linked-field-log.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field-log.graphql", "required_directive/fixtures/linked-field-log.expected", input, expected).await; } -#[test] -fn linked_field_no_log() { +#[tokio::test] +async fn linked_field_no_log() { let input = include_str!("required_directive/fixtures/linked-field-no-log.graphql"); let expected = include_str!("required_directive/fixtures/linked-field-no-log.expected"); - test_fixture(transform_fixture, "linked-field-no-log.graphql", "required_directive/fixtures/linked-field-no-log.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field-no-log.graphql", "required_directive/fixtures/linked-field-no-log.expected", input, expected).await; } -#[test] -fn linked_field_throw() { +#[tokio::test] +async fn linked_field_throw() { let input = include_str!("required_directive/fixtures/linked-field-throw.graphql"); let expected = include_str!("required_directive/fixtures/linked-field-throw.expected"); - test_fixture(transform_fixture, "linked-field-throw.graphql", "required_directive/fixtures/linked-field-throw.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field-throw.graphql", "required_directive/fixtures/linked-field-throw.expected", input, expected).await; } -#[test] -fn log_action_bubble_to_throw_invalid() { 
+#[tokio::test] +async fn log_action_bubble_to_throw_invalid() { let input = include_str!("required_directive/fixtures/log-action-bubble-to-throw.invalid.graphql"); let expected = include_str!("required_directive/fixtures/log-action-bubble-to-throw.invalid.expected"); - test_fixture(transform_fixture, "log-action-bubble-to-throw.invalid.graphql", "required_directive/fixtures/log-action-bubble-to-throw.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "log-action-bubble-to-throw.invalid.graphql", "required_directive/fixtures/log-action-bubble-to-throw.invalid.expected", input, expected).await; } -#[test] -fn multiple_required_fields_invalid() { +#[tokio::test] +async fn multiple_required_fields_invalid() { let input = include_str!("required_directive/fixtures/multiple-required-fields.invalid.graphql"); let expected = include_str!("required_directive/fixtures/multiple-required-fields.invalid.expected"); - test_fixture(transform_fixture, "multiple-required-fields.invalid.graphql", "required_directive/fixtures/multiple-required-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-required-fields.invalid.graphql", "required_directive/fixtures/multiple-required-fields.invalid.expected", input, expected).await; } -#[test] -fn none_action_bubble_to_log_across_inline_fragment_invalid() { +#[tokio::test] +async fn none_action_bubble_to_log_across_inline_fragment_invalid() { let input = include_str!("required_directive/fixtures/none-action-bubble-to-log-across-inline-fragment.invalid.graphql"); let expected = include_str!("required_directive/fixtures/none-action-bubble-to-log-across-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "none-action-bubble-to-log-across-inline-fragment.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-log-across-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"none-action-bubble-to-log-across-inline-fragment.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-log-across-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn none_action_bubble_to_log_invalid() { +#[tokio::test] +async fn none_action_bubble_to_log_invalid() { let input = include_str!("required_directive/fixtures/none-action-bubble-to-log.invalid.graphql"); let expected = include_str!("required_directive/fixtures/none-action-bubble-to-log.invalid.expected"); - test_fixture(transform_fixture, "none-action-bubble-to-log.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-log.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "none-action-bubble-to-log.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-log.invalid.expected", input, expected).await; } -#[test] -fn none_action_bubble_to_throw_invalid() { +#[tokio::test] +async fn none_action_bubble_to_throw_invalid() { let input = include_str!("required_directive/fixtures/none-action-bubble-to-throw.invalid.graphql"); let expected = include_str!("required_directive/fixtures/none-action-bubble-to-throw.invalid.expected"); - test_fixture(transform_fixture, "none-action-bubble-to-throw.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-throw.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "none-action-bubble-to-throw.invalid.graphql", "required_directive/fixtures/none-action-bubble-to-throw.invalid.expected", input, expected).await; } -#[test] -fn required_paths() { +#[tokio::test] +async fn required_paths() { let input = include_str!("required_directive/fixtures/required-paths.graphql"); let expected = include_str!("required_directive/fixtures/required-paths.expected"); - test_fixture(transform_fixture, "required-paths.graphql", "required_directive/fixtures/required-paths.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"required-paths.graphql", "required_directive/fixtures/required-paths.expected", input, expected).await; } -#[test] -fn required_with_different_actions_invalid() { +#[tokio::test] +async fn required_with_different_actions_invalid() { let input = include_str!("required_directive/fixtures/required-with-different-actions.invalid.graphql"); let expected = include_str!("required_directive/fixtures/required-with-different-actions.invalid.expected"); - test_fixture(transform_fixture, "required-with-different-actions.invalid.graphql", "required_directive/fixtures/required-with-different-actions.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-with-different-actions.invalid.graphql", "required_directive/fixtures/required-with-different-actions.invalid.expected", input, expected).await; } -#[test] -fn scalar_field_log() { +#[tokio::test] +async fn scalar_field_log() { let input = include_str!("required_directive/fixtures/scalar-field-log.graphql"); let expected = include_str!("required_directive/fixtures/scalar-field-log.expected"); - test_fixture(transform_fixture, "scalar-field-log.graphql", "required_directive/fixtures/scalar-field-log.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field-log.graphql", "required_directive/fixtures/scalar-field-log.expected", input, expected).await; } -#[test] -fn scalar_field_no_log() { +#[tokio::test] +async fn scalar_field_no_log() { let input = include_str!("required_directive/fixtures/scalar-field-no-log.graphql"); let expected = include_str!("required_directive/fixtures/scalar-field-no-log.expected"); - test_fixture(transform_fixture, "scalar-field-no-log.graphql", "required_directive/fixtures/scalar-field-no-log.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field-no-log.graphql", "required_directive/fixtures/scalar-field-no-log.expected", input, expected).await; } -#[test] -fn scalar_field_throw() { +#[tokio::test] +async fn 
scalar_field_throw() { let input = include_str!("required_directive/fixtures/scalar-field-throw.graphql"); let expected = include_str!("required_directive/fixtures/scalar-field-throw.expected"); - test_fixture(transform_fixture, "scalar-field-throw.graphql", "required_directive/fixtures/scalar-field-throw.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field-throw.graphql", "required_directive/fixtures/scalar-field-throw.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/skip_client_extensions.rs b/compiler/crates/relay-transforms/tests/skip_client_extensions.rs new file mode 100644 index 0000000000000..2c6491f478659 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/skip_client_extensions.rs @@ -0,0 +1,49 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::skip_client_extensions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let context = Program::from_definitions(Arc::clone(&schema), ir); + let next_context = skip_client_extensions(&context); + + let printer_options = 
PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + let mut printed = next_context + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_context + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + Ok(printed.join("\n\n")) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/skip_client_extensions/mod.rs b/compiler/crates/relay-transforms/tests/skip_client_extensions/mod.rs deleted file mode 100644 index 0df7ae30321c3..0000000000000 --- a/compiler/crates/relay-transforms/tests/skip_client_extensions/mod.rs +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::skip_client_extensions; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let context = Program::from_definitions(Arc::clone(&schema), ir); - let next_context = skip_client_extensions(&context); - - let printer_options = PrinterOptions { - debug_directive_data: 
true, - ..Default::default() - }; - let mut printed = next_context - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_context - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - Ok(printed.join("\n\n")) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/skip_client_extensions_test.rs b/compiler/crates/relay-transforms/tests/skip_client_extensions_test.rs index a69ccef9ef75c..a579a97a4ce63 100644 --- a/compiler/crates/relay-transforms/tests/skip_client_extensions_test.rs +++ b/compiler/crates/relay-transforms/tests/skip_client_extensions_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<63a5e6c07739f57b58aebaf6414d3d8f>> + * @generated SignedSource<<407d96fd654c98e1a8fa0ddc2161b75c>> */ mod skip_client_extensions; @@ -12,100 +12,100 @@ mod skip_client_extensions; use skip_client_extensions::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn client_conditions() { +#[tokio::test] +async fn client_conditions() { let input = include_str!("skip_client_extensions/fixtures/client-conditions.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-conditions.expected"); - test_fixture(transform_fixture, "client-conditions.graphql", "skip_client_extensions/fixtures/client-conditions.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-conditions.graphql", "skip_client_extensions/fixtures/client-conditions.expected", input, expected).await; } -#[test] -fn client_directives() { +#[tokio::test] +async fn client_directives() { let input = include_str!("skip_client_extensions/fixtures/client-directives.graphql"); let expected = 
include_str!("skip_client_extensions/fixtures/client-directives.expected"); - test_fixture(transform_fixture, "client-directives.graphql", "skip_client_extensions/fixtures/client-directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-directives.graphql", "skip_client_extensions/fixtures/client-directives.expected", input, expected).await; } -#[test] -fn client_fields_in_inline_fragments() { +#[tokio::test] +async fn client_fields_in_inline_fragments() { let input = include_str!("skip_client_extensions/fixtures/client-fields-in-inline-fragments.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-fields-in-inline-fragments.expected"); - test_fixture(transform_fixture, "client-fields-in-inline-fragments.graphql", "skip_client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-in-inline-fragments.graphql", "skip_client_extensions/fixtures/client-fields-in-inline-fragments.expected", input, expected).await; } -#[test] -fn client_fields_of_client_type() { +#[tokio::test] +async fn client_fields_of_client_type() { let input = include_str!("skip_client_extensions/fixtures/client-fields-of-client-type.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-fields-of-client-type.expected"); - test_fixture(transform_fixture, "client-fields-of-client-type.graphql", "skip_client_extensions/fixtures/client-fields-of-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-of-client-type.graphql", "skip_client_extensions/fixtures/client-fields-of-client-type.expected", input, expected).await; } -#[test] -fn client_fields_on_roots() { +#[tokio::test] +async fn client_fields_on_roots() { let input = include_str!("skip_client_extensions/fixtures/client-fields-on-roots.graphql"); let expected = 
include_str!("skip_client_extensions/fixtures/client-fields-on-roots.expected"); - test_fixture(transform_fixture, "client-fields-on-roots.graphql", "skip_client_extensions/fixtures/client-fields-on-roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fields-on-roots.graphql", "skip_client_extensions/fixtures/client-fields-on-roots.expected", input, expected).await; } -#[test] -fn client_fragment_spreads() { +#[tokio::test] +async fn client_fragment_spreads() { let input = include_str!("skip_client_extensions/fixtures/client-fragment-spreads.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-fragment-spreads.expected"); - test_fixture(transform_fixture, "client-fragment-spreads.graphql", "skip_client_extensions/fixtures/client-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fragment-spreads.graphql", "skip_client_extensions/fixtures/client-fragment-spreads.expected", input, expected).await; } -#[test] -fn client_fragment_spreads_in_query() { +#[tokio::test] +async fn client_fragment_spreads_in_query() { let input = include_str!("skip_client_extensions/fixtures/client-fragment-spreads-in-query.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-fragment-spreads-in-query.expected"); - test_fixture(transform_fixture, "client-fragment-spreads-in-query.graphql", "skip_client_extensions/fixtures/client-fragment-spreads-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-fragment-spreads-in-query.graphql", "skip_client_extensions/fixtures/client-fragment-spreads-in-query.expected", input, expected).await; } -#[test] -fn client_inline_fragments() { +#[tokio::test] +async fn client_inline_fragments() { let input = include_str!("skip_client_extensions/fixtures/client-inline-fragments.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-inline-fragments.expected"); - 
test_fixture(transform_fixture, "client-inline-fragments.graphql", "skip_client_extensions/fixtures/client-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-inline-fragments.graphql", "skip_client_extensions/fixtures/client-inline-fragments.expected", input, expected).await; } -#[test] -fn client_inline_fragments_in_query() { +#[tokio::test] +async fn client_inline_fragments_in_query() { let input = include_str!("skip_client_extensions/fixtures/client-inline-fragments-in-query.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-inline-fragments-in-query.expected"); - test_fixture(transform_fixture, "client-inline-fragments-in-query.graphql", "skip_client_extensions/fixtures/client-inline-fragments-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-inline-fragments-in-query.graphql", "skip_client_extensions/fixtures/client-inline-fragments-in-query.expected", input, expected).await; } -#[test] -fn client_linked_fields() { +#[tokio::test] +async fn client_linked_fields() { let input = include_str!("skip_client_extensions/fixtures/client-linked-fields.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-linked-fields.expected"); - test_fixture(transform_fixture, "client-linked-fields.graphql", "skip_client_extensions/fixtures/client-linked-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-linked-fields.graphql", "skip_client_extensions/fixtures/client-linked-fields.expected", input, expected).await; } -#[test] -fn client_scalar_fields() { +#[tokio::test] +async fn client_scalar_fields() { let input = include_str!("skip_client_extensions/fixtures/client-scalar-fields.graphql"); let expected = include_str!("skip_client_extensions/fixtures/client-scalar-fields.expected"); - test_fixture(transform_fixture, "client-scalar-fields.graphql", 
"skip_client_extensions/fixtures/client-scalar-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-scalar-fields.graphql", "skip_client_extensions/fixtures/client-scalar-fields.expected", input, expected).await; } -#[test] -fn query_with_only_client_fields() { +#[tokio::test] +async fn query_with_only_client_fields() { let input = include_str!("skip_client_extensions/fixtures/query-with-only-client-fields.graphql"); let expected = include_str!("skip_client_extensions/fixtures/query-with-only-client-fields.expected"); - test_fixture(transform_fixture, "query-with-only-client-fields.graphql", "skip_client_extensions/fixtures/query-with-only-client-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-only-client-fields.graphql", "skip_client_extensions/fixtures/query-with-only-client-fields.expected", input, expected).await; } -#[test] -fn relay_resolver_metadata() { +#[tokio::test] +async fn relay_resolver_metadata() { let input = include_str!("skip_client_extensions/fixtures/relay-resolver-metadata.graphql"); let expected = include_str!("skip_client_extensions/fixtures/relay-resolver-metadata.expected"); - test_fixture(transform_fixture, "relay-resolver-metadata.graphql", "skip_client_extensions/fixtures/relay-resolver-metadata.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-metadata.graphql", "skip_client_extensions/fixtures/relay-resolver-metadata.expected", input, expected).await; } -#[test] -fn sibling_client_selections() { +#[tokio::test] +async fn sibling_client_selections() { let input = include_str!("skip_client_extensions/fixtures/sibling-client-selections.graphql"); let expected = include_str!("skip_client_extensions/fixtures/sibling-client-selections.expected"); - test_fixture(transform_fixture, "sibling-client-selections.graphql", "skip_client_extensions/fixtures/sibling-client-selections.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "sibling-client-selections.graphql", "skip_client_extensions/fixtures/sibling-client-selections.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/skip_redundant_nodes.rs b/compiler/crates/relay-transforms/tests/skip_redundant_nodes.rs new file mode 100644 index 0000000000000..c76e0ae33721c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/skip_redundant_nodes.rs @@ -0,0 +1,57 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_config::DeferStreamInterface; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::inline_fragments; +use relay_transforms::skip_redundant_nodes; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + let printer_options = PrinterOptions { + debug_directive_data: true, + ..Default::default() + }; + let defer_stream_interface = DeferStreamInterface::default(); + let mut printed = if let [base, extensions] = parts.as_slice() { + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = + skip_redundant_nodes(&inline_fragments(&program), defer_stream_interface); + next_program + .operations() + .map(|def| 
print_operation(&schema, def, printer_options.clone())) + .collect::>() + } else { + let schema = get_test_schema(); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let next_program = + skip_redundant_nodes(&inline_fragments(&program), defer_stream_interface); + next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .collect::>() + }; + + printed.sort(); + Ok(printed.join("\n\n")) +} diff --git a/compiler/crates/relay-transforms/tests/skip_redundant_nodes/mod.rs b/compiler/crates/relay-transforms/tests/skip_redundant_nodes/mod.rs deleted file mode 100644 index 69e7969098c85..0000000000000 --- a/compiler/crates/relay-transforms/tests/skip_redundant_nodes/mod.rs +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::inline_fragments; -use relay_transforms::skip_redundant_nodes; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - let mut printed = if let [base, extensions] = parts.as_slice() { - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = skip_redundant_nodes(&inline_fragments(&program)); - next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .collect::>() - } else { - let schema = get_test_schema(); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let next_program = skip_redundant_nodes(&inline_fragments(&program)); - next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .collect::>() - }; - - printed.sort(); - Ok(printed.join("\n\n")) -} diff --git a/compiler/crates/relay-transforms/tests/skip_redundant_nodes_test.rs b/compiler/crates/relay-transforms/tests/skip_redundant_nodes_test.rs index 9f0e8bb332d8c..176494aa4d1d4 100644 --- 
a/compiler/crates/relay-transforms/tests/skip_redundant_nodes_test.rs +++ b/compiler/crates/relay-transforms/tests/skip_redundant_nodes_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<550822f58e01f87929cf6bd8be28dc4a>> + * @generated SignedSource<<0f319903f07b43ca9d1fd638aae3ef31>> */ mod skip_redundant_nodes; @@ -12,79 +12,79 @@ mod skip_redundant_nodes; use skip_redundant_nodes::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn dont_skip_different_ids() { +#[tokio::test] +async fn dont_skip_different_ids() { let input = include_str!("skip_redundant_nodes/fixtures/dont-skip-different-ids.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/dont-skip-different-ids.expected"); - test_fixture(transform_fixture, "dont-skip-different-ids.graphql", "skip_redundant_nodes/fixtures/dont-skip-different-ids.expected", input, expected); + test_fixture(transform_fixture, file!(), "dont-skip-different-ids.graphql", "skip_redundant_nodes/fixtures/dont-skip-different-ids.expected", input, expected).await; } -#[test] -fn dont_skip_nested_fields_across_fragments() { +#[tokio::test] +async fn dont_skip_nested_fields_across_fragments() { let input = include_str!("skip_redundant_nodes/fixtures/dont-skip-nested-fields-across-fragments.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/dont-skip-nested-fields-across-fragments.expected"); - test_fixture(transform_fixture, "dont-skip-nested-fields-across-fragments.graphql", "skip_redundant_nodes/fixtures/dont-skip-nested-fields-across-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "dont-skip-nested-fields-across-fragments.graphql", "skip_redundant_nodes/fixtures/dont-skip-nested-fields-across-fragments.expected", input, expected).await; } -#[test] -fn dont_skip_with_inline_on_diffent_types() { +#[tokio::test] +async fn 
dont_skip_with_inline_on_diffent_types() { let input = include_str!("skip_redundant_nodes/fixtures/dont-skip-with-inline-on-diffent-types.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/dont-skip-with-inline-on-diffent-types.expected"); - test_fixture(transform_fixture, "dont-skip-with-inline-on-diffent-types.graphql", "skip_redundant_nodes/fixtures/dont-skip-with-inline-on-diffent-types.expected", input, expected); + test_fixture(transform_fixture, file!(), "dont-skip-with-inline-on-diffent-types.graphql", "skip_redundant_nodes/fixtures/dont-skip-with-inline-on-diffent-types.expected", input, expected).await; } -#[test] -fn redundant_selection_in_inline_fragments() { +#[tokio::test] +async fn redundant_selection_in_inline_fragments() { let input = include_str!("skip_redundant_nodes/fixtures/redundant-selection-in-inline-fragments.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/redundant-selection-in-inline-fragments.expected"); - test_fixture(transform_fixture, "redundant-selection-in-inline-fragments.graphql", "skip_redundant_nodes/fixtures/redundant-selection-in-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "redundant-selection-in-inline-fragments.graphql", "skip_redundant_nodes/fixtures/redundant-selection-in-inline-fragments.expected", input, expected).await; } -#[test] -fn skip_nested_linked_fields() { +#[tokio::test] +async fn skip_nested_linked_fields() { let input = include_str!("skip_redundant_nodes/fixtures/skip-nested-linked-fields.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skip-nested-linked-fields.expected"); - test_fixture(transform_fixture, "skip-nested-linked-fields.graphql", "skip_redundant_nodes/fixtures/skip-nested-linked-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "skip-nested-linked-fields.graphql", "skip_redundant_nodes/fixtures/skip-nested-linked-fields.expected", input, expected).await; } 
-#[test] -fn skips_nested_fields() { +#[tokio::test] +async fn skips_nested_fields() { let input = include_str!("skip_redundant_nodes/fixtures/skips-nested-fields.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skips-nested-fields.expected"); - test_fixture(transform_fixture, "skips-nested-fields.graphql", "skip_redundant_nodes/fixtures/skips-nested-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-nested-fields.graphql", "skip_redundant_nodes/fixtures/skips-nested-fields.expected", input, expected).await; } -#[test] -fn skips_with_client_extensions() { +#[tokio::test] +async fn skips_with_client_extensions() { let input = include_str!("skip_redundant_nodes/fixtures/skips-with-client-extensions.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skips-with-client-extensions.expected"); - test_fixture(transform_fixture, "skips-with-client-extensions.graphql", "skip_redundant_nodes/fixtures/skips-with-client-extensions.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-with-client-extensions.graphql", "skip_redundant_nodes/fixtures/skips-with-client-extensions.expected", input, expected).await; } -#[test] -fn skips_with_fragment() { +#[tokio::test] +async fn skips_with_fragment() { let input = include_str!("skip_redundant_nodes/fixtures/skips-with-fragment.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skips-with-fragment.expected"); - test_fixture(transform_fixture, "skips-with-fragment.graphql", "skip_redundant_nodes/fixtures/skips-with-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-with-fragment.graphql", "skip_redundant_nodes/fixtures/skips-with-fragment.expected", input, expected).await; } -#[test] -fn skips_with_module() { +#[tokio::test] +async fn skips_with_module() { let input = include_str!("skip_redundant_nodes/fixtures/skips-with-module.graphql"); let expected = 
include_str!("skip_redundant_nodes/fixtures/skips-with-module.expected"); - test_fixture(transform_fixture, "skips-with-module.graphql", "skip_redundant_nodes/fixtures/skips-with-module.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-with-module.graphql", "skip_redundant_nodes/fixtures/skips-with-module.expected", input, expected).await; } -#[test] -fn skips_with_outer_fields_first() { +#[tokio::test] +async fn skips_with_outer_fields_first() { let input = include_str!("skip_redundant_nodes/fixtures/skips-with-outer-fields-first.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skips-with-outer-fields-first.expected"); - test_fixture(transform_fixture, "skips-with-outer-fields-first.graphql", "skip_redundant_nodes/fixtures/skips-with-outer-fields-first.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-with-outer-fields-first.graphql", "skip_redundant_nodes/fixtures/skips-with-outer-fields-first.expected", input, expected).await; } -#[test] -fn skips_with_outer_fields_last() { +#[tokio::test] +async fn skips_with_outer_fields_last() { let input = include_str!("skip_redundant_nodes/fixtures/skips-with-outer-fields-last.graphql"); let expected = include_str!("skip_redundant_nodes/fixtures/skips-with-outer-fields-last.expected"); - test_fixture(transform_fixture, "skips-with-outer-fields-last.graphql", "skip_redundant_nodes/fixtures/skips-with-outer-fields-last.expected", input, expected); + test_fixture(transform_fixture, file!(), "skips-with-outer-fields-last.graphql", "skip_redundant_nodes/fixtures/skips-with-outer-fields-last.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/skip_unreachable_nodes.rs b/compiler/crates/relay-transforms/tests/skip_unreachable_nodes.rs new file mode 100644 index 0000000000000..84c23c75d5cce --- /dev/null +++ b/compiler/crates/relay-transforms/tests/skip_unreachable_nodes.rs @@ -0,0 +1,18 @@ +/* + * Copyright (c) Meta 
Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_config::DeferStreamInterface; +use relay_transforms::skip_unreachable_node_strict; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let defer_stream_interface = DeferStreamInterface::default(); + apply_transform_for_test(fixture, |program| { + skip_unreachable_node_strict(program, defer_stream_interface) + }) +} diff --git a/compiler/crates/relay-transforms/tests/skip_unreachable_nodes/mod.rs b/compiler/crates/relay-transforms/tests/skip_unreachable_nodes/mod.rs deleted file mode 100644 index f9a2d693dea79..0000000000000 --- a/compiler/crates/relay-transforms/tests/skip_unreachable_nodes/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::skip_unreachable_node_strict; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, skip_unreachable_node_strict) -} diff --git a/compiler/crates/relay-transforms/tests/skip_unreachable_nodes_test.rs b/compiler/crates/relay-transforms/tests/skip_unreachable_nodes_test.rs index 6a894de234aae..2b2ab44cc418b 100644 --- a/compiler/crates/relay-transforms/tests/skip_unreachable_nodes_test.rs +++ b/compiler/crates/relay-transforms/tests/skip_unreachable_nodes_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<92728b5919d760a08def5a55c9b9e71e>> + * @generated SignedSource<<256a993c631a9ebc61e38ac8a97f397a>> */ mod skip_unreachable_nodes; @@ -12,37 +12,37 @@ mod skip_unreachable_nodes; use skip_unreachable_nodes::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn keeps_other_fields() { +#[tokio::test] +async fn keeps_other_fields() { let input = include_str!("skip_unreachable_nodes/fixtures/keeps-other-fields.graphql"); let expected = include_str!("skip_unreachable_nodes/fixtures/keeps-other-fields.expected"); - test_fixture(transform_fixture, "keeps-other-fields.graphql", "skip_unreachable_nodes/fixtures/keeps-other-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "keeps-other-fields.graphql", "skip_unreachable_nodes/fixtures/keeps-other-fields.expected", input, expected).await; } -#[test] -fn removes_include_false() { +#[tokio::test] +async fn removes_include_false() { let input = include_str!("skip_unreachable_nodes/fixtures/removes-include-false.graphql"); let expected = include_str!("skip_unreachable_nodes/fixtures/removes-include-false.expected"); - test_fixture(transform_fixture, "removes-include-false.graphql", "skip_unreachable_nodes/fixtures/removes-include-false.expected", input, expected); + test_fixture(transform_fixture, file!(), "removes-include-false.graphql", "skip_unreachable_nodes/fixtures/removes-include-false.expected", input, expected).await; } -#[test] -fn removes_recursively_empty_definitions() { +#[tokio::test] +async fn removes_recursively_empty_definitions() { let input = include_str!("skip_unreachable_nodes/fixtures/removes-recursively-empty-definitions.graphql"); let expected = include_str!("skip_unreachable_nodes/fixtures/removes-recursively-empty-definitions.expected"); - test_fixture(transform_fixture, "removes-recursively-empty-definitions.graphql", "skip_unreachable_nodes/fixtures/removes-recursively-empty-definitions.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "removes-recursively-empty-definitions.graphql", "skip_unreachable_nodes/fixtures/removes-recursively-empty-definitions.expected", input, expected).await; } -#[test] -fn removes_skip_true() { +#[tokio::test] +async fn removes_skip_true() { let input = include_str!("skip_unreachable_nodes/fixtures/removes-skip-true.graphql"); let expected = include_str!("skip_unreachable_nodes/fixtures/removes-skip-true.expected"); - test_fixture(transform_fixture, "removes-skip-true.graphql", "skip_unreachable_nodes/fixtures/removes-skip-true.expected", input, expected); + test_fixture(transform_fixture, file!(), "removes-skip-true.graphql", "skip_unreachable_nodes/fixtures/removes-skip-true.expected", input, expected).await; } -#[test] -fn skipped_fragment() { +#[tokio::test] +async fn skipped_fragment() { let input = include_str!("skip_unreachable_nodes/fixtures/skipped-fragment.graphql"); let expected = include_str!("skip_unreachable_nodes/fixtures/skipped-fragment.expected"); - test_fixture(transform_fixture, "skipped-fragment.graphql", "skip_unreachable_nodes/fixtures/skipped-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "skipped-fragment.graphql", "skip_unreachable_nodes/fixtures/skipped-fragment.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/skip_unused_variables.rs b/compiler/crates/relay-transforms/tests/skip_unused_variables.rs new file mode 100644 index 0000000000000..69a7acb2ba024 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/skip_unused_variables.rs @@ -0,0 +1,14 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::validate_operation_variables; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, validate_operation_variables) +} diff --git a/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.expected b/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.expected index 7cbd27d973980..3b954212f932a 100644 --- a/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.expected +++ b/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.expected @@ -54,7 +54,7 @@ query StreamQuerry($RELAY_INCREMENTAL_DELIVERY: Boolean!) { node(id: 4) { id ... on Feedback { - actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initial_count: 3) { + actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initialCount: 3) { name } } @@ -78,7 +78,7 @@ query StreamQuerry( node(id: 4) { id ... on Feedback { - actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initial_count: 3) { + actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initialCount: 3) { name } } diff --git a/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.graphql b/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.graphql index 16577bd68b6dd..19ad1dec6ad0e 100644 --- a/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.graphql +++ b/compiler/crates/relay-transforms/tests/skip_unused_variables/fixtures/kitchen-sink.graphql @@ -53,7 +53,7 @@ query StreamQuerry($RELAY_INCREMENTAL_DELIVERY: Boolean!) { node(id: 4) { id ... 
on Feedback { - actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initial_count: 3) { + actors @stream(if: $RELAY_INCREMENTAL_DELIVERY, label: "foo", initialCount: 3) { name } } diff --git a/compiler/crates/relay-transforms/tests/skip_unused_variables/mod.rs b/compiler/crates/relay-transforms/tests/skip_unused_variables/mod.rs deleted file mode 100644 index d1e8a44f184b6..0000000000000 --- a/compiler/crates/relay-transforms/tests/skip_unused_variables/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::validate_operation_variables; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| validate_operation_variables(program)) -} diff --git a/compiler/crates/relay-transforms/tests/skip_unused_variables_test.rs b/compiler/crates/relay-transforms/tests/skip_unused_variables_test.rs index 523723389bdf5..82eea75b2842c 100644 --- a/compiler/crates/relay-transforms/tests/skip_unused_variables_test.rs +++ b/compiler/crates/relay-transforms/tests/skip_unused_variables_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod skip_unused_variables; @@ -12,16 +12,16 @@ mod skip_unused_variables; use skip_unused_variables::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("skip_unused_variables/fixtures/kitchen-sink.graphql"); let expected = include_str!("skip_unused_variables/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "skip_unused_variables/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "skip_unused_variables/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn non_nullable_variable_with_default_invalid() { +#[tokio::test] +async fn non_nullable_variable_with_default_invalid() { let input = include_str!("skip_unused_variables/fixtures/non-nullable-variable-with-default.invalid.graphql"); let expected = include_str!("skip_unused_variables/fixtures/non-nullable-variable-with-default.invalid.expected"); - test_fixture(transform_fixture, "non-nullable-variable-with-default.invalid.graphql", "skip_unused_variables/fixtures/non-nullable-variable-with-default.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "non-nullable-variable-with-default.invalid.graphql", "skip_unused_variables/fixtures/non-nullable-variable-with-default.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/sort_selections.rs b/compiler/crates/relay-transforms/tests/sort_selections.rs new file mode 100644 index 0000000000000..5e61d54480c38 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/sort_selections.rs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::sort_selections; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |program| { + let next_program = sort_selections(program); + assert_eq!( + next_program.fragments().count(), + program.fragments().count() + ); + Ok(next_program) + }) +} diff --git a/compiler/crates/relay-transforms/tests/sort_selections/mod.rs b/compiler/crates/relay-transforms/tests/sort_selections/mod.rs deleted file mode 100644 index 7b925b310e56b..0000000000000 --- a/compiler/crates/relay-transforms/tests/sort_selections/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::sort_selections; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| { - let next_program = sort_selections(program); - assert_eq!( - next_program.fragments().count(), - program.fragments().count() - ); - Ok(next_program) - }) -} diff --git a/compiler/crates/relay-transforms/tests/sort_selections_test.rs b/compiler/crates/relay-transforms/tests/sort_selections_test.rs index 1ab3efbe3c7e8..cda97b6eb3464 100644 --- a/compiler/crates/relay-transforms/tests/sort_selections_test.rs +++ b/compiler/crates/relay-transforms/tests/sort_selections_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod sort_selections; @@ -12,9 +12,9 @@ mod sort_selections; use sort_selections::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn sort_selections_transform() { +#[tokio::test] +async fn sort_selections_transform() { let input = include_str!("sort_selections/fixtures/sort-selections-transform.graphql"); let expected = include_str!("sort_selections/fixtures/sort-selections-transform.expected"); - test_fixture(transform_fixture, "sort-selections-transform.graphql", "sort_selections/fixtures/sort-selections-transform.expected", input, expected); + test_fixture(transform_fixture, file!(), "sort-selections-transform.graphql", "sort_selections/fixtures/sort-selections-transform.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/subscription_transform.rs b/compiler/crates/relay-transforms/tests/subscription_transform.rs new file mode 100644 index 0000000000000..2312315219787 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/subscription_transform.rs @@ -0,0 +1,14 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::transform_subscriptions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, transform_subscriptions) +} diff --git a/compiler/crates/relay-transforms/tests/subscription_transform/mod.rs b/compiler/crates/relay-transforms/tests/subscription_transform/mod.rs deleted file mode 100644 index c29c7edd80089..0000000000000 --- a/compiler/crates/relay-transforms/tests/subscription_transform/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::transform_subscriptions; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |program| transform_subscriptions(program)) -} diff --git a/compiler/crates/relay-transforms/tests/subscription_transform_test.rs b/compiler/crates/relay-transforms/tests/subscription_transform_test.rs index e582000bfc24a..9c36fba0ade11 100644 --- a/compiler/crates/relay-transforms/tests/subscription_transform_test.rs +++ b/compiler/crates/relay-transforms/tests/subscription_transform_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<3a17316c91cd48a125d5dee9541ceac1>> */ mod subscription_transform; @@ -12,30 +12,30 @@ mod subscription_transform; use subscription_transform::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn subscription_transform() { +#[tokio::test] +async fn subscription_transform() { let input = include_str!("subscription_transform/fixtures/subscription_transform.graphql"); let expected = include_str!("subscription_transform/fixtures/subscription_transform.expected"); - test_fixture(transform_fixture, "subscription_transform.graphql", "subscription_transform/fixtures/subscription_transform.expected", input, expected); + test_fixture(transform_fixture, file!(), "subscription_transform.graphql", "subscription_transform/fixtures/subscription_transform.expected", input, expected).await; } -#[test] -fn subscription_transform_noop_no_js_field() { +#[tokio::test] +async fn subscription_transform_noop_no_js_field() { let input = include_str!("subscription_transform/fixtures/subscription_transform_noop_no_js_field.graphql"); 
let expected = include_str!("subscription_transform/fixtures/subscription_transform_noop_no_js_field.expected"); - test_fixture(transform_fixture, "subscription_transform_noop_no_js_field.graphql", "subscription_transform/fixtures/subscription_transform_noop_no_js_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "subscription_transform_noop_no_js_field.graphql", "subscription_transform/fixtures/subscription_transform_noop_no_js_field.expected", input, expected).await; } -#[test] -fn subscription_transform_noop_no_spread() { +#[tokio::test] +async fn subscription_transform_noop_no_spread() { let input = include_str!("subscription_transform/fixtures/subscription_transform_noop_no_spread.graphql"); let expected = include_str!("subscription_transform/fixtures/subscription_transform_noop_no_spread.expected"); - test_fixture(transform_fixture, "subscription_transform_noop_no_spread.graphql", "subscription_transform/fixtures/subscription_transform_noop_no_spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "subscription_transform_noop_no_spread.graphql", "subscription_transform/fixtures/subscription_transform_noop_no_spread.expected", input, expected).await; } -#[test] -fn subscription_transform_noop_two_selections() { +#[tokio::test] +async fn subscription_transform_noop_two_selections() { let input = include_str!("subscription_transform/fixtures/subscription_transform_noop_two_selections.graphql"); let expected = include_str!("subscription_transform/fixtures/subscription_transform_noop_two_selections.expected"); - test_fixture(transform_fixture, "subscription_transform_noop_two_selections.graphql", "subscription_transform/fixtures/subscription_transform_noop_two_selections.expected", input, expected); + test_fixture(transform_fixture, file!(), "subscription_transform_noop_two_selections.graphql", "subscription_transform/fixtures/subscription_transform_noop_two_selections.expected", input, expected).await; } diff 
--git a/compiler/crates/relay-transforms/tests/transform_connections.rs b/compiler/crates/relay-transforms/tests/transform_connections.rs new file mode 100644 index 0000000000000..169eb95210664 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/transform_connections.rs @@ -0,0 +1,60 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use graphql_text_printer::print_fragment; +use graphql_text_printer::print_operation; +use graphql_text_printer::PrinterOptions; +use relay_config::DeferStreamInterface; +use relay_test_schema::get_test_schema; +use relay_transforms::transform_connections; +use relay_transforms::validate_connections; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let schema = get_test_schema(); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let connection_interface = ConnectionInterface::default(); + let defer_stream_interface = DeferStreamInterface::default(); + + validate_connections(&program, &connection_interface) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let next_program = + transform_connections(&program, &connection_interface, &defer_stream_interface); + + let printer_options = PrinterOptions { + debug_directive_data: true, + 
..Default::default() + }; + let mut printed = next_program + .operations() + .map(|def| print_operation(&schema, def, printer_options.clone())) + .chain( + next_program + .fragments() + .map(|def| print_fragment(&schema, def, printer_options.clone())), + ) + .collect::>(); + printed.sort(); + Ok(printed.join("\n\n")) +} diff --git a/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection-no-label.expected b/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection-no-label.expected index 96ceba0afdd37..fbb0c3a357e52 100644 --- a/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection-no-label.expected +++ b/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection-no-label.expected @@ -47,7 +47,7 @@ query NodeQuery( id ... on Story { comments(first: 10) @__clientField(key: "NodeQuery_comments", handle: "connection", filters: null, dynamicKey_UNSTABLE: null) { - edges @stream(label: "NodeQuery_comments", initial_count: 0) { + edges @stream(label: "NodeQuery_comments", initialCount: 0) { node { actor { name diff --git a/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection.expected b/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection.expected index e279dc6f8b7e3..e580e264ed066 100644 --- a/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection.expected +++ b/compiler/crates/relay-transforms/tests/transform_connections/fixtures/stream-connection.expected @@ -51,7 +51,7 @@ query NodeQuery( id ... 
on Story { comments(first: 10) @__clientField(key: "NodeQuery_comments", handle: "connection", filters: null, dynamicKey_UNSTABLE: null) { - edges @stream(label: "NodeQuery_comments", initial_count: 0) { + edges @stream(label: "NodeQuery_comments", initialCount: 0) { node { actor { name diff --git a/compiler/crates/relay-transforms/tests/transform_connections/mod.rs b/compiler/crates/relay-transforms/tests/transform_connections/mod.rs deleted file mode 100644 index 7f43b861d0504..0000000000000 --- a/compiler/crates/relay-transforms/tests/transform_connections/mod.rs +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use graphql_text_printer::print_fragment; -use graphql_text_printer::print_operation; -use graphql_text_printer::PrinterOptions; -use relay_test_schema::get_test_schema; -use relay_transforms::transform_connections; -use relay_transforms::validate_connections; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let schema = get_test_schema(); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let connection_interface = ConnectionInterface::default(); - - validate_connections(&program, &connection_interface) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - 
- let next_program = transform_connections(&program, &connection_interface); - - let printer_options = PrinterOptions { - debug_directive_data: true, - ..Default::default() - }; - let mut printed = next_program - .operations() - .map(|def| print_operation(&schema, def, printer_options.clone())) - .chain( - next_program - .fragments() - .map(|def| print_fragment(&schema, def, printer_options.clone())), - ) - .collect::>(); - printed.sort(); - Ok(printed.join("\n\n")) -} diff --git a/compiler/crates/relay-transforms/tests/transform_connections_test.rs b/compiler/crates/relay-transforms/tests/transform_connections_test.rs index 82cc9ff7e5f03..aa3545e9c6c88 100644 --- a/compiler/crates/relay-transforms/tests/transform_connections_test.rs +++ b/compiler/crates/relay-transforms/tests/transform_connections_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<5eb995d94b94b43b9433675f2d83cc6f>> + * @generated SignedSource<> */ mod transform_connections; @@ -12,79 +12,79 @@ mod transform_connections; use transform_connections::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn connection() { +#[tokio::test] +async fn connection() { let input = include_str!("transform_connections/fixtures/connection.graphql"); let expected = include_str!("transform_connections/fixtures/connection.expected"); - test_fixture(transform_fixture, "connection.graphql", "transform_connections/fixtures/connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection.graphql", "transform_connections/fixtures/connection.expected", input, expected).await; } -#[test] -fn connection_directions() { +#[tokio::test] +async fn connection_directions() { let input = include_str!("transform_connections/fixtures/connection-directions.graphql"); let expected = include_str!("transform_connections/fixtures/connection-directions.expected"); - 
test_fixture(transform_fixture, "connection-directions.graphql", "transform_connections/fixtures/connection-directions.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-directions.graphql", "transform_connections/fixtures/connection-directions.expected", input, expected).await; } -#[test] -fn connection_empty_filters() { +#[tokio::test] +async fn connection_empty_filters() { let input = include_str!("transform_connections/fixtures/connection-empty-filters.graphql"); let expected = include_str!("transform_connections/fixtures/connection-empty-filters.expected"); - test_fixture(transform_fixture, "connection-empty-filters.graphql", "transform_connections/fixtures/connection-empty-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-empty-filters.graphql", "transform_connections/fixtures/connection-empty-filters.expected", input, expected).await; } -#[test] -fn connection_filters() { +#[tokio::test] +async fn connection_filters() { let input = include_str!("transform_connections/fixtures/connection-filters.graphql"); let expected = include_str!("transform_connections/fixtures/connection-filters.expected"); - test_fixture(transform_fixture, "connection-filters.graphql", "transform_connections/fixtures/connection-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-filters.graphql", "transform_connections/fixtures/connection-filters.expected", input, expected).await; } -#[test] -fn connection_generate_filters() { +#[tokio::test] +async fn connection_generate_filters() { let input = include_str!("transform_connections/fixtures/connection-generate-filters.graphql"); let expected = include_str!("transform_connections/fixtures/connection-generate-filters.expected"); - test_fixture(transform_fixture, "connection-generate-filters.graphql", "transform_connections/fixtures/connection-generate-filters.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "connection-generate-filters.graphql", "transform_connections/fixtures/connection-generate-filters.expected", input, expected).await; } -#[test] -fn connection_with_aliased_edges_page_info() { +#[tokio::test] +async fn connection_with_aliased_edges_page_info() { let input = include_str!("transform_connections/fixtures/connection-with-aliased-edges-page-info.graphql"); let expected = include_str!("transform_connections/fixtures/connection-with-aliased-edges-page-info.expected"); - test_fixture(transform_fixture, "connection-with-aliased-edges-page-info.graphql", "transform_connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-aliased-edges-page-info.graphql", "transform_connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected).await; } -#[test] -fn connection_with_custom_handler() { +#[tokio::test] +async fn connection_with_custom_handler() { let input = include_str!("transform_connections/fixtures/connection-with-custom-handler.graphql"); let expected = include_str!("transform_connections/fixtures/connection-with-custom-handler.expected"); - test_fixture(transform_fixture, "connection-with-custom-handler.graphql", "transform_connections/fixtures/connection-with-custom-handler.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-custom-handler.graphql", "transform_connections/fixtures/connection-with-custom-handler.expected", input, expected).await; } -#[test] -fn connection_with_page_info() { +#[tokio::test] +async fn connection_with_page_info() { let input = include_str!("transform_connections/fixtures/connection-with-page-info.graphql"); let expected = include_str!("transform_connections/fixtures/connection-with-page-info.expected"); - test_fixture(transform_fixture, "connection-with-page-info.graphql", "transform_connections/fixtures/connection-with-page-info.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "connection-with-page-info.graphql", "transform_connections/fixtures/connection-with-page-info.expected", input, expected).await; } -#[test] -fn connection_with_variables() { +#[tokio::test] +async fn connection_with_variables() { let input = include_str!("transform_connections/fixtures/connection-with-variables.graphql"); let expected = include_str!("transform_connections/fixtures/connection-with-variables.expected"); - test_fixture(transform_fixture, "connection-with-variables.graphql", "transform_connections/fixtures/connection-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-variables.graphql", "transform_connections/fixtures/connection-with-variables.expected", input, expected).await; } -#[test] -fn stream_connection() { +#[tokio::test] +async fn stream_connection() { let input = include_str!("transform_connections/fixtures/stream-connection.graphql"); let expected = include_str!("transform_connections/fixtures/stream-connection.expected"); - test_fixture(transform_fixture, "stream-connection.graphql", "transform_connections/fixtures/stream-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection.graphql", "transform_connections/fixtures/stream-connection.expected", input, expected).await; } -#[test] -fn stream_connection_no_label() { +#[tokio::test] +async fn stream_connection_no_label() { let input = include_str!("transform_connections/fixtures/stream-connection-no-label.graphql"); let expected = include_str!("transform_connections/fixtures/stream-connection-no-label.expected"); - test_fixture(transform_fixture, "stream-connection-no-label.graphql", "transform_connections/fixtures/stream-connection-no-label.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection-no-label.graphql", "transform_connections/fixtures/stream-connection-no-label.expected", input, expected).await; } diff --git 
a/compiler/crates/relay-transforms/tests/updatable_directive.rs b/compiler/crates/relay-transforms/tests/updatable_directive.rs new file mode 100644 index 0000000000000..4f1ff5e2f2747 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/updatable_directive.rs @@ -0,0 +1,42 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::validate_updatable_directive; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let mut parts = fixture.content.split("%extensions%"); + let base = parts.next().expect("Pre-extension content required"); + let maybe_extensions = parts.next(); + let schema = if let Some(extensions) = maybe_extensions { + get_test_schema_with_extensions(extensions) + } else { + get_test_schema() + }; + + let ast = parse_executable(base, source_location).unwrap(); + let ir_result = build(&schema, &ast.definitions); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&schema), ir); + validate_updatable_directive(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-linked.invalid.expected b/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-linked.invalid.expected index 
73513bfa6bb4e..f2fbbfcfe98e5 100644 --- a/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-linked.invalid.expected +++ b/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-linked.invalid.expected @@ -12,7 +12,7 @@ extend type Query { resolver_field: User @relay_resolver(import_path: "ResolverModule") } ==================================== ERROR ==================================== -✖︎ Fields defined using Relay Resolvers are not not allowed within @updatable operations. +✖︎ Fields defined using Relay Resolvers are not allowed within @updatable operations. resolver-linked.invalid.graphql:3:3 2 │ query resolverQuery @updatable { diff --git a/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-scalar.invalid.expected b/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-scalar.invalid.expected index a23bb0bd08dfa..3f10b15cfee05 100644 --- a/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-scalar.invalid.expected +++ b/compiler/crates/relay-transforms/tests/updatable_directive/fixtures/resolver-scalar.invalid.expected @@ -10,7 +10,7 @@ extend type Query { resolver_field: Boolean @relay_resolver(import_path: "ResolverModule") } ==================================== ERROR ==================================== -✖︎ Fields defined using Relay Resolvers are not not allowed within @updatable operations. +✖︎ Fields defined using Relay Resolvers are not allowed within @updatable operations. resolver-scalar.invalid.graphql:3:3 2 │ query resolverQuery @updatable { diff --git a/compiler/crates/relay-transforms/tests/updatable_directive/mod.rs b/compiler/crates/relay-transforms/tests/updatable_directive/mod.rs deleted file mode 100644 index e256681c21f57..0000000000000 --- a/compiler/crates/relay-transforms/tests/updatable_directive/mod.rs +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::validate_updatable_directive; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let mut parts = fixture.content.split("%extensions%"); - let base = parts.next().expect("Pre-extension content required"); - let maybe_extensions = parts.next(); - let schema = if let Some(extensions) = maybe_extensions { - get_test_schema_with_extensions(extensions) - } else { - get_test_schema() - }; - - let ast = parse_executable(base, source_location).unwrap(); - let ir_result = build(&schema, &ast.definitions); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&schema), ir); - validate_updatable_directive(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/updatable_directive_test.rs b/compiler/crates/relay-transforms/tests/updatable_directive_test.rs index 23a93336a7b64..de7d40afb6de1 100644 --- a/compiler/crates/relay-transforms/tests/updatable_directive_test.rs +++ b/compiler/crates/relay-transforms/tests/updatable_directive_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree.
* - * @generated SignedSource<<04bba5dd6952e840a3ddf09a94de7667>> + * @generated SignedSource<<97d52b0fa84f80726d7eb06310a18c0d>> */ mod updatable_directive; @@ -12,156 +12,156 @@ mod updatable_directive; use updatable_directive::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn assignable_fragment_spread_not_subtype_invalid() { +#[tokio::test] +async fn assignable_fragment_spread_not_subtype_invalid() { let input = include_str!("updatable_directive/fixtures/assignable-fragment-spread-not-subtype.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/assignable-fragment-spread-not-subtype.invalid.expected"); - test_fixture(transform_fixture, "assignable-fragment-spread-not-subtype.invalid.graphql", "updatable_directive/fixtures/assignable-fragment-spread-not-subtype.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spread-not-subtype.invalid.graphql", "updatable_directive/fixtures/assignable-fragment-spread-not-subtype.invalid.expected", input, expected).await; } -#[test] -fn assignable_fragment_spreads() { +#[tokio::test] +async fn assignable_fragment_spreads() { let input = include_str!("updatable_directive/fixtures/assignable-fragment-spreads.graphql"); let expected = include_str!("updatable_directive/fixtures/assignable-fragment-spreads.expected"); - test_fixture(transform_fixture, "assignable-fragment-spreads.graphql", "updatable_directive/fixtures/assignable-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "assignable-fragment-spreads.graphql", "updatable_directive/fixtures/assignable-fragment-spreads.expected", input, expected).await; } -#[test] -fn client_side_updatable() { +#[tokio::test] +async fn client_side_updatable() { let input = include_str!("updatable_directive/fixtures/client-side-updatable.graphql"); let expected = include_str!("updatable_directive/fixtures/client-side-updatable.expected"); - 
test_fixture(transform_fixture, "client-side-updatable.graphql", "updatable_directive/fixtures/client-side-updatable.expected", input, expected); + test_fixture(transform_fixture, file!(), "client-side-updatable.graphql", "updatable_directive/fixtures/client-side-updatable.expected", input, expected).await; } -#[test] -fn directive_fragment_spread_invalid() { +#[tokio::test] +async fn directive_fragment_spread_invalid() { let input = include_str!("updatable_directive/fixtures/directive-fragment-spread.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/directive-fragment-spread.invalid.expected"); - test_fixture(transform_fixture, "directive-fragment-spread.invalid.graphql", "updatable_directive/fixtures/directive-fragment-spread.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-fragment-spread.invalid.graphql", "updatable_directive/fixtures/directive-fragment-spread.invalid.expected", input, expected).await; } -#[test] -fn directive_inline_fragment_invalid() { +#[tokio::test] +async fn directive_inline_fragment_invalid() { let input = include_str!("updatable_directive/fixtures/directive-inline-fragment.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/directive-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "directive-inline-fragment.invalid.graphql", "updatable_directive/fixtures/directive-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-inline-fragment.invalid.graphql", "updatable_directive/fixtures/directive-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn directive_linked_field_invalid() { +#[tokio::test] +async fn directive_linked_field_invalid() { let input = include_str!("updatable_directive/fixtures/directive-linked-field.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/directive-linked-field.invalid.expected"); - 
test_fixture(transform_fixture, "directive-linked-field.invalid.graphql", "updatable_directive/fixtures/directive-linked-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-linked-field.invalid.graphql", "updatable_directive/fixtures/directive-linked-field.invalid.expected", input, expected).await; } -#[test] -fn directive_query_invalid() { +#[tokio::test] +async fn directive_query_invalid() { let input = include_str!("updatable_directive/fixtures/directive-query.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/directive-query.invalid.expected"); - test_fixture(transform_fixture, "directive-query.invalid.graphql", "updatable_directive/fixtures/directive-query.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-query.invalid.graphql", "updatable_directive/fixtures/directive-query.invalid.expected", input, expected).await; } -#[test] -fn directive_scalar_field_invalid() { +#[tokio::test] +async fn directive_scalar_field_invalid() { let input = include_str!("updatable_directive/fixtures/directive-scalar-field.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/directive-scalar-field.invalid.expected"); - test_fixture(transform_fixture, "directive-scalar-field.invalid.graphql", "updatable_directive/fixtures/directive-scalar-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-scalar-field.invalid.graphql", "updatable_directive/fixtures/directive-scalar-field.invalid.expected", input, expected).await; } -#[test] -fn doubly_nested_fragment_spread_invalid() { +#[tokio::test] +async fn doubly_nested_fragment_spread_invalid() { let input = include_str!("updatable_directive/fixtures/doubly-nested-fragment-spread.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/doubly-nested-fragment-spread.invalid.expected"); - test_fixture(transform_fixture, 
"doubly-nested-fragment-spread.invalid.graphql", "updatable_directive/fixtures/doubly-nested-fragment-spread.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "doubly-nested-fragment-spread.invalid.graphql", "updatable_directive/fixtures/doubly-nested-fragment-spread.invalid.expected", input, expected).await; } -#[test] -fn include_invalid() { +#[tokio::test] +async fn include_invalid() { let input = include_str!("updatable_directive/fixtures/include.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/include.invalid.expected"); - test_fixture(transform_fixture, "include.invalid.graphql", "updatable_directive/fixtures/include.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "include.invalid.graphql", "updatable_directive/fixtures/include.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment() { +#[tokio::test] +async fn inline_fragment() { let input = include_str!("updatable_directive/fixtures/inline-fragment.graphql"); let expected = include_str!("updatable_directive/fixtures/inline-fragment.expected"); - test_fixture(transform_fixture, "inline-fragment.graphql", "updatable_directive/fixtures/inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment.graphql", "updatable_directive/fixtures/inline-fragment.expected", input, expected).await; } -#[test] -fn inline_fragment_concrete_type_to_concrete_type_invalid() { +#[tokio::test] +async fn inline_fragment_concrete_type_to_concrete_type_invalid() { let input = include_str!("updatable_directive/fixtures/inline-fragment-concrete-type-to-concrete-type.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/inline-fragment-concrete-type-to-concrete-type.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-concrete-type-to-concrete-type.invalid.graphql", 
"updatable_directive/fixtures/inline-fragment-concrete-type-to-concrete-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-concrete-type-to-concrete-type.invalid.graphql", "updatable_directive/fixtures/inline-fragment-concrete-type-to-concrete-type.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_concrete_type_to_interface_invalid() { +#[tokio::test] +async fn inline_fragment_concrete_type_to_interface_invalid() { let input = include_str!("updatable_directive/fixtures/inline-fragment-concrete-type-to-interface.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/inline-fragment-concrete-type-to-interface.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-concrete-type-to-interface.invalid.graphql", "updatable_directive/fixtures/inline-fragment-concrete-type-to-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-concrete-type-to-interface.invalid.graphql", "updatable_directive/fixtures/inline-fragment-concrete-type-to-interface.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_interface_to_interface_invalid() { +#[tokio::test] +async fn inline_fragment_interface_to_interface_invalid() { let input = include_str!("updatable_directive/fixtures/inline-fragment-interface-to-interface.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/inline-fragment-interface-to-interface.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-interface-to-interface.invalid.graphql", "updatable_directive/fixtures/inline-fragment-interface-to-interface.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-interface-to-interface.invalid.graphql", "updatable_directive/fixtures/inline-fragment-interface-to-interface.invalid.expected", input, expected).await; } -#[test] -fn inline_fragment_redundant_invalid() { 
+#[tokio::test] +async fn inline_fragment_redundant_invalid() { let input = include_str!("updatable_directive/fixtures/inline-fragment-redundant.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/inline-fragment-redundant.invalid.expected"); - test_fixture(transform_fixture, "inline-fragment-redundant.invalid.graphql", "updatable_directive/fixtures/inline-fragment-redundant.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment-redundant.invalid.graphql", "updatable_directive/fixtures/inline-fragment-redundant.invalid.expected", input, expected).await; } -#[test] -fn non_assignable_fragment_spreads_invalid() { +#[tokio::test] +async fn non_assignable_fragment_spreads_invalid() { let input = include_str!("updatable_directive/fixtures/non-assignable-fragment-spreads.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/non-assignable-fragment-spreads.invalid.expected"); - test_fixture(transform_fixture, "non-assignable-fragment-spreads.invalid.graphql", "updatable_directive/fixtures/non-assignable-fragment-spreads.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "non-assignable-fragment-spreads.invalid.graphql", "updatable_directive/fixtures/non-assignable-fragment-spreads.invalid.expected", input, expected).await; } -#[test] -fn required_invalid() { +#[tokio::test] +async fn required_invalid() { let input = include_str!("updatable_directive/fixtures/required.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/required.invalid.expected"); - test_fixture(transform_fixture, "required.invalid.graphql", "updatable_directive/fixtures/required.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "required.invalid.graphql", "updatable_directive/fixtures/required.invalid.expected", input, expected).await; } -#[test] -fn resolver_linked_invalid() { +#[tokio::test] +async fn resolver_linked_invalid() { let 
input = include_str!("updatable_directive/fixtures/resolver-linked.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/resolver-linked.invalid.expected"); - test_fixture(transform_fixture, "resolver-linked.invalid.graphql", "updatable_directive/fixtures/resolver-linked.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "resolver-linked.invalid.graphql", "updatable_directive/fixtures/resolver-linked.invalid.expected", input, expected).await; } -#[test] -fn resolver_scalar_invalid() { +#[tokio::test] +async fn resolver_scalar_invalid() { let input = include_str!("updatable_directive/fixtures/resolver-scalar.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/resolver-scalar.invalid.expected"); - test_fixture(transform_fixture, "resolver-scalar.invalid.graphql", "updatable_directive/fixtures/resolver-scalar.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "resolver-scalar.invalid.graphql", "updatable_directive/fixtures/resolver-scalar.invalid.expected", input, expected).await; } -#[test] -fn skip_invalid() { +#[tokio::test] +async fn skip_invalid() { let input = include_str!("updatable_directive/fixtures/skip.invalid.graphql"); let expected = include_str!("updatable_directive/fixtures/skip.invalid.expected"); - test_fixture(transform_fixture, "skip.invalid.graphql", "updatable_directive/fixtures/skip.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "skip.invalid.graphql", "updatable_directive/fixtures/skip.invalid.expected", input, expected).await; } -#[test] -fn type_narrowing() { +#[tokio::test] +async fn type_narrowing() { let input = include_str!("updatable_directive/fixtures/type-narrowing.graphql"); let expected = include_str!("updatable_directive/fixtures/type-narrowing.expected"); - test_fixture(transform_fixture, "type-narrowing.graphql", "updatable_directive/fixtures/type-narrowing.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "type-narrowing.graphql", "updatable_directive/fixtures/type-narrowing.expected", input, expected).await; } -#[test] -fn updatable_fragment() { +#[tokio::test] +async fn updatable_fragment() { let input = include_str!("updatable_directive/fixtures/updatable-fragment.graphql"); let expected = include_str!("updatable_directive/fixtures/updatable-fragment.expected"); - test_fixture(transform_fixture, "updatable-fragment.graphql", "updatable_directive/fixtures/updatable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment.graphql", "updatable_directive/fixtures/updatable-fragment.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/updatable_fragment_spread.rs b/compiler/crates/relay-transforms/tests/updatable_fragment_spread.rs new file mode 100644 index 0000000000000..4630b38678d3f --- /dev/null +++ b/compiler/crates/relay-transforms/tests/updatable_fragment_spread.rs @@ -0,0 +1,32 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_updatable_fragment_spread; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir_result = build(&TEST_SCHEMA, &ast.definitions); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_updatable_fragment_spread(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/updatable_fragment_spread/mod.rs b/compiler/crates/relay-transforms/tests/updatable_fragment_spread/mod.rs deleted file mode 100644 index 7b663f53f28ea..0000000000000 --- a/compiler/crates/relay-transforms/tests/updatable_fragment_spread/mod.rs +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree.
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_updatable_fragment_spread; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build(&TEST_SCHEMA, &ast.definitions); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_updatable_fragment_spread(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/updatable_fragment_spread_test.rs b/compiler/crates/relay-transforms/tests/updatable_fragment_spread_test.rs index 807f6db9f7e80..a0c1221111b3a 100644 --- a/compiler/crates/relay-transforms/tests/updatable_fragment_spread_test.rs +++ b/compiler/crates/relay-transforms/tests/updatable_fragment_spread_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree.
* - * @generated SignedSource<<18869a0e4b56456538b89f35c173d802>> + * @generated SignedSource<> */ mod updatable_fragment_spread; @@ -12,163 +12,163 @@ mod updatable_fragment_spread; use updatable_fragment_spread::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn updatable_fragment_spread_abstract_in_concrete() { +#[tokio::test] +async fn updatable_fragment_spread_abstract_in_concrete() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_concrete.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_concrete.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_abstract_in_concrete.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_concrete.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_abstract_in_concrete.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_concrete.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_abstract_in_different_non_extending_abstract_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_abstract_in_different_non_extending_abstract_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.graphql", 
"updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_different_non_extending_abstract.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_abstract_in_same_abstract() { +#[tokio::test] +async fn updatable_fragment_spread_abstract_in_same_abstract() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_same_abstract.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_same_abstract.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_abstract_in_same_abstract.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_same_abstract.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_abstract_in_same_abstract.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_abstract_in_same_abstract.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_concrete_in_different_concrete_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_concrete_in_different_concrete_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_different_concrete.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_different_concrete.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_concrete_in_different_concrete.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_different_concrete.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_concrete_in_different_concrete.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_different_concrete.invalid.expected", input, expected).await; } -#[test] -fn 
updatable_fragment_spread_concrete_in_matching_abstract_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_concrete_in_matching_abstract_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_matching_abstract.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_matching_abstract.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_concrete_in_matching_abstract.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_matching_abstract.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_concrete_in_matching_abstract.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_matching_abstract.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_concrete_in_non_matching_abstract_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_concrete_in_non_matching_abstract_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_non_matching_abstract.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_concrete_in_same_concrete() { +#[tokio::test] +async 
fn updatable_fragment_spread_concrete_in_same_concrete() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_same_concrete.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_same_concrete.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_concrete_in_same_concrete.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_same_concrete.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_concrete_in_same_concrete.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_concrete_in_same_concrete.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_condition_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_condition_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_condition.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_condition.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_condition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_condition.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment.expected"); - test_fixture(transform_fixture, 
"updatable_fragment_spread_in_inline_fragment.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_abstract_type_condition.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_no_typename_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_no_typename_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.graphql"); let expected = 
include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_no_typename.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_redundant_type_condition.invalid.expected", input, expected).await; } -#[test] -fn 
updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_alias.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_condition.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_typename_with_directives.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.graphql"); let expected = 
include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_with_fragment_spread_directives.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_1() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_1() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_1.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_2() { +#[tokio::test] +async fn 
updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_2() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_2.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_3() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_3() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.graphql", 
"updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_3.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_4() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type_invalid_4() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_other_selections_wrong_type.invalid_4.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.graphql", 
"updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_with_abstract_type_condition.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_in_inline_fragment_without_typename_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_in_inline_fragment_without_typename_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_without_typename.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_without_typename.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_in_inline_fragment_without_typename.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_without_typename.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable_fragment_spread_in_inline_fragment_without_typename.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_in_inline_fragment_without_typename.invalid.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_top_level_invalid() { +#[tokio::test] +async fn updatable_fragment_spread_top_level_invalid() { let input = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_top_level.invalid.graphql"); let expected = include_str!("updatable_fragment_spread/fixtures/updatable_fragment_spread_top_level.invalid.expected"); - test_fixture(transform_fixture, "updatable_fragment_spread_top_level.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_top_level.invalid.expected", input, expected); 
+ test_fixture(transform_fixture, file!(), "updatable_fragment_spread_top_level.invalid.graphql", "updatable_fragment_spread/fixtures/updatable_fragment_spread_top_level.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_connections.rs b/compiler/crates/relay-transforms/tests/validate_connections.rs new file mode 100644 index 0000000000000..beb384d389476 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections.rs @@ -0,0 +1,33 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_connections; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir_result = build(&TEST_SCHEMA, &ast.definitions); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_connections(&program, &ConnectionInterface::default()) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.expected b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.expected new file mode 100644 index 0000000000000..23ba0c4e7e3dd 
--- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +# expected-to-throw +query NodeQuery($id: ID!, $orderBy: String) { + node(id: $id) { + id + ... on Story { + comments(first: 10, orderby: $orderBy) + @connection(key: "NodeQuery_comments", filters: [123]) { + edges { + node { + actor { + name + } + } + } + } + } + } +} +==================================== ERROR ==================================== +✖︎ Expected a value of type 'String' + + connection-filters-not-a-string.graphql:7:58 + 6 │ comments(first: 10, orderby: $orderBy) + 7 │ @connection(key: "NodeQuery_comments", filters: [123]) { + │ ^^^ + 8 │ edges { diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.graphql b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.graphql new file mode 100644 index 0000000000000..4aea7574a48c3 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-a-string.graphql @@ -0,0 +1,18 @@ +# expected-to-throw +query NodeQuery($id: ID!, $orderBy: String) { + node(id: $id) { + id + ... 
on Story { + comments(first: 10, orderby: $orderBy) + @connection(key: "NodeQuery_comments", filters: [123]) { + edges { + node { + actor { + name + } + } + } + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.expected b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.expected new file mode 100644 index 0000000000000..0122f726aed0c --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.expected @@ -0,0 +1,35 @@ +==================================== INPUT ==================================== +# expected-to-throw + +query NodeQuery($id: ID!, $ordering: String) { + node(id: $id) { + id + ... on Story { + comments(first: 10, orderby: $ordering) + @connection(key: "NodeQuery_comments", filters: ["ordering"]) { + edges { + node { + actor { + name + friends(first: 10) @connection(key: "NodeQuery_friends") { + edges { + node { + name + } + } + } + } + } + } + } + } + } +} +==================================== ERROR ==================================== +✖︎ Expected the `filters` argument to `@connection` to be a list of argument names to the connection field to use to identify the connection, got `ordering`. Not specifying `filters` is often recommended and will use all fields. 
+ + connection-filters-not-an-arg.graphql:8:57 + 7 │ comments(first: 10, orderby: $ordering) + 8 │ @connection(key: "NodeQuery_comments", filters: ["ordering"]) { + │ ^^^^^^^^^^^^ + 9 │ edges { diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.graphql b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.graphql new file mode 100644 index 0000000000000..b8623f99ea3ae --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-filters-not-an-arg.graphql @@ -0,0 +1,26 @@ +# expected-to-throw + +query NodeQuery($id: ID!, $ordering: String) { + node(id: $id) { + id + ... on Story { + comments(first: 10, orderby: $ordering) + @connection(key: "NodeQuery_comments", filters: ["ordering"]) { + edges { + node { + actor { + name + friends(first: 10) @connection(key: "NodeQuery_friends") { + edges { + node { + name + } + } + } + } + } + } + } + } + } +} diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-edges-selection.invalid.expected b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-edges-selection.invalid.expected index 5c086a4b8c3f9..097bfe86aa7f6 100644 --- a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-edges-selection.invalid.expected +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-edges-selection.invalid.expected @@ -12,7 +12,7 @@ query NodeQuery($id: ID!, $first: Int, $after: ID) { } } ==================================== ERROR ==================================== -✖︎ Expected 'comments' to have a 'edges' selection. +✖︎ Expected 'comments' to be passed a 'edges' selection. connection-missing-edges-selection.invalid.graphql:6:7 5 │ ... 
on Story { diff --git a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-first-arg.invalid.expected b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-first-arg.invalid.expected index efdc5e9d1ec54..ba561d350dc3b 100644 --- a/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-first-arg.invalid.expected +++ b/compiler/crates/relay-transforms/tests/validate_connections/fixtures/connection-missing-first-arg.invalid.expected @@ -17,7 +17,7 @@ query NodeQuery($id: ID!) { } } ==================================== ERROR ==================================== -✖︎ Expected field 'comments' to have a 'first' or 'last' argument. +✖︎ Expected field 'comments' to be passed a 'first' or 'last' argument. connection-missing-first-arg.invalid.graphql:6:7 5 │ ... on Story { diff --git a/compiler/crates/relay-transforms/tests/validate_connections/mod.rs b/compiler/crates/relay-transforms/tests/validate_connections/mod.rs deleted file mode 100644 index 58a70a6d0754b..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_connections/mod.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_connections; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build(&TEST_SCHEMA, &ast.definitions); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_connections(&program, &ConnectionInterface::default()) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_connections_schema.rs b/compiler/crates/relay-transforms/tests/validate_connections_schema.rs new file mode 100644 index 0000000000000..9b98a721d7cb9 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_connections_schema.rs @@ -0,0 +1,37 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::validate_connections; +use relay_transforms::ConnectionInterface; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + validate_connections(&program, &ConnectionInterface::default()) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_string()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected b/compiler/crates/relay-transforms/tests/validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected index d87a3488b9465..c5d5c9c977a32 100644 --- a/compiler/crates/relay-transforms/tests/validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected +++ b/compiler/crates/relay-transforms/tests/validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected @@ -38,7 +38,7 @@ type BadConnectionPageInfo { startCursor: String } ==================================== ERROR ==================================== -✖︎ Expected 'badConnection' to have a 'edges' selection. 
+✖︎ Expected 'badConnection' to be passed a 'edges' selection. connection-invalid-edges-list-type.invalid.graphql:6:7 5 │ ... on User { diff --git a/compiler/crates/relay-transforms/tests/validate_connections_schema/mod.rs b/compiler/crates/relay-transforms/tests/validate_connections_schema/mod.rs deleted file mode 100644 index 070337be8c55d..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_connections_schema/mod.rs +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::validate_connections; -use relay_transforms::ConnectionInterface; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - validate_connections(&program, &ConnectionInterface::default()) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_string()) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/validate_connections_schema_test.rs b/compiler/crates/relay-transforms/tests/validate_connections_schema_test.rs index 
694cebe6348b3..de34f8af1f2e7 100644 --- a/compiler/crates/relay-transforms/tests/validate_connections_schema_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_connections_schema_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<580f6eb4b2f477c47798f520d56c4872>> */ mod validate_connections_schema; @@ -12,37 +12,37 @@ mod validate_connections_schema; use validate_connections_schema::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn connection_invalid_edge_type_invalid() { +#[tokio::test] +async fn connection_invalid_edge_type_invalid() { let input = include_str!("validate_connections_schema/fixtures/connection-invalid-edge-type.invalid.graphql"); let expected = include_str!("validate_connections_schema/fixtures/connection-invalid-edge-type.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-edge-type.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-edge-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-edge-type.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-edge-type.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_edges_field_invalid() { +#[tokio::test] +async fn connection_invalid_edges_field_invalid() { let input = include_str!("validate_connections_schema/fixtures/connection-invalid-edges-field.invalid.graphql"); let expected = include_str!("validate_connections_schema/fixtures/connection-invalid-edges-field.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-edges-field.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-edges-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-edges-field.invalid.graphql", 
"validate_connections_schema/fixtures/connection-invalid-edges-field.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_edges_list_type_invalid() { +#[tokio::test] +async fn connection_invalid_edges_list_type_invalid() { let input = include_str!("validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.graphql"); let expected = include_str!("validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-edges-list-type.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-edges-list-type.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-edges-list-type.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_no_page_info_invalid() { +#[tokio::test] +async fn connection_invalid_no_page_info_invalid() { let input = include_str!("validate_connections_schema/fixtures/connection-invalid-no-page-info.invalid.graphql"); let expected = include_str!("validate_connections_schema/fixtures/connection-invalid-no-page-info.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-no-page-info.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-no-page-info.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-no-page-info.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-no-page-info.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_page_info_invalid() { +#[tokio::test] +async fn connection_invalid_page_info_invalid() { let input = include_str!("validate_connections_schema/fixtures/connection-invalid-page-info.invalid.graphql"); let expected = 
include_str!("validate_connections_schema/fixtures/connection-invalid-page-info.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-page-info.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-page-info.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-page-info.invalid.graphql", "validate_connections_schema/fixtures/connection-invalid-page-info.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_connections_test.rs b/compiler/crates/relay-transforms/tests/validate_connections_test.rs index d2bc5335bad8a..2c47c1e3d5239 100644 --- a/compiler/crates/relay-transforms/tests/validate_connections_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_connections_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<14f3bebd3c7df4188cb21afad859aacd>> */ mod validate_connections; @@ -12,128 +12,142 @@ mod validate_connections; use validate_connections::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn connection() { +#[tokio::test] +async fn connection() { let input = include_str!("validate_connections/fixtures/connection.graphql"); let expected = include_str!("validate_connections/fixtures/connection.expected"); - test_fixture(transform_fixture, "connection.graphql", "validate_connections/fixtures/connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection.graphql", "validate_connections/fixtures/connection.expected", input, expected).await; } -#[test] -fn connection_directions() { +#[tokio::test] +async fn connection_directions() { let input = include_str!("validate_connections/fixtures/connection-directions.graphql"); let expected = include_str!("validate_connections/fixtures/connection-directions.expected"); - 
test_fixture(transform_fixture, "connection-directions.graphql", "validate_connections/fixtures/connection-directions.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-directions.graphql", "validate_connections/fixtures/connection-directions.expected", input, expected).await; } -#[test] -fn connection_empty_filters() { +#[tokio::test] +async fn connection_empty_filters() { let input = include_str!("validate_connections/fixtures/connection-empty-filters.graphql"); let expected = include_str!("validate_connections/fixtures/connection-empty-filters.expected"); - test_fixture(transform_fixture, "connection-empty-filters.graphql", "validate_connections/fixtures/connection-empty-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-empty-filters.graphql", "validate_connections/fixtures/connection-empty-filters.expected", input, expected).await; } -#[test] -fn connection_filters() { +#[tokio::test] +async fn connection_filters() { let input = include_str!("validate_connections/fixtures/connection-filters.graphql"); let expected = include_str!("validate_connections/fixtures/connection-filters.expected"); - test_fixture(transform_fixture, "connection-filters.graphql", "validate_connections/fixtures/connection-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-filters.graphql", "validate_connections/fixtures/connection-filters.expected", input, expected).await; } -#[test] -fn connection_filters_null_invalid() { +#[tokio::test] +async fn connection_filters_not_a_string() { + let input = include_str!("validate_connections/fixtures/connection-filters-not-a-string.graphql"); + let expected = include_str!("validate_connections/fixtures/connection-filters-not-a-string.expected"); + test_fixture(transform_fixture, file!(), "connection-filters-not-a-string.graphql", "validate_connections/fixtures/connection-filters-not-a-string.expected", input, expected).await; +} + 
+#[tokio::test] +async fn connection_filters_not_an_arg() { + let input = include_str!("validate_connections/fixtures/connection-filters-not-an-arg.graphql"); + let expected = include_str!("validate_connections/fixtures/connection-filters-not-an-arg.expected"); + test_fixture(transform_fixture, file!(), "connection-filters-not-an-arg.graphql", "validate_connections/fixtures/connection-filters-not-an-arg.expected", input, expected).await; +} + +#[tokio::test] +async fn connection_filters_null_invalid() { let input = include_str!("validate_connections/fixtures/connection-filters-null.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-filters-null.invalid.expected"); - test_fixture(transform_fixture, "connection-filters-null.invalid.graphql", "validate_connections/fixtures/connection-filters-null.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-filters-null.invalid.graphql", "validate_connections/fixtures/connection-filters-null.invalid.expected", input, expected).await; } -#[test] -fn connection_generate_filters() { +#[tokio::test] +async fn connection_generate_filters() { let input = include_str!("validate_connections/fixtures/connection-generate-filters.graphql"); let expected = include_str!("validate_connections/fixtures/connection-generate-filters.expected"); - test_fixture(transform_fixture, "connection-generate-filters.graphql", "validate_connections/fixtures/connection-generate-filters.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-generate-filters.graphql", "validate_connections/fixtures/connection-generate-filters.expected", input, expected).await; } -#[test] -fn connection_invalid_key_name_invalid() { +#[tokio::test] +async fn connection_invalid_key_name_invalid() { let input = include_str!("validate_connections/fixtures/connection-invalid-key-name.invalid.graphql"); let expected = 
include_str!("validate_connections/fixtures/connection-invalid-key-name.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-key-name.invalid.graphql", "validate_connections/fixtures/connection-invalid-key-name.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-key-name.invalid.graphql", "validate_connections/fixtures/connection-invalid-key-name.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_key_type_invalid() { +#[tokio::test] +async fn connection_invalid_key_type_invalid() { let input = include_str!("validate_connections/fixtures/connection-invalid-key-type.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-invalid-key-type.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-key-type.invalid.graphql", "validate_connections/fixtures/connection-invalid-key-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-key-type.invalid.graphql", "validate_connections/fixtures/connection-invalid-key-type.invalid.expected", input, expected).await; } -#[test] -fn connection_invalid_type_invalid() { +#[tokio::test] +async fn connection_invalid_type_invalid() { let input = include_str!("validate_connections/fixtures/connection-invalid-type.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-invalid-type.invalid.expected"); - test_fixture(transform_fixture, "connection-invalid-type.invalid.graphql", "validate_connections/fixtures/connection-invalid-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-invalid-type.invalid.graphql", "validate_connections/fixtures/connection-invalid-type.invalid.expected", input, expected).await; } -#[test] -fn connection_missing_edges_selection_invalid() { +#[tokio::test] +async fn connection_missing_edges_selection_invalid() { let input = 
include_str!("validate_connections/fixtures/connection-missing-edges-selection.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-missing-edges-selection.invalid.expected"); - test_fixture(transform_fixture, "connection-missing-edges-selection.invalid.graphql", "validate_connections/fixtures/connection-missing-edges-selection.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-missing-edges-selection.invalid.graphql", "validate_connections/fixtures/connection-missing-edges-selection.invalid.expected", input, expected).await; } -#[test] -fn connection_missing_first_arg_invalid() { +#[tokio::test] +async fn connection_missing_first_arg_invalid() { let input = include_str!("validate_connections/fixtures/connection-missing-first-arg.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-missing-first-arg.invalid.expected"); - test_fixture(transform_fixture, "connection-missing-first-arg.invalid.graphql", "validate_connections/fixtures/connection-missing-first-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-missing-first-arg.invalid.graphql", "validate_connections/fixtures/connection-missing-first-arg.invalid.expected", input, expected).await; } -#[test] -fn connection_with_aliased_edges_page_info() { +#[tokio::test] +async fn connection_with_aliased_edges_page_info() { let input = include_str!("validate_connections/fixtures/connection-with-aliased-edges-page-info.graphql"); let expected = include_str!("validate_connections/fixtures/connection-with-aliased-edges-page-info.expected"); - test_fixture(transform_fixture, "connection-with-aliased-edges-page-info.graphql", "validate_connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-aliased-edges-page-info.graphql", 
"validate_connections/fixtures/connection-with-aliased-edges-page-info.expected", input, expected).await; } -#[test] -fn connection_with_custom_handler() { +#[tokio::test] +async fn connection_with_custom_handler() { let input = include_str!("validate_connections/fixtures/connection-with-custom-handler.graphql"); let expected = include_str!("validate_connections/fixtures/connection-with-custom-handler.expected"); - test_fixture(transform_fixture, "connection-with-custom-handler.graphql", "validate_connections/fixtures/connection-with-custom-handler.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-custom-handler.graphql", "validate_connections/fixtures/connection-with-custom-handler.expected", input, expected).await; } -#[test] -fn connection_with_invalid_custom_handler_invalid() { +#[tokio::test] +async fn connection_with_invalid_custom_handler_invalid() { let input = include_str!("validate_connections/fixtures/connection-with-invalid-custom-handler.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/connection-with-invalid-custom-handler.invalid.expected"); - test_fixture(transform_fixture, "connection-with-invalid-custom-handler.invalid.graphql", "validate_connections/fixtures/connection-with-invalid-custom-handler.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-invalid-custom-handler.invalid.graphql", "validate_connections/fixtures/connection-with-invalid-custom-handler.invalid.expected", input, expected).await; } -#[test] -fn connection_with_page_info() { +#[tokio::test] +async fn connection_with_page_info() { let input = include_str!("validate_connections/fixtures/connection-with-page-info.graphql"); let expected = include_str!("validate_connections/fixtures/connection-with-page-info.expected"); - test_fixture(transform_fixture, "connection-with-page-info.graphql", "validate_connections/fixtures/connection-with-page-info.expected", input, 
expected); + test_fixture(transform_fixture, file!(), "connection-with-page-info.graphql", "validate_connections/fixtures/connection-with-page-info.expected", input, expected).await; } -#[test] -fn connection_with_variables() { +#[tokio::test] +async fn connection_with_variables() { let input = include_str!("validate_connections/fixtures/connection-with-variables.graphql"); let expected = include_str!("validate_connections/fixtures/connection-with-variables.expected"); - test_fixture(transform_fixture, "connection-with-variables.graphql", "validate_connections/fixtures/connection-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "connection-with-variables.graphql", "validate_connections/fixtures/connection-with-variables.expected", input, expected).await; } -#[test] -fn stream_connection_with_aliased_edges_invalid() { +#[tokio::test] +async fn stream_connection_with_aliased_edges_invalid() { let input = include_str!("validate_connections/fixtures/stream-connection-with-aliased-edges.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/stream-connection-with-aliased-edges.invalid.expected"); - test_fixture(transform_fixture, "stream-connection-with-aliased-edges.invalid.graphql", "validate_connections/fixtures/stream-connection-with-aliased-edges.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection-with-aliased-edges.invalid.graphql", "validate_connections/fixtures/stream-connection-with-aliased-edges.invalid.expected", input, expected).await; } -#[test] -fn stream_connection_with_aliased_page_info_invalid() { +#[tokio::test] +async fn stream_connection_with_aliased_page_info_invalid() { let input = include_str!("validate_connections/fixtures/stream-connection-with-aliased-page-info.invalid.graphql"); let expected = include_str!("validate_connections/fixtures/stream-connection-with-aliased-page-info.invalid.expected"); - test_fixture(transform_fixture, 
"stream-connection-with-aliased-page-info.invalid.graphql", "validate_connections/fixtures/stream-connection-with-aliased-page-info.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection-with-aliased-page-info.invalid.graphql", "validate_connections/fixtures/stream-connection-with-aliased-page-info.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_deprecated_fields.rs b/compiler/crates/relay-transforms/tests/validate_deprecated_fields.rs new file mode 100644 index 0000000000000..9c29a4f82be8a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_deprecated_fields.rs @@ -0,0 +1,36 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::deprecated_fields; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let warnings = deprecated_fields(&schema, &program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok(diagnostics_to_sorted_string(fixture.content, &warnings)) + } else { + panic!("Expected exactly one 
%extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/validate_deprecated_fields/mod.rs b/compiler/crates/relay-transforms/tests/validate_deprecated_fields/mod.rs deleted file mode 100644 index 0e366f89df016..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_deprecated_fields/mod.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::deprecated_fields; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let warnings = deprecated_fields(&schema, &program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok(diagnostics_to_sorted_string(fixture.content, &warnings)) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/validate_deprecated_fields_test.rs b/compiler/crates/relay-transforms/tests/validate_deprecated_fields_test.rs index 7ce6326ae7bae..a040f949cff91 100644 --- a/compiler/crates/relay-transforms/tests/validate_deprecated_fields_test.rs +++ 
b/compiler/crates/relay-transforms/tests/validate_deprecated_fields_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<<523e6262355cff63df80d9ea70e0d2ef>> */ mod validate_deprecated_fields; @@ -12,65 +12,65 @@ mod validate_deprecated_fields; use validate_deprecated_fields::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn deprecated_directive_arg() { +#[tokio::test] +async fn deprecated_directive_arg() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_directive_arg.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_directive_arg.expected"); - test_fixture(transform_fixture, "deprecated_directive_arg.graphql", "validate_deprecated_fields/fixtures/deprecated_directive_arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_directive_arg.graphql", "validate_deprecated_fields/fixtures/deprecated_directive_arg.expected", input, expected).await; } -#[test] -fn deprecated_directive_arg_with_reason() { +#[tokio::test] +async fn deprecated_directive_arg_with_reason() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_directive_arg_with_reason.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_directive_arg_with_reason.expected"); - test_fixture(transform_fixture, "deprecated_directive_arg_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_directive_arg_with_reason.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_directive_arg_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_directive_arg_with_reason.expected", input, expected).await; } -#[test] -fn deprecated_field_arg() { +#[tokio::test] +async fn deprecated_field_arg() { let input = 
include_str!("validate_deprecated_fields/fixtures/deprecated_field_arg.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_field_arg.expected"); - test_fixture(transform_fixture, "deprecated_field_arg.graphql", "validate_deprecated_fields/fixtures/deprecated_field_arg.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_field_arg.graphql", "validate_deprecated_fields/fixtures/deprecated_field_arg.expected", input, expected).await; } -#[test] -fn deprecated_field_arg_with_reason() { +#[tokio::test] +async fn deprecated_field_arg_with_reason() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_field_arg_with_reason.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_field_arg_with_reason.expected"); - test_fixture(transform_fixture, "deprecated_field_arg_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_field_arg_with_reason.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_field_arg_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_field_arg_with_reason.expected", input, expected).await; } -#[test] -fn deprecated_field_with_arguments() { +#[tokio::test] +async fn deprecated_field_with_arguments() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_field_with_arguments.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_field_with_arguments.expected"); - test_fixture(transform_fixture, "deprecated_field_with_arguments.graphql", "validate_deprecated_fields/fixtures/deprecated_field_with_arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_field_with_arguments.graphql", "validate_deprecated_fields/fixtures/deprecated_field_with_arguments.expected", input, expected).await; } -#[test] -fn deprecated_field_with_reason() { +#[tokio::test] +async fn deprecated_field_with_reason() { 
let input = include_str!("validate_deprecated_fields/fixtures/deprecated_field_with_reason.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_field_with_reason.expected"); - test_fixture(transform_fixture, "deprecated_field_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_field_with_reason.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_field_with_reason.graphql", "validate_deprecated_fields/fixtures/deprecated_field_with_reason.expected", input, expected).await; } -#[test] -fn deprecated_linked_field() { +#[tokio::test] +async fn deprecated_linked_field() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_linked_field.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_linked_field.expected"); - test_fixture(transform_fixture, "deprecated_linked_field.graphql", "validate_deprecated_fields/fixtures/deprecated_linked_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_linked_field.graphql", "validate_deprecated_fields/fixtures/deprecated_linked_field.expected", input, expected).await; } -#[test] -fn deprecated_scalar_field() { +#[tokio::test] +async fn deprecated_scalar_field() { let input = include_str!("validate_deprecated_fields/fixtures/deprecated_scalar_field.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_scalar_field.expected"); - test_fixture(transform_fixture, "deprecated_scalar_field.graphql", "validate_deprecated_fields/fixtures/deprecated_scalar_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_scalar_field.graphql", "validate_deprecated_fields/fixtures/deprecated_scalar_field.expected", input, expected).await; } -#[test] -fn deprecated_scalar_field_within_linked_field() { +#[tokio::test] +async fn deprecated_scalar_field_within_linked_field() { let input = 
include_str!("validate_deprecated_fields/fixtures/deprecated_scalar_field_within_linked_field.graphql"); let expected = include_str!("validate_deprecated_fields/fixtures/deprecated_scalar_field_within_linked_field.expected"); - test_fixture(transform_fixture, "deprecated_scalar_field_within_linked_field.graphql", "validate_deprecated_fields/fixtures/deprecated_scalar_field_within_linked_field.expected", input, expected); + test_fixture(transform_fixture, file!(), "deprecated_scalar_field_within_linked_field.graphql", "validate_deprecated_fields/fixtures/deprecated_scalar_field_within_linked_field.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_global_variable_names.rs b/compiler/crates/relay-transforms/tests/validate_global_variable_names.rs new file mode 100644 index 0000000000000..d9a1d3807c070 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_global_variable_names.rs @@ -0,0 +1,31 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_global_variable_names; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&TEST_SCHEMA, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_global_variable_names(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_global_variable_names/mod.rs b/compiler/crates/relay-transforms/tests/validate_global_variable_names/mod.rs deleted file mode 100644 index 94bcc9560981f..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_global_variable_names/mod.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_global_variable_names; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&TEST_SCHEMA, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_global_variable_names(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_global_variable_names_test.rs b/compiler/crates/relay-transforms/tests/validate_global_variable_names_test.rs index 044f4cbfc2ec4..62ad9d3d0c024 100644 --- a/compiler/crates/relay-transforms/tests/validate_global_variable_names_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_global_variable_names_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod validate_global_variable_names; @@ -12,9 +12,9 @@ mod validate_global_variable_names; use validate_global_variable_names::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn relayinternal_prefix_invalid() { +#[tokio::test] +async fn relayinternal_prefix_invalid() { let input = include_str!("validate_global_variable_names/fixtures/relayinternal_prefix_invalid.graphql"); let expected = include_str!("validate_global_variable_names/fixtures/relayinternal_prefix_invalid.expected"); - test_fixture(transform_fixture, "relayinternal_prefix_invalid.graphql", "validate_global_variable_names/fixtures/relayinternal_prefix_invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "relayinternal_prefix_invalid.graphql", "validate_global_variable_names/fixtures/relayinternal_prefix_invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_global_variables.rs b/compiler/crates/relay-transforms/tests/validate_global_variables.rs new file mode 100644 index 0000000000000..635f14709ba57 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_global_variables.rs @@ -0,0 +1,62 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use graphql_ir::build_ir_with_extra_features; +use graphql_ir::BuilderOptions; +use graphql_ir::FragmentVariablesSemantic; +use graphql_ir::Program; +use graphql_ir::RelayMode; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_global_variables; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir_result = build_ir_with_extra_features( + &TEST_SCHEMA, + &ast.definitions, + &BuilderOptions { + allow_undefined_fragment_spreads: false, + fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, + relay_mode: Some(RelayMode), + default_anonymous_operation_name: None, + allow_custom_scalar_literals: true, // for compatibility + }, + ); + + let ir = match ir_result { + Ok(res) => res, + Err(errors) => { + let mut errs = errors + .into_iter() + .map(|err| { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + printer.diagnostic_to_string(&err) + }) + .collect::>(); + errs.sort(); + return Err(errs.join("\n\n")); + } + }; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_global_variables(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.expected b/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.expected new file mode 100644 index 
0000000000000..050695c0dbb2a --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.expected @@ -0,0 +1,20 @@ +==================================== INPUT ==================================== +#expected-to-throw +query QueryWithUndefinedArgument { + node { + ...UserFragment @arguments(age: $age) + } +} + +fragment UserFragment on User +@argumentDefinitions(age: {type: "Int", defaultValue: null}) { + name +} +==================================== ERROR ==================================== +✖︎ Expected variable `$age` to be defined on the operation + + fragment-spread-with-undefined-argument.invalid.graphql:4:37 + 3 │ node { + 4 │ ...UserFragment @arguments(age: $age) + │ ^^^^ + 5 │ } diff --git a/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.graphql b/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.graphql new file mode 100644 index 0000000000000..4fa989945dbcc --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.graphql @@ -0,0 +1,11 @@ +#expected-to-throw +query QueryWithUndefinedArgument { + node { + ...UserFragment @arguments(age: $age) + } +} + +fragment UserFragment on User +@argumentDefinitions(age: {type: "Int", defaultValue: null}) { + name +} diff --git a/compiler/crates/relay-transforms/tests/validate_global_variables/mod.rs b/compiler/crates/relay-transforms/tests/validate_global_variables/mod.rs deleted file mode 100644 index 65d0ee14372f7..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_global_variables/mod.rs +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use graphql_ir::build_ir_with_extra_features; -use graphql_ir::BuilderOptions; -use graphql_ir::FragmentVariablesSemantic; -use graphql_ir::Program; -use graphql_ir::RelayMode; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_global_variables; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build_ir_with_extra_features( - &TEST_SCHEMA, - &ast.definitions, - &BuilderOptions { - allow_undefined_fragment_spreads: false, - fragment_variables_semantic: FragmentVariablesSemantic::PassedValue, - relay_mode: Some(RelayMode), - default_anonymous_operation_name: None, - }, - ); - - let ir = match ir_result { - Ok(res) => res, - Err(errors) => { - let mut errs = errors - .into_iter() - .map(|err| { - let printer = DiagnosticPrinter::new(|_| { - Some(TextSource::from_whole_document(fixture.content.to_string())) - }); - printer.diagnostic_to_string(&err) - }) - .collect::>(); - errs.sort(); - return Err(errs.join("\n\n")); - } - }; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_global_variables(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_global_variables_test.rs b/compiler/crates/relay-transforms/tests/validate_global_variables_test.rs index d1247f66b861b..29cdb3506c7f5 100644 --- 
a/compiler/crates/relay-transforms/tests/validate_global_variables_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_global_variables_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<6103440a7211fcb7a084c2b65bdc9621>> + * @generated SignedSource<> */ mod validate_global_variables; @@ -12,30 +12,37 @@ mod validate_global_variables; use validate_global_variables::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_include_with_provided_argument() { +#[tokio::test] +async fn fragment_include_with_provided_argument() { let input = include_str!("validate_global_variables/fixtures/fragment-include-with-provided-argument.graphql"); let expected = include_str!("validate_global_variables/fixtures/fragment-include-with-provided-argument.expected"); - test_fixture(transform_fixture, "fragment-include-with-provided-argument.graphql", "validate_global_variables/fixtures/fragment-include-with-provided-argument.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-include-with-provided-argument.graphql", "validate_global_variables/fixtures/fragment-include-with-provided-argument.expected", input, expected).await; } -#[test] -fn fragment_with_undefined_variable_invalid() { +#[tokio::test] +async fn fragment_spread_with_undefined_argument_invalid() { + let input = include_str!("validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.graphql"); + let expected = include_str!("validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.expected"); + test_fixture(transform_fixture, file!(), "fragment-spread-with-undefined-argument.invalid.graphql", "validate_global_variables/fixtures/fragment-spread-with-undefined-argument.invalid.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_with_undefined_variable_invalid() { let 
input = include_str!("validate_global_variables/fixtures/fragment-with-undefined-variable.invalid.graphql"); let expected = include_str!("validate_global_variables/fixtures/fragment-with-undefined-variable.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-undefined-variable.invalid.graphql", "validate_global_variables/fixtures/fragment-with-undefined-variable.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-undefined-variable.invalid.graphql", "validate_global_variables/fixtures/fragment-with-undefined-variable.invalid.expected", input, expected).await; } -#[test] -fn query_with_undefined_variable_invalid() { +#[tokio::test] +async fn query_with_undefined_variable_invalid() { let input = include_str!("validate_global_variables/fixtures/query-with-undefined-variable.invalid.graphql"); let expected = include_str!("validate_global_variables/fixtures/query-with-undefined-variable.invalid.expected"); - test_fixture(transform_fixture, "query-with-undefined-variable.invalid.graphql", "validate_global_variables/fixtures/query-with-undefined-variable.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-undefined-variable.invalid.graphql", "validate_global_variables/fixtures/query-with-undefined-variable.invalid.expected", input, expected).await; } -#[test] -fn query_with_variables() { +#[tokio::test] +async fn query_with_variables() { let input = include_str!("validate_global_variables/fixtures/query-with-variables.graphql"); let expected = include_str!("validate_global_variables/fixtures/query-with-variables.expected"); - test_fixture(transform_fixture, "query-with-variables.graphql", "validate_global_variables/fixtures/query-with-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-variables.graphql", "validate_global_variables/fixtures/query-with-variables.expected", input, expected).await; } diff --git 
a/compiler/crates/relay-transforms/tests/validate_module_names.rs b/compiler/crates/relay-transforms/tests/validate_module_names.rs new file mode 100644 index 0000000000000..7823d564a37e2 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_module_names.rs @@ -0,0 +1,31 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_module_names; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&TEST_SCHEMA, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_module_names(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_module_names/mod.rs b/compiler/crates/relay-transforms/tests/validate_module_names/mod.rs deleted file mode 100644 index 07586ed2deb4a..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_module_names/mod.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_module_names; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&TEST_SCHEMA, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_module_names(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_module_names_test.rs b/compiler/crates/relay-transforms/tests/validate_module_names_test.rs index 389c1b0ec7bcc..910d25c4bc95f 100644 --- a/compiler/crates/relay-transforms/tests/validate_module_names_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_module_names_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<2d52bbc7a9df87e6e39c2f0062c18bc3>> */ mod validate_module_names; @@ -12,121 +12,121 @@ mod validate_module_names; use validate_module_names::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_valid_name() { +#[tokio::test] +async fn fragment_with_valid_name() { let input = include_str!("validate_module_names/fixtures/fragment-with-valid-name.graphql"); let expected = include_str!("validate_module_names/fixtures/fragment-with-valid-name.expected"); - test_fixture(transform_fixture, "fragment-with-valid-name.graphql", "validate_module_names/fixtures/fragment-with-valid-name.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-valid-name.graphql", "validate_module_names/fixtures/fragment-with-valid-name.expected", input, expected).await; } -#[test] -fn fragmentwithinvalidlycapitalizedname_invalid() { +#[tokio::test] +async fn fragmentwithinvalidlycapitalizedname_invalid() { let input = include_str!("validate_module_names/fixtures/FragmentWithInvalidlyCapitalizedName.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithInvalidlyCapitalizedName.invalid.expected"); - test_fixture(transform_fixture, "FragmentWithInvalidlyCapitalizedName.invalid.graphql", "validate_module_names/fixtures/FragmentWithInvalidlyCapitalizedName.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithInvalidlyCapitalizedName.invalid.graphql", "validate_module_names/fixtures/FragmentWithInvalidlyCapitalizedName.invalid.expected", input, expected).await; } -#[test] -fn fragmentwithinvalidname_invalid() { +#[tokio::test] +async fn fragmentwithinvalidname_invalid() { let input = include_str!("validate_module_names/fixtures/FragmentWithInvalidName.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithInvalidName.invalid.expected"); - test_fixture(transform_fixture, 
"FragmentWithInvalidName.invalid.graphql", "validate_module_names/fixtures/FragmentWithInvalidName.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithInvalidName.invalid.graphql", "validate_module_names/fixtures/FragmentWithInvalidName.invalid.expected", input, expected).await; } -#[test] -fn fragmentwithvalidname() { +#[tokio::test] +async fn fragmentwithvalidname() { let input = include_str!("validate_module_names/fixtures/FragmentWithValidName.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithValidName.expected"); - test_fixture(transform_fixture, "FragmentWithValidName.graphql", "validate_module_names/fixtures/FragmentWithValidName.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithValidName.graphql", "validate_module_names/fixtures/FragmentWithValidName.expected", input, expected).await; } -#[test] -fn fragmentwithvalidname_android() { +#[tokio::test] +async fn fragmentwithvalidname_android() { let input = include_str!("validate_module_names/fixtures/FragmentWithValidName.android.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithValidName.android.expected"); - test_fixture(transform_fixture, "FragmentWithValidName.android.graphql", "validate_module_names/fixtures/FragmentWithValidName.android.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithValidName.android.graphql", "validate_module_names/fixtures/FragmentWithValidName.android.expected", input, expected).await; } -#[test] -fn fragmentwithvalidname_ios() { +#[tokio::test] +async fn fragmentwithvalidname_ios() { let input = include_str!("validate_module_names/fixtures/FragmentWithValidName.ios.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithValidName.ios.expected"); - test_fixture(transform_fixture, "FragmentWithValidName.ios.graphql", 
"validate_module_names/fixtures/FragmentWithValidName.ios.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithValidName.ios.graphql", "validate_module_names/fixtures/FragmentWithValidName.ios.expected", input, expected).await; } -#[test] -fn fragmentwithvalidname_other_suffix() { +#[tokio::test] +async fn fragmentwithvalidname_other_suffix() { let input = include_str!("validate_module_names/fixtures/FragmentWithValidName.other-suffix.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithValidName.other-suffix.expected"); - test_fixture(transform_fixture, "FragmentWithValidName.other-suffix.graphql", "validate_module_names/fixtures/FragmentWithValidName.other-suffix.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithValidName.other-suffix.graphql", "validate_module_names/fixtures/FragmentWithValidName.other-suffix.expected", input, expected).await; } -#[test] -fn fragmentwithvalidname_other_suffix_ios_and_another() { +#[tokio::test] +async fn fragmentwithvalidname_other_suffix_ios_and_another() { let input = include_str!("validate_module_names/fixtures/FragmentWithValidName.other-suffix.ios.and-another.graphql"); let expected = include_str!("validate_module_names/fixtures/FragmentWithValidName.other-suffix.ios.and-another.expected"); - test_fixture(transform_fixture, "FragmentWithValidName.other-suffix.ios.and-another.graphql", "validate_module_names/fixtures/FragmentWithValidName.other-suffix.ios.and-another.expected", input, expected); + test_fixture(transform_fixture, file!(), "FragmentWithValidName.other-suffix.ios.and-another.graphql", "validate_module_names/fixtures/FragmentWithValidName.other-suffix.ios.and-another.expected", input, expected).await; } -#[test] -fn mutationwithinvalidname_invalid() { +#[tokio::test] +async fn mutationwithinvalidname_invalid() { let input = include_str!("validate_module_names/fixtures/MutationWithInvalidName.invalid.graphql"); let 
expected = include_str!("validate_module_names/fixtures/MutationWithInvalidName.invalid.expected"); - test_fixture(transform_fixture, "MutationWithInvalidName.invalid.graphql", "validate_module_names/fixtures/MutationWithInvalidName.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "MutationWithInvalidName.invalid.graphql", "validate_module_names/fixtures/MutationWithInvalidName.invalid.expected", input, expected).await; } -#[test] -fn mutationwithinvalidsuffix_invalid() { +#[tokio::test] +async fn mutationwithinvalidsuffix_invalid() { let input = include_str!("validate_module_names/fixtures/MutationWithInvalidSuffix.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/MutationWithInvalidSuffix.invalid.expected"); - test_fixture(transform_fixture, "MutationWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/MutationWithInvalidSuffix.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "MutationWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/MutationWithInvalidSuffix.invalid.expected", input, expected).await; } -#[test] -fn mutationwithvalidname() { +#[tokio::test] +async fn mutationwithvalidname() { let input = include_str!("validate_module_names/fixtures/MutationWithValidName.graphql"); let expected = include_str!("validate_module_names/fixtures/MutationWithValidName.expected"); - test_fixture(transform_fixture, "MutationWithValidName.graphql", "validate_module_names/fixtures/MutationWithValidName.expected", input, expected); + test_fixture(transform_fixture, file!(), "MutationWithValidName.graphql", "validate_module_names/fixtures/MutationWithValidName.expected", input, expected).await; } -#[test] -fn querywithinvalidname_invalid() { +#[tokio::test] +async fn querywithinvalidname_invalid() { let input = include_str!("validate_module_names/fixtures/QueryWithInvalidName.invalid.graphql"); let expected = 
include_str!("validate_module_names/fixtures/QueryWithInvalidName.invalid.expected"); - test_fixture(transform_fixture, "QueryWithInvalidName.invalid.graphql", "validate_module_names/fixtures/QueryWithInvalidName.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "QueryWithInvalidName.invalid.graphql", "validate_module_names/fixtures/QueryWithInvalidName.invalid.expected", input, expected).await; } -#[test] -fn querywithinvalidsuffix_invalid() { +#[tokio::test] +async fn querywithinvalidsuffix_invalid() { let input = include_str!("validate_module_names/fixtures/QueryWithInvalidSuffix.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/QueryWithInvalidSuffix.invalid.expected"); - test_fixture(transform_fixture, "QueryWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/QueryWithInvalidSuffix.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "QueryWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/QueryWithInvalidSuffix.invalid.expected", input, expected).await; } -#[test] -fn querywithvalidname() { +#[tokio::test] +async fn querywithvalidname() { let input = include_str!("validate_module_names/fixtures/QueryWithValidName.graphql"); let expected = include_str!("validate_module_names/fixtures/QueryWithValidName.expected"); - test_fixture(transform_fixture, "QueryWithValidName.graphql", "validate_module_names/fixtures/QueryWithValidName.expected", input, expected); + test_fixture(transform_fixture, file!(), "QueryWithValidName.graphql", "validate_module_names/fixtures/QueryWithValidName.expected", input, expected).await; } -#[test] -fn subscriptionwithinvalidname_invalid() { +#[tokio::test] +async fn subscriptionwithinvalidname_invalid() { let input = include_str!("validate_module_names/fixtures/SubscriptionWithInvalidName.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/SubscriptionWithInvalidName.invalid.expected"); - 
test_fixture(transform_fixture, "SubscriptionWithInvalidName.invalid.graphql", "validate_module_names/fixtures/SubscriptionWithInvalidName.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "SubscriptionWithInvalidName.invalid.graphql", "validate_module_names/fixtures/SubscriptionWithInvalidName.invalid.expected", input, expected).await; } -#[test] -fn subscriptionwithinvalidsuffix_invalid() { +#[tokio::test] +async fn subscriptionwithinvalidsuffix_invalid() { let input = include_str!("validate_module_names/fixtures/SubscriptionWithInvalidSuffix.invalid.graphql"); let expected = include_str!("validate_module_names/fixtures/SubscriptionWithInvalidSuffix.invalid.expected"); - test_fixture(transform_fixture, "SubscriptionWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/SubscriptionWithInvalidSuffix.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "SubscriptionWithInvalidSuffix.invalid.graphql", "validate_module_names/fixtures/SubscriptionWithInvalidSuffix.invalid.expected", input, expected).await; } -#[test] -fn subscriptionwithvalidname() { +#[tokio::test] +async fn subscriptionwithvalidname() { let input = include_str!("validate_module_names/fixtures/SubscriptionWithValidName.graphql"); let expected = include_str!("validate_module_names/fixtures/SubscriptionWithValidName.expected"); - test_fixture(transform_fixture, "SubscriptionWithValidName.graphql", "validate_module_names/fixtures/SubscriptionWithValidName.expected", input, expected); + test_fixture(transform_fixture, file!(), "SubscriptionWithValidName.graphql", "validate_module_names/fixtures/SubscriptionWithValidName.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias.rs b/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias.rs new file mode 100644 index 0000000000000..a73b65df5d867 --- /dev/null +++ 
b/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias.rs @@ -0,0 +1,32 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_no_double_underscore_alias; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir_result = build(&TEST_SCHEMA, &ast.definitions); + let ir = ir_result + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_no_double_underscore_alias(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias/mod.rs b/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias/mod.rs deleted file mode 100644 index c5015744006fa..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias/mod.rs +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_no_double_underscore_alias; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir_result = build(&TEST_SCHEMA, &ast.definitions); - let ir = ir_result - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_no_double_underscore_alias(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias_test.rs b/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias_test.rs index 266a1d24d50d3..e4aacf445a723 100644 --- a/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_no_double_underscore_alias_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<<6b323ea6b50baef0fe3152e849c4393a>> */ mod validate_no_double_underscore_alias; @@ -12,16 +12,16 @@ mod validate_no_double_underscore_alias; use validate_no_double_underscore_alias::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn double_underscore_invalid() { +#[tokio::test] +async fn double_underscore_invalid() { let input = include_str!("validate_no_double_underscore_alias/fixtures/double_underscore.invalid.graphql"); let expected = include_str!("validate_no_double_underscore_alias/fixtures/double_underscore.invalid.expected"); - test_fixture(transform_fixture, "double_underscore.invalid.graphql", "validate_no_double_underscore_alias/fixtures/double_underscore.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "double_underscore.invalid.graphql", "validate_no_double_underscore_alias/fixtures/double_underscore.invalid.expected", input, expected).await; } -#[test] -fn non_alias() { +#[tokio::test] +async fn non_alias() { let input = include_str!("validate_no_double_underscore_alias/fixtures/non_alias.graphql"); let expected = include_str!("validate_no_double_underscore_alias/fixtures/non_alias.expected"); - test_fixture(transform_fixture, "non_alias.graphql", "validate_no_double_underscore_alias/fixtures/non_alias.expected", input, expected); + test_fixture(transform_fixture, file!(), "non_alias.graphql", "validate_no_double_underscore_alias/fixtures/non_alias.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections.rs b/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections.rs new file mode 100644 index 0000000000000..06ccecb0090ee --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections.rs @@ -0,0 +1,17 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use graphql_test_helpers::apply_transform_for_test; +use relay_transforms::validate_no_unselectable_selections; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + apply_transform_for_test(fixture, |p| { + validate_no_unselectable_selections(p, &Default::default())?; + Ok(p.clone()) + }) +} diff --git a/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections/mod.rs b/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections/mod.rs deleted file mode 100644 index 5ce929d265670..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use fixture_tests::Fixture; -use graphql_test_helpers::apply_transform_for_test; -use relay_transforms::validate_no_unselectable_selections; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - apply_transform_for_test(fixture, |p| { - validate_no_unselectable_selections(p, &Default::default())?; - Ok(p.clone()) - }) -} diff --git a/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections_test.rs b/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections_test.rs index a9ff0c981739f..c32ee75b655e4 100644 --- a/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_no_unselectable_selections_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<82ae09fce8803e956efea1b568733995>> + * @generated SignedSource<<816428d5c7728de4b7b23dddd578359b>> */ mod validate_no_unselectable_selections; @@ -12,30 +12,30 @@ mod validate_no_unselectable_selections; use validate_no_unselectable_selections::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn multiple_unselectables_invalid() { +#[tokio::test] +async fn multiple_unselectables_invalid() { let input = include_str!("validate_no_unselectable_selections/fixtures/multiple-unselectables.invalid.graphql"); let expected = include_str!("validate_no_unselectable_selections/fixtures/multiple-unselectables.invalid.expected"); - test_fixture(transform_fixture, "multiple-unselectables.invalid.graphql", "validate_no_unselectable_selections/fixtures/multiple-unselectables.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "multiple-unselectables.invalid.graphql", "validate_no_unselectable_selections/fixtures/multiple-unselectables.invalid.expected", input, expected).await; } -#[test] -fn unselectable_linked_field_invalid() { +#[tokio::test] +async fn unselectable_linked_field_invalid() { let input = include_str!("validate_no_unselectable_selections/fixtures/unselectable-linked-field.invalid.graphql"); let expected = include_str!("validate_no_unselectable_selections/fixtures/unselectable-linked-field.invalid.expected"); - test_fixture(transform_fixture, "unselectable-linked-field.invalid.graphql", "validate_no_unselectable_selections/fixtures/unselectable-linked-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unselectable-linked-field.invalid.graphql", "validate_no_unselectable_selections/fixtures/unselectable-linked-field.invalid.expected", input, expected).await; } -#[test] -fn unselectable_scalar_invalid() { +#[tokio::test] +async fn unselectable_scalar_invalid() { let input = 
include_str!("validate_no_unselectable_selections/fixtures/unselectable-scalar.invalid.graphql"); let expected = include_str!("validate_no_unselectable_selections/fixtures/unselectable-scalar.invalid.expected"); - test_fixture(transform_fixture, "unselectable-scalar.invalid.graphql", "validate_no_unselectable_selections/fixtures/unselectable-scalar.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unselectable-scalar.invalid.graphql", "validate_no_unselectable_selections/fixtures/unselectable-scalar.invalid.expected", input, expected).await; } -#[test] -fn valid_selection() { +#[tokio::test] +async fn valid_selection() { let input = include_str!("validate_no_unselectable_selections/fixtures/valid-selection.graphql"); let expected = include_str!("validate_no_unselectable_selections/fixtures/valid-selection.expected"); - test_fixture(transform_fixture, "valid-selection.graphql", "validate_no_unselectable_selections/fixtures/valid-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "valid-selection.graphql", "validate_no_unselectable_selections/fixtures/valid-selection.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_relay_directives.rs b/compiler/crates/relay-transforms/tests/validate_relay_directives.rs new file mode 100644 index 0000000000000..f6408ba43b3ab --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_relay_directives.rs @@ -0,0 +1,30 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema; +use relay_transforms::validate_relay_directives; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let schema = get_test_schema(); + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&schema, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(schema, ir); + validate_relay_directives(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_relay_directives/mod.rs b/compiler/crates/relay-transforms/tests/validate_relay_directives/mod.rs deleted file mode 100644 index cbdb895928113..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_relay_directives/mod.rs +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema; -use relay_transforms::validate_relay_directives; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let schema = get_test_schema(); - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(schema, ir); - validate_relay_directives(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_relay_directives_test.rs b/compiler/crates/relay-transforms/tests/validate_relay_directives_test.rs index 7fa60ffc8200b..3a5f1c8f5049a 100644 --- a/compiler/crates/relay-transforms/tests/validate_relay_directives_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_relay_directives_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod validate_relay_directives; @@ -12,72 +12,72 @@ mod validate_relay_directives; use validate_relay_directives::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn mask_incompatible_type_invalid() { +#[tokio::test] +async fn mask_incompatible_type_invalid() { let input = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-invalid.expected"); - test_fixture(transform_fixture, "mask-incompatible-type-invalid.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-incompatible-type-invalid.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-invalid.expected", input, expected).await; } -#[test] -fn mask_incompatible_type_invalid2() { +#[tokio::test] +async fn mask_incompatible_type_invalid2() { let input = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-invalid2.graphql"); let expected = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-invalid2.expected"); - test_fixture(transform_fixture, "mask-incompatible-type-invalid2.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-invalid2.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-incompatible-type-invalid2.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-invalid2.expected", input, expected).await; } -#[test] -fn mask_incompatible_type_query_invalid() { +#[tokio::test] +async fn mask_incompatible_type_query_invalid() { let input = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-query.invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/mask-incompatible-type-query.invalid.expected"); - test_fixture(transform_fixture, 
"mask-incompatible-type-query.invalid.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-query.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-incompatible-type-query.invalid.graphql", "validate_relay_directives/fixtures/mask-incompatible-type-query.invalid.expected", input, expected).await; } -#[test] -fn mask_mixed_local_root_invalid() { +#[tokio::test] +async fn mask_mixed_local_root_invalid() { let input = include_str!("validate_relay_directives/fixtures/mask-mixed-local-root-invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/mask-mixed-local-root-invalid.expected"); - test_fixture(transform_fixture, "mask-mixed-local-root-invalid.graphql", "validate_relay_directives/fixtures/mask-mixed-local-root-invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-mixed-local-root-invalid.graphql", "validate_relay_directives/fixtures/mask-mixed-local-root-invalid.expected", input, expected).await; } -#[test] -fn mask_mixed_null() { +#[tokio::test] +async fn mask_mixed_null() { let input = include_str!("validate_relay_directives/fixtures/mask-mixed-null.graphql"); let expected = include_str!("validate_relay_directives/fixtures/mask-mixed-null.expected"); - test_fixture(transform_fixture, "mask-mixed-null.graphql", "validate_relay_directives/fixtures/mask-mixed-null.expected", input, expected); + test_fixture(transform_fixture, file!(), "mask-mixed-null.graphql", "validate_relay_directives/fixtures/mask-mixed-null.expected", input, expected).await; } -#[test] -fn plural_fragment() { +#[tokio::test] +async fn plural_fragment() { let input = include_str!("validate_relay_directives/fixtures/plural-fragment.graphql"); let expected = include_str!("validate_relay_directives/fixtures/plural-fragment.expected"); - test_fixture(transform_fixture, "plural-fragment.graphql", "validate_relay_directives/fixtures/plural-fragment.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "plural-fragment.graphql", "validate_relay_directives/fixtures/plural-fragment.expected", input, expected).await; } -#[test] -fn plural_fragment_variables_invalid() { +#[tokio::test] +async fn plural_fragment_variables_invalid() { let input = include_str!("validate_relay_directives/fixtures/plural-fragment-variables.invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/plural-fragment-variables.invalid.expected"); - test_fixture(transform_fixture, "plural-fragment-variables.invalid.graphql", "validate_relay_directives/fixtures/plural-fragment-variables.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "plural-fragment-variables.invalid.graphql", "validate_relay_directives/fixtures/plural-fragment-variables.invalid.expected", input, expected).await; } -#[test] -fn unmasked_spread() { +#[tokio::test] +async fn unmasked_spread() { let input = include_str!("validate_relay_directives/fixtures/unmasked-spread.graphql"); let expected = include_str!("validate_relay_directives/fixtures/unmasked-spread.expected"); - test_fixture(transform_fixture, "unmasked-spread.graphql", "validate_relay_directives/fixtures/unmasked-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-spread.graphql", "validate_relay_directives/fixtures/unmasked-spread.expected", input, expected).await; } -#[test] -fn unmasked_spread_with_argument_definition_invalid() { +#[tokio::test] +async fn unmasked_spread_with_argument_definition_invalid() { let input = include_str!("validate_relay_directives/fixtures/unmasked-spread-with-argument-definition.invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/unmasked-spread-with-argument-definition.invalid.expected"); - test_fixture(transform_fixture, "unmasked-spread-with-argument-definition.invalid.graphql", 
"validate_relay_directives/fixtures/unmasked-spread-with-argument-definition.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-spread-with-argument-definition.invalid.graphql", "validate_relay_directives/fixtures/unmasked-spread-with-argument-definition.invalid.expected", input, expected).await; } -#[test] -fn unmasked_spread_with_directive_invalid() { +#[tokio::test] +async fn unmasked_spread_with_directive_invalid() { let input = include_str!("validate_relay_directives/fixtures/unmasked-spread-with-directive.invalid.graphql"); let expected = include_str!("validate_relay_directives/fixtures/unmasked-spread-with-directive.invalid.expected"); - test_fixture(transform_fixture, "unmasked-spread-with-directive.invalid.graphql", "validate_relay_directives/fixtures/unmasked-spread-with-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-spread-with-directive.invalid.graphql", "validate_relay_directives/fixtures/unmasked-spread-with-directive.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_required_arguments.rs b/compiler/crates/relay-transforms/tests/validate_required_arguments.rs new file mode 100644 index 0000000000000..d88b2770b5196 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_required_arguments.rs @@ -0,0 +1,31 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_required_arguments; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&TEST_SCHEMA, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_required_arguments(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_required_arguments/mod.rs b/compiler/crates/relay-transforms/tests/validate_required_arguments/mod.rs deleted file mode 100644 index f0d088f4f33c1..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_required_arguments/mod.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_required_arguments; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&TEST_SCHEMA, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_required_arguments(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_required_arguments_test.rs b/compiler/crates/relay-transforms/tests/validate_required_arguments_test.rs index 8c34e9bd39a3e..4b019f1448aa8 100644 --- a/compiler/crates/relay-transforms/tests/validate_required_arguments_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_required_arguments_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<86341e77fc38860285a555f3430c105e>> + * @generated SignedSource<<1c25f56c7131b7ca80b5d3064376b38e>> */ mod validate_required_arguments; @@ -12,58 +12,58 @@ mod validate_required_arguments; use validate_required_arguments::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn argument_on_field() { +#[tokio::test] +async fn argument_on_field() { let input = include_str!("validate_required_arguments/fixtures/argument-on-field.graphql"); let expected = include_str!("validate_required_arguments/fixtures/argument-on-field.expected"); - test_fixture(transform_fixture, "argument-on-field.graphql", "validate_required_arguments/fixtures/argument-on-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-on-field.graphql", "validate_required_arguments/fixtures/argument-on-field.expected", input, expected).await; } -#[test] -fn argument_on_linked_field() { +#[tokio::test] +async fn argument_on_linked_field() { let input = include_str!("validate_required_arguments/fixtures/argument-on-linked-field.graphql"); let expected = include_str!("validate_required_arguments/fixtures/argument-on-linked-field.expected"); - test_fixture(transform_fixture, "argument-on-linked-field.graphql", "validate_required_arguments/fixtures/argument-on-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "argument-on-linked-field.graphql", "validate_required_arguments/fixtures/argument-on-linked-field.expected", input, expected).await; } -#[test] -fn default_argument_on_field() { +#[tokio::test] +async fn default_argument_on_field() { let input = include_str!("validate_required_arguments/fixtures/default-argument-on-field.graphql"); let expected = include_str!("validate_required_arguments/fixtures/default-argument-on-field.expected"); - test_fixture(transform_fixture, "default-argument-on-field.graphql", "validate_required_arguments/fixtures/default-argument-on-field.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "default-argument-on-field.graphql", "validate_required_arguments/fixtures/default-argument-on-field.expected", input, expected).await; } -#[test] -fn missing_argument_on_directive_invalid() { +#[tokio::test] +async fn missing_argument_on_directive_invalid() { let input = include_str!("validate_required_arguments/fixtures/missing-argument-on-directive.invalid.graphql"); let expected = include_str!("validate_required_arguments/fixtures/missing-argument-on-directive.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-directive.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-directive.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-directive.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-directive.invalid.expected", input, expected).await; } -#[test] -fn missing_argument_on_field_invalid() { +#[tokio::test] +async fn missing_argument_on_field_invalid() { let input = include_str!("validate_required_arguments/fixtures/missing-argument-on-field.invalid.graphql"); let expected = include_str!("validate_required_arguments/fixtures/missing-argument-on-field.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-field.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-field.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-field.invalid.expected", input, expected).await; } -#[test] -fn missing_argument_on_linked_field_in_inline_fragment_invalid() { +#[tokio::test] +async fn missing_argument_on_linked_field_in_inline_fragment_invalid() { let input = include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field-in-inline-fragment.invalid.graphql"); let expected = 
include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field-in-inline-fragment.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-linked-field-in-inline-fragment.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-linked-field-in-inline-fragment.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-linked-field-in-inline-fragment.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-linked-field-in-inline-fragment.invalid.expected", input, expected).await; } -#[test] -fn missing_argument_on_linked_field_invalid() { +#[tokio::test] +async fn missing_argument_on_linked_field_invalid() { let input = include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field.invalid.graphql"); let expected = include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-linked-field.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-linked-field.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-linked-field.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-linked-field.invalid.expected", input, expected).await; } -#[test] -fn missing_argument_on_linked_field_on_abstract_type_invalid() { +#[tokio::test] +async fn missing_argument_on_linked_field_on_abstract_type_invalid() { let input = include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field-on-abstract-type.invalid.graphql"); let expected = include_str!("validate_required_arguments/fixtures/missing-argument-on-linked-field-on-abstract-type.invalid.expected"); - test_fixture(transform_fixture, "missing-argument-on-linked-field-on-abstract-type.invalid.graphql", 
"validate_required_arguments/fixtures/missing-argument-on-linked-field-on-abstract-type.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "missing-argument-on-linked-field-on-abstract-type.invalid.graphql", "validate_required_arguments/fixtures/missing-argument-on-linked-field-on-abstract-type.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_server_only_directives.rs b/compiler/crates/relay-transforms/tests/validate_server_only_directives.rs new file mode 100644 index 0000000000000..589ada2fccee3 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives.rs @@ -0,0 +1,36 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::validate_server_only_directives; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + validate_server_only_directives(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git 
a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected index 71e9b5d1152c7..f581b57be4a83 100644 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected @@ -11,7 +11,7 @@ fragment FeedbackFragment on Feedback { id foo { bar { - users @stream(initial_count: 1, label: "StreamedActorsLabel") { + users @stream(initialCount: 1, label: "StreamedActorsLabel") { id name } @@ -36,7 +36,7 @@ type Bar { fragment-with-stream-child-of-client.invalid.graphql:13:13 12 │ bar { - 13 │ users @stream(initial_count: 1, label: "StreamedActorsLabel") { + 13 │ users @stream(initialCount: 1, label: "StreamedActorsLabel") { │ ^^^^^^^ 14 │ id diff --git a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.graphql b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.graphql index 49880cbacd8cc..ce5213ab3d448 100644 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.graphql @@ -10,7 +10,7 @@ fragment FeedbackFragment on Feedback { id foo { bar { - users @stream(initial_count: 1, label: "StreamedActorsLabel") { + users @stream(initialCount: 1, label: "StreamedActorsLabel") { id name } diff --git 
a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected index 0a8232ffb6cbd..f15daf0a8c3e7 100644 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected @@ -9,7 +9,7 @@ query QueryWithFragmentWithStream($id: ID!) { fragment FeedbackFragment on Feedback { id - foos @stream(initial_count: 1, label: "StreamedActorsLabel") { + foos @stream(initialCount: 1, label: "StreamedActorsLabel") { bar } } @@ -28,7 +28,7 @@ type Foo { fragment-with-stream-on-client.invalid.graphql:11:8 10 │ id - 11 │ foos @stream(initial_count: 1, label: "StreamedActorsLabel") { + 11 │ foos @stream(initialCount: 1, label: "StreamedActorsLabel") { │ ^^^^^^^ 12 │ bar @@ -36,6 +36,6 @@ type Foo { fragment-with-stream-on-client.invalid.graphql:11:3 10 │ id - 11 │ foos @stream(initial_count: 1, label: "StreamedActorsLabel") { + 11 │ foos @stream(initialCount: 1, label: "StreamedActorsLabel") { │ ^^^^ 12 │ bar diff --git a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.graphql b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.graphql index 045192ee83bfa..5a45d44cf6645 100644 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.graphql +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.graphql @@ -8,7 +8,7 @@ query QueryWithFragmentWithStream($id: ID!) 
{ fragment FeedbackFragment on Feedback { id - foos @stream(initial_count: 1, label: "StreamedActorsLabel") { + foos @stream(initialCount: 1, label: "StreamedActorsLabel") { bar } } diff --git a/compiler/crates/relay-transforms/tests/validate_server_only_directives/mod.rs b/compiler/crates/relay-transforms/tests/validate_server_only_directives/mod.rs deleted file mode 100644 index 629a1092e5bd6..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives/mod.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::validate_server_only_directives; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - validate_server_only_directives(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/validate_server_only_directives_test.rs b/compiler/crates/relay-transforms/tests/validate_server_only_directives_test.rs index 
7a857f3d2b326..366089ce9b7e1 100644 --- a/compiler/crates/relay-transforms/tests/validate_server_only_directives_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_server_only_directives_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<2e68a7fab154b5051d92bf6e0b537bb5>> + * @generated SignedSource<<197406268b8f70fead05918254355695>> */ mod validate_server_only_directives; @@ -12,72 +12,72 @@ mod validate_server_only_directives; use validate_server_only_directives::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_client_fileds_inside_valid_fragments() { +#[tokio::test] +async fn fragment_with_client_fileds_inside_valid_fragments() { let input = include_str!("validate_server_only_directives/fixtures/fragment-with-client-fileds-inside-valid-fragments.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/fragment-with-client-fileds-inside-valid-fragments.expected"); - test_fixture(transform_fixture, "fragment-with-client-fileds-inside-valid-fragments.graphql", "validate_server_only_directives/fixtures/fragment-with-client-fileds-inside-valid-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-client-fileds-inside-valid-fragments.graphql", "validate_server_only_directives/fixtures/fragment-with-client-fileds-inside-valid-fragments.expected", input, expected).await; } -#[test] -fn fragment_with_defer_on_client_invalid() { +#[tokio::test] +async fn fragment_with_defer_on_client_invalid() { let input = include_str!("validate_server_only_directives/fixtures/fragment-with-defer-on-client.invalid.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/fragment-with-defer-on-client.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-defer-on-client.invalid.graphql", 
"validate_server_only_directives/fixtures/fragment-with-defer-on-client.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-on-client.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-defer-on-client.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_defer_on_fragment_with_only_client_fields_invalid() { +#[tokio::test] +async fn fragment_with_defer_on_fragment_with_only_client_fields_invalid() { let input = include_str!("validate_server_only_directives/fixtures/fragment-with-defer-on-fragment-with-only-client-fields.invalid.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/fragment-with-defer-on-fragment-with-only-client-fields.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-defer-on-fragment-with-only-client-fields.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-defer-on-fragment-with-only-client-fields.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-defer-on-fragment-with-only-client-fields.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-defer-on-fragment-with-only-client-fields.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_stream_child_of_client_invalid() { +#[tokio::test] +async fn fragment_with_stream_child_of_client_invalid() { let input = include_str!("validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-child-of-client.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-child-of-client.invalid.graphql", 
"validate_server_only_directives/fixtures/fragment-with-stream-child-of-client.invalid.expected", input, expected).await; } -#[test] -fn fragment_with_stream_on_client_invalid() { +#[tokio::test] +async fn fragment_with_stream_on_client_invalid() { let input = include_str!("validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected"); - test_fixture(transform_fixture, "fragment-with-stream-on-client.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-stream-on-client.invalid.graphql", "validate_server_only_directives/fixtures/fragment-with-stream-on-client.invalid.expected", input, expected).await; } -#[test] -fn match_() { +#[tokio::test] +async fn match_() { let input = include_str!("validate_server_only_directives/fixtures/match_.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/match_.expected"); - test_fixture(transform_fixture, "match_.graphql", "validate_server_only_directives/fixtures/match_.expected", input, expected); + test_fixture(transform_fixture, file!(), "match_.graphql", "validate_server_only_directives/fixtures/match_.expected", input, expected).await; } -#[test] -fn module_on_child_of_client() { +#[tokio::test] +async fn module_on_child_of_client() { let input = include_str!("validate_server_only_directives/fixtures/module-on-child-of-client.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/module-on-child-of-client.expected"); - test_fixture(transform_fixture, "module-on-child-of-client.graphql", "validate_server_only_directives/fixtures/module-on-child-of-client.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-on-child-of-client.graphql", 
"validate_server_only_directives/fixtures/module-on-child-of-client.expected", input, expected).await; } -#[test] -fn module_on_child_of_client_in_fragment() { +#[tokio::test] +async fn module_on_child_of_client_in_fragment() { let input = include_str!("validate_server_only_directives/fixtures/module-on-child-of-client-in-fragment.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/module-on-child-of-client-in-fragment.expected"); - test_fixture(transform_fixture, "module-on-child-of-client-in-fragment.graphql", "validate_server_only_directives/fixtures/module-on-child-of-client-in-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "module-on-child-of-client-in-fragment.graphql", "validate_server_only_directives/fixtures/module-on-child-of-client-in-fragment.expected", input, expected).await; } -#[test] -fn nested_fragment_with_defer() { +#[tokio::test] +async fn nested_fragment_with_defer() { let input = include_str!("validate_server_only_directives/fixtures/nested-fragment-with-defer.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/nested-fragment-with-defer.expected"); - test_fixture(transform_fixture, "nested-fragment-with-defer.graphql", "validate_server_only_directives/fixtures/nested-fragment-with-defer.expected", input, expected); + test_fixture(transform_fixture, file!(), "nested-fragment-with-defer.graphql", "validate_server_only_directives/fixtures/nested-fragment-with-defer.expected", input, expected).await; } -#[test] -fn stream_connection_on_client_invalid() { +#[tokio::test] +async fn stream_connection_on_client_invalid() { let input = include_str!("validate_server_only_directives/fixtures/stream-connection-on-client.invalid.graphql"); let expected = include_str!("validate_server_only_directives/fixtures/stream-connection-on-client.invalid.expected"); - test_fixture(transform_fixture, "stream-connection-on-client.invalid.graphql", 
"validate_server_only_directives/fixtures/stream-connection-on-client.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "stream-connection-on-client.invalid.graphql", "validate_server_only_directives/fixtures/stream-connection-on-client.invalid.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_static_args.rs b/compiler/crates/relay-transforms/tests/validate_static_args.rs new file mode 100644 index 0000000000000..a3978e842419e --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_static_args.rs @@ -0,0 +1,36 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::validate_static_args; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split("%extensions%").collect(); + + if let [base, extensions] = parts.as_slice() { + let source_location = SourceLocationKey::standalone(fixture.file_name); + let ast = parse_executable(base, source_location).unwrap(); + let schema = get_test_schema_with_extensions(extensions); + + let ir = build(&schema, &ast.definitions).unwrap(); + let program = Program::from_definitions(Arc::clone(&schema), ir); + validate_static_args(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) + } else { + panic!("Expected exactly one %extensions% section marker.") + } +} diff --git a/compiler/crates/relay-transforms/tests/validate_static_args/mod.rs 
b/compiler/crates/relay-transforms/tests/validate_static_args/mod.rs deleted file mode 100644 index 8117834eab6bb..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_static_args/mod.rs +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::validate_static_args; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split("%extensions%").collect(); - - if let [base, extensions] = parts.as_slice() { - let source_location = SourceLocationKey::standalone(fixture.file_name); - let ast = parse_executable(base, source_location).unwrap(); - let schema = get_test_schema_with_extensions(extensions); - - let ir = build(&schema, &ast.definitions).unwrap(); - let program = Program::from_definitions(Arc::clone(&schema), ir); - validate_static_args(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) - } else { - panic!("Expected exactly one %extensions% section marker.") - } -} diff --git a/compiler/crates/relay-transforms/tests/validate_static_args_test.rs b/compiler/crates/relay-transforms/tests/validate_static_args_test.rs index d29aa21e5ef60..472e4efb9b801 100644 --- a/compiler/crates/relay-transforms/tests/validate_static_args_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_static_args_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<7c97b400056e87e80acdee4470b99a05>> + * @generated SignedSource<> */ mod validate_static_args; @@ -12,30 +12,30 @@ mod validate_static_args; use validate_static_args::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn match_with_dynamic_arg_invalid() { +#[tokio::test] +async fn match_with_dynamic_arg_invalid() { let input = include_str!("validate_static_args/fixtures/match-with-dynamic-arg.invalid.graphql"); let expected = include_str!("validate_static_args/fixtures/match-with-dynamic-arg.invalid.expected"); - test_fixture(transform_fixture, "match-with-dynamic-arg.invalid.graphql", "validate_static_args/fixtures/match-with-dynamic-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-with-dynamic-arg.invalid.graphql", "validate_static_args/fixtures/match-with-dynamic-arg.invalid.expected", input, expected).await; } -#[test] -fn nonstatic_values_on_static_arg_invalid() { +#[tokio::test] +async fn nonstatic_values_on_static_arg_invalid() { let input = include_str!("validate_static_args/fixtures/nonstatic-values-on-static-arg.invalid.graphql"); let expected = include_str!("validate_static_args/fixtures/nonstatic-values-on-static-arg.invalid.expected"); - test_fixture(transform_fixture, "nonstatic-values-on-static-arg.invalid.graphql", "validate_static_args/fixtures/nonstatic-values-on-static-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "nonstatic-values-on-static-arg.invalid.graphql", "validate_static_args/fixtures/nonstatic-values-on-static-arg.invalid.expected", input, expected).await; } -#[test] -fn required_with_dynamic_arg_invalid() { +#[tokio::test] +async fn required_with_dynamic_arg_invalid() { let input = include_str!("validate_static_args/fixtures/required-with-dynamic-arg.invalid.graphql"); let expected = include_str!("validate_static_args/fixtures/required-with-dynamic-arg.invalid.expected"); - test_fixture(transform_fixture, 
"required-with-dynamic-arg.invalid.graphql", "validate_static_args/fixtures/required-with-dynamic-arg.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-with-dynamic-arg.invalid.graphql", "validate_static_args/fixtures/required-with-dynamic-arg.invalid.expected", input, expected).await; } -#[test] -fn static_only_for_constants() { +#[tokio::test] +async fn static_only_for_constants() { let input = include_str!("validate_static_args/fixtures/static-only-for-constants.graphql"); let expected = include_str!("validate_static_args/fixtures/static-only-for-constants.expected"); - test_fixture(transform_fixture, "static-only-for-constants.graphql", "validate_static_args/fixtures/static-only-for-constants.expected", input, expected); + test_fixture(transform_fixture, file!(), "static-only-for-constants.graphql", "validate_static_args/fixtures/static-only-for-constants.expected", input, expected).await; } diff --git a/compiler/crates/relay-transforms/tests/validate_unused_variables.rs b/compiler/crates/relay-transforms/tests/validate_unused_variables.rs new file mode 100644 index 0000000000000..e7e7716a9a442 --- /dev/null +++ b/compiler/crates/relay-transforms/tests/validate_unused_variables.rs @@ -0,0 +1,31 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::sync::Arc; + +use common::SourceLocationKey; +use fixture_tests::Fixture; +use graphql_ir::build; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use relay_test_schema::TEST_SCHEMA; +use relay_transforms::validate_unused_variables; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let ast = parse_executable(fixture.content, source_location).unwrap(); + let ir = build(&TEST_SCHEMA, &ast.definitions) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); + validate_unused_variables(&program) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; + + Ok("OK".to_owned()) +} diff --git a/compiler/crates/relay-transforms/tests/validate_unused_variables/mod.rs b/compiler/crates/relay-transforms/tests/validate_unused_variables/mod.rs deleted file mode 100644 index 59101d7a6923b..0000000000000 --- a/compiler/crates/relay-transforms/tests/validate_unused_variables/mod.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::SourceLocationKey; -use fixture_tests::Fixture; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use relay_test_schema::TEST_SCHEMA; -use relay_transforms::validate_unused_variables; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let ast = parse_executable(fixture.content, source_location).unwrap(); - let ir = build(&TEST_SCHEMA, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - let program = Program::from_definitions(Arc::clone(&TEST_SCHEMA), ir); - validate_unused_variables(&program) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?; - - Ok("OK".to_owned()) -} diff --git a/compiler/crates/relay-transforms/tests/validate_unused_variables_test.rs b/compiler/crates/relay-transforms/tests/validate_unused_variables_test.rs index 82ed942e4aee4..0910f691fa962 100644 --- a/compiler/crates/relay-transforms/tests/validate_unused_variables_test.rs +++ b/compiler/crates/relay-transforms/tests/validate_unused_variables_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<620255d5fdc8dee833f3b68dc61e8fa8>> + * @generated SignedSource<<2d9c78832993b2bbe4c580b22d0aeecf>> */ mod validate_unused_variables; @@ -12,58 +12,58 @@ mod validate_unused_variables; use validate_unused_variables::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn fragment_with_root_arguments() { +#[tokio::test] +async fn fragment_with_root_arguments() { let input = include_str!("validate_unused_variables/fixtures/fragment-with-root-arguments.graphql"); let expected = include_str!("validate_unused_variables/fixtures/fragment-with-root-arguments.expected"); - test_fixture(transform_fixture, "fragment-with-root-arguments.graphql", "validate_unused_variables/fixtures/fragment-with-root-arguments.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-with-root-arguments.graphql", "validate_unused_variables/fixtures/fragment-with-root-arguments.expected", input, expected).await; } -#[test] -fn practically_unused_but_actually_used_variables() { +#[tokio::test] +async fn practically_unused_but_actually_used_variables() { let input = include_str!("validate_unused_variables/fixtures/practically-unused-but-actually-used-variables.graphql"); let expected = include_str!("validate_unused_variables/fixtures/practically-unused-but-actually-used-variables.expected"); - test_fixture(transform_fixture, "practically-unused-but-actually-used-variables.graphql", "validate_unused_variables/fixtures/practically-unused-but-actually-used-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "practically-unused-but-actually-used-variables.graphql", "validate_unused_variables/fixtures/practically-unused-but-actually-used-variables.expected", input, expected).await; } -#[test] -fn query_with_invalid_error_suppression() { +#[tokio::test] +async fn query_with_invalid_error_suppression() { let input = include_str!("validate_unused_variables/fixtures/query-with-invalid-error-suppression.graphql"); 
let expected = include_str!("validate_unused_variables/fixtures/query-with-invalid-error-suppression.expected"); - test_fixture(transform_fixture, "query-with-invalid-error-suppression.graphql", "validate_unused_variables/fixtures/query-with-invalid-error-suppression.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-invalid-error-suppression.graphql", "validate_unused_variables/fixtures/query-with-invalid-error-suppression.expected", input, expected).await; } -#[test] -fn query_with_unused_root_variable_shadowed_by_local_invalid() { +#[tokio::test] +async fn query_with_unused_root_variable_shadowed_by_local_invalid() { let input = include_str!("validate_unused_variables/fixtures/query-with-unused-root-variable-shadowed-by-local.invalid.graphql"); let expected = include_str!("validate_unused_variables/fixtures/query-with-unused-root-variable-shadowed-by-local.invalid.expected"); - test_fixture(transform_fixture, "query-with-unused-root-variable-shadowed-by-local.invalid.graphql", "validate_unused_variables/fixtures/query-with-unused-root-variable-shadowed-by-local.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-unused-root-variable-shadowed-by-local.invalid.graphql", "validate_unused_variables/fixtures/query-with-unused-root-variable-shadowed-by-local.invalid.expected", input, expected).await; } -#[test] -fn query_with_unused_variable_error_suppressed() { +#[tokio::test] +async fn query_with_unused_variable_error_suppressed() { let input = include_str!("validate_unused_variables/fixtures/query-with-unused-variable-error-suppressed.graphql"); let expected = include_str!("validate_unused_variables/fixtures/query-with-unused-variable-error-suppressed.expected"); - test_fixture(transform_fixture, "query-with-unused-variable-error-suppressed.graphql", "validate_unused_variables/fixtures/query-with-unused-variable-error-suppressed.expected", input, expected); + test_fixture(transform_fixture, 
file!(), "query-with-unused-variable-error-suppressed.graphql", "validate_unused_variables/fixtures/query-with-unused-variable-error-suppressed.expected", input, expected).await; } -#[test] -fn query_with_unused_variable_invalid() { +#[tokio::test] +async fn query_with_unused_variable_invalid() { let input = include_str!("validate_unused_variables/fixtures/query-with-unused-variable.invalid.graphql"); let expected = include_str!("validate_unused_variables/fixtures/query-with-unused-variable.invalid.expected"); - test_fixture(transform_fixture, "query-with-unused-variable.invalid.graphql", "validate_unused_variables/fixtures/query-with-unused-variable.invalid.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-unused-variable.invalid.graphql", "validate_unused_variables/fixtures/query-with-unused-variable.invalid.expected", input, expected).await; } -#[test] -fn query_with_variables_shadowed_by_local_variable_and_used_as_root_variable() { +#[tokio::test] +async fn query_with_variables_shadowed_by_local_variable_and_used_as_root_variable() { let input = include_str!("validate_unused_variables/fixtures/query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.graphql"); let expected = include_str!("validate_unused_variables/fixtures/query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.expected"); - test_fixture(transform_fixture, "query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.graphql", "validate_unused_variables/fixtures/query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.graphql", "validate_unused_variables/fixtures/query-with-variables-shadowed-by-local-variable-and-used-as-root-variable.expected", input, expected).await; } -#[test] -fn variable_in_the_complex_object_list() { +#[tokio::test] +async fn 
variable_in_the_complex_object_list() { let input = include_str!("validate_unused_variables/fixtures/variable-in-the-complex-object-list.graphql"); let expected = include_str!("validate_unused_variables/fixtures/variable-in-the-complex-object-list.expected"); - test_fixture(transform_fixture, "variable-in-the-complex-object-list.graphql", "validate_unused_variables/fixtures/variable-in-the-complex-object-list.expected", input, expected); + test_fixture(transform_fixture, file!(), "variable-in-the-complex-object-list.graphql", "validate_unused_variables/fixtures/variable-in-the-complex-object-list.expected", input, expected).await; } diff --git a/compiler/crates/relay-typegen/Cargo.toml b/compiler/crates/relay-typegen/Cargo.toml index 968e9c780f4f7..3a1d7a5553709 100644 --- a/compiler/crates/relay-typegen/Cargo.toml +++ b/compiler/crates/relay-typegen/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/relay-typegen:[generate_flow_test,generate_flow_test_with_custom_id,generate_typescript_test,relay-typegen] + [package] name = "relay-typegen" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -15,9 +17,9 @@ common = { path = "../common" } docblock-shared = { path = "../docblock-shared" } fnv = "1.0" graphql-ir = { path = "../graphql-ir" } -indexmap = { version = "1.9.2", features = ["arbitrary", "rayon", "serde-1"] } +indexmap = { version = "2.2.6", features = ["arbitrary", "rayon", "serde"] } intern = { path = "../intern" } -itertools = "0.10.3" +itertools = "0.11.0" lazy_static = "1.4" relay-config = { path = "../relay-config" } relay-schema = { path = "../relay-schema" } @@ -30,3 +32,4 @@ graphql-syntax = { path = "../graphql-syntax" } graphql-test-helpers = { path = "../graphql-test-helpers" } relay-codegen = { path = "../relay-codegen" } relay-test-schema = { path = "../relay-test-schema" } +tokio = { version = "1.37.0", features = ["full", 
"test-util", "tracing"] } diff --git a/compiler/crates/relay-typegen/src/flow.rs b/compiler/crates/relay-typegen/src/flow.rs index 16687d937ec96..824f8ef4c9830 100644 --- a/compiler/crates/relay-typegen/src/flow.rs +++ b/compiler/crates/relay-typegen/src/flow.rs @@ -42,7 +42,7 @@ impl Writer for FlowPrinter { AST::OtherTypename => self.write_other_string(), AST::Number => write!(&mut self.result, "number"), AST::Boolean => write!(&mut self.result, "boolean"), - AST::Callable(return_type) => self.write_callable(&*return_type), + AST::Callable(return_type) => self.write_callable(return_type), AST::Identifier(identifier) => write!(&mut self.result, "{}", identifier), AST::RawType(raw) => write!(&mut self.result, "{}", raw), AST::Union(members) => self.write_union(members), @@ -54,7 +54,7 @@ impl Writer for FlowPrinter { AST::Local3DPayload(document_name, selections) => { self.write_local_3d_payload(*document_name, selections) } - AST::FragmentReference(fragments) => self.write_fragment_references(&***fragments), + AST::FragmentReference(fragments) => self.write_fragment_references(fragments), AST::FragmentReferenceType(fragment) => { write!(&mut self.result, "{}$fragmentType", fragment) } @@ -85,10 +85,10 @@ impl Writer for FlowPrinter { "FragmentType" } - fn write_local_type(&mut self, name: &str, value: &AST) -> FmtResult { - write!(&mut self.result, "type {} = ", name)?; + fn write_type_assertion(&mut self, name: &str, value: &AST) -> FmtResult { + write!(&mut self.result, "({}: ", name)?; self.write(value)?; - writeln!(&mut self.result, ";") + writeln!(&mut self.result, ");") } fn write_export_type(&mut self, name: &str, value: &AST) -> FmtResult { @@ -312,11 +312,7 @@ impl FlowPrinter { } fn write_return_type_of_function_with_name(&mut self, function_name: StringKey) -> FmtResult { - write!( - &mut self.result, - "$Call<((...empty[]) => R) => R, typeof {}>", - function_name - ) + write!(&mut self.result, "ReturnType", function_name) } fn 
write_return_type_of_method_call( @@ -324,7 +320,7 @@ impl FlowPrinter { object: &AST, method_name: StringKey, ) -> FmtResult { - write!(&mut self.result, "$Call<")?; + write!(&mut self.result, "ReturnType<")?; self.write(object)?; write!(&mut self.result, "[\"{}\"]>", method_name) } @@ -616,7 +612,7 @@ mod tests { fn function_return_type() { assert_eq!( print_type(&AST::ReturnTypeOfFunctionWithName("someFunc".intern())), - "$Call<((...empty[]) => R) => R, typeof someFunc>".to_string() + "ReturnType".to_string() ); } } diff --git a/compiler/crates/relay-typegen/src/javascript.rs b/compiler/crates/relay-typegen/src/javascript.rs index e3e582ccdba05..ccdeb20329789 100644 --- a/compiler/crates/relay-typegen/src/javascript.rs +++ b/compiler/crates/relay-typegen/src/javascript.rs @@ -35,7 +35,7 @@ impl Writer for JavaScriptPrinter { "" } - fn write_local_type(&mut self, _name: &str, _value: &AST) -> FmtResult { + fn write_type_assertion(&mut self, _name: &str, _value: &AST) -> FmtResult { Ok(()) } diff --git a/compiler/crates/relay-typegen/src/lib.rs b/compiler/crates/relay-typegen/src/lib.rs index fc7902c15a0eb..3e3e335ff82ae 100644 --- a/compiler/crates/relay-typegen/src/lib.rs +++ b/compiler/crates/relay-typegen/src/lib.rs @@ -44,11 +44,8 @@ static REACT_RELAY_MULTI_ACTOR: &str = "react-relay/multi-actor"; static RELAY_RUNTIME: &str = "relay-runtime"; static LOCAL_3D_PAYLOAD: &str = "Local3DPayload"; static ACTOR_CHANGE_POINT: &str = "ActorChangePoint"; -pub static PROVIDED_VARIABLE_TYPE: &str = "ProvidedVariablesType"; static VALIDATOR_EXPORT_NAME: &str = "validate"; static LIVE_RESOLVERS_LIVE_STATE: &str = "LiveState"; -static LIVE_RESOLVERS_EXPERIMENTAL_STORE_PATH: &str = - "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; lazy_static! 
{ static ref KEY_CLIENTID: StringKey = "__id".intern(); @@ -111,11 +108,42 @@ pub(crate) enum MaskStatus { Masked, } +pub fn generate_fragment_type_exports_section_from_extra_artifact( + fragment_definition: &FragmentDefinition, + schema: &SDLSchema, + project_config: &ProjectConfig, + fragment_locations: &FragmentLocations, +) -> String { + generate_fragment_type_exports_section_impl( + fragment_definition, + schema, + project_config, + fragment_locations, + true, + ) +} + pub fn generate_fragment_type_exports_section( fragment_definition: &FragmentDefinition, schema: &SDLSchema, project_config: &ProjectConfig, fragment_locations: &FragmentLocations, +) -> String { + generate_fragment_type_exports_section_impl( + fragment_definition, + schema, + project_config, + fragment_locations, + false, + ) +} + +fn generate_fragment_type_exports_section_impl( + fragment_definition: &FragmentDefinition, + schema: &SDLSchema, + project_config: &ProjectConfig, + fragment_locations: &FragmentLocations, + is_extra_artifact_branch_module: bool, ) -> String { let typegen_context = TypegenContext::new( schema, @@ -126,7 +154,10 @@ pub fn generate_fragment_type_exports_section( .is_some(), fragment_definition.name.map(|x| x.0), fragment_locations, - false, + TypegenOptions { + no_optional_fields_in_raw_response_type: false, + is_extra_artifact_branch_module, + }, ); let mut writer = new_writer_from_config(&project_config.typegen_config); write_fragment_type_exports_section(&typegen_context, fragment_definition, &mut writer) @@ -149,7 +180,10 @@ pub fn generate_named_validator_export( .is_some(), fragment_definition.name.map(|x| x.0), fragment_locations, - false, + TypegenOptions { + no_optional_fields_in_raw_response_type: false, + is_extra_artifact_branch_module: false, + }, ); let mut writer = new_writer_from_config(&project_config.typegen_config); write_validator_function(&typegen_context, fragment_definition, &mut writer).unwrap(); @@ -171,6 +205,7 @@ pub fn 
generate_operation_type_exports_section( schema: &SDLSchema, project_config: &ProjectConfig, fragment_locations: &FragmentLocations, + maybe_provided_variables: Option, ) -> String { let typegen_context = TypegenContext::new( schema, @@ -184,7 +219,10 @@ pub fn generate_operation_type_exports_section( typegen_operation.name.item.0, ), fragment_locations, - false, + TypegenOptions { + no_optional_fields_in_raw_response_type: false, + is_extra_artifact_branch_module: false, + }, ); let mut writer = new_writer_from_config(&project_config.typegen_config); write_operation_type_exports_section( @@ -192,6 +230,7 @@ pub fn generate_operation_type_exports_section( typegen_operation, normalization_operation, &mut writer, + maybe_provided_variables, ) .unwrap(); writer.into_string() @@ -217,7 +256,10 @@ pub fn generate_split_operation_type_exports_section( typegen_operation.name.item.0, ), fragment_locations, - no_optional_fields_in_raw_response_type, + TypegenOptions { + no_optional_fields_in_raw_response_type, + is_extra_artifact_branch_module: false, + }, ); let mut writer = new_writer_from_config(&project_config.typegen_config); @@ -240,8 +282,7 @@ struct TypegenContext<'a> { has_unified_output: bool, generating_updatable_types: bool, definition_source_location: WithLocation, - // All keys in raw response should be required - no_optional_fields_in_raw_response_type: bool, + typegen_options: TypegenOptions, } impl<'a> TypegenContext<'a> { @@ -251,7 +292,7 @@ impl<'a> TypegenContext<'a> { generating_updatable_types: bool, definition_source_location: WithLocation, fragment_locations: &'a FragmentLocations, - no_optional_fields_in_raw_response_type: bool, + typegen_options: TypegenOptions, ) -> Self { Self { schema, @@ -260,7 +301,14 @@ impl<'a> TypegenContext<'a> { has_unified_output: project_config.output.is_some(), generating_updatable_types, definition_source_location, - no_optional_fields_in_raw_response_type, + typegen_options, } } } + +struct TypegenOptions { + // All 
keys in raw response should be required + no_optional_fields_in_raw_response_type: bool, + // Some extra artifacts require special type generation + is_extra_artifact_branch_module: bool, +} diff --git a/compiler/crates/relay-typegen/src/typegen_state.rs b/compiler/crates/relay-typegen/src/typegen_state.rs index 2121d9bf5d783..4113aaabb85c7 100644 --- a/compiler/crates/relay-typegen/src/typegen_state.rs +++ b/compiler/crates/relay-typegen/src/typegen_state.rs @@ -25,7 +25,6 @@ use crate::writer::ExactObject; use crate::writer::Writer; use crate::writer::AST; use crate::KEY_DATA_ID; -use crate::LIVE_RESOLVERS_EXPERIMENTAL_STORE_PATH; use crate::LIVE_RESOLVERS_LIVE_STATE; use crate::LOCAL_3D_PAYLOAD; use crate::RELAY_RUNTIME; @@ -42,14 +41,10 @@ pub(crate) struct RuntimeImports { impl RuntimeImports { pub(crate) fn write_runtime_imports(&self, writer: &mut Box) -> FmtResult { + let mut runtime_import_types = vec![]; if self.resolver_live_state_type { - writer.write_import_type( - &[LIVE_RESOLVERS_LIVE_STATE], - LIVE_RESOLVERS_EXPERIMENTAL_STORE_PATH, - )?; + runtime_import_types.push(LIVE_RESOLVERS_LIVE_STATE); } - - let mut runtime_import_types = vec![]; if self.generic_fragment_type { runtime_import_types.push(writer.get_runtime_fragment_import()) } diff --git a/compiler/crates/relay-typegen/src/typescript.rs b/compiler/crates/relay-typegen/src/typescript.rs index 10e0d472fab44..f07f03e914be8 100644 --- a/compiler/crates/relay-typegen/src/typescript.rs +++ b/compiler/crates/relay-typegen/src/typescript.rs @@ -26,6 +26,7 @@ use crate::KEY_FRAGMENT_TYPE; pub struct TypeScriptPrinter { result: String, use_import_type_syntax: bool, + include_undefined_in_nullable_union: bool, indentation: usize, } @@ -53,7 +54,7 @@ impl Writer for TypeScriptPrinter { AST::OtherTypename => self.write_other_string(), AST::Number => write!(&mut self.result, "number"), AST::Boolean => write!(&mut self.result, "boolean"), - AST::Callable(return_type) => self.write_callable(&*return_type), + 
AST::Callable(return_type) => self.write_callable(return_type), AST::Identifier(identifier) => write!(&mut self.result, "{}", identifier), AST::RawType(raw) => write!(&mut self.result, "{}", raw), AST::Union(members) => self.write_union(members), @@ -91,10 +92,10 @@ impl Writer for TypeScriptPrinter { } } - fn write_local_type(&mut self, name: &str, value: &AST) -> FmtResult { - write!(&mut self.result, "type {} = ", name)?; + fn write_type_assertion(&mut self, name: &str, value: &AST) -> FmtResult { + write!(&mut self.result, "({} as ", name)?; self.write(value)?; - writeln!(&mut self.result, ";") + writeln!(&mut self.result, ");") } fn write_export_type(&mut self, name: &str, value: &AST) -> FmtResult { @@ -170,6 +171,8 @@ impl TypeScriptPrinter { result: String::new(), indentation: 0, use_import_type_syntax: config.use_import_type_syntax, + include_undefined_in_nullable_union: !config + .typescript_exclude_undefined_from_nullable_union, } } @@ -212,20 +215,28 @@ impl TypeScriptPrinter { fn write_nullable(&mut self, of_type: &AST) -> FmtResult { let null_type = AST::RawType("null".intern()); + let undefined_type = AST::RawType("undefined".intern()); if let AST::Union(members) = of_type { let mut new_members = Vec::with_capacity(members.len() + 1); new_members.extend_from_slice(members); new_members.push(null_type); - self.write_union(&*new_members)?; + if self.include_undefined_in_nullable_union { + new_members.push(undefined_type); + } + self.write_union(&new_members)?; } else { - self.write_union(&*vec![of_type.clone(), null_type])?; + let mut union_members = vec![of_type.clone(), null_type]; + if self.include_undefined_in_nullable_union { + union_members.push(undefined_type) + } + self.write_union(&union_members)?; } Ok(()) } fn write_object(&mut self, props: &[Prop]) -> FmtResult { if props.is_empty() { - write!(&mut self.result, "{{}}")?; + write!(&mut self.result, "Record")?; return Ok(()); } @@ -233,7 +244,7 @@ impl TypeScriptPrinter { // are missing a 
newline. if props.len() == 1 { if let Prop::Spread(_) = props[0] { - write!(&mut self.result, "{{}}")?; + write!(&mut self.result, "Record")?; return Ok(()); } } @@ -276,10 +287,22 @@ impl TypeScriptPrinter { self.write(&key_value_pair.value)?; writeln!(&mut self.result, ";")?; } - Prop::GetterSetterPair(_) => { - panic!( - "Getters and setters with different types are not implemented in typescript. See https://github.com/microsoft/TypeScript/issues/43662" - ); + Prop::GetterSetterPair(getter_setter_pair) => { + // Write the getter + self.write_indentation()?; + write!(&mut self.result, "get ")?; + self.write(&AST::Identifier(getter_setter_pair.key))?; + write!(&mut self.result, "(): ")?; + self.write(&getter_setter_pair.getter_return_value)?; + writeln!(&mut self.result, ";")?; + + // Write the setter + self.write_indentation()?; + write!(&mut self.result, "set ")?; + self.write(&AST::Identifier(getter_setter_pair.key))?; + write!(&mut self.result, "(value: ")?; + self.write(&getter_setter_pair.setter_parameter)?; + writeln!(&mut self.result, ");")?; } } } @@ -387,14 +410,14 @@ mod tests { fn nullable_type() { assert_eq!( print_type(&AST::Nullable(Box::new(AST::String))), - "string | null".to_string() + "string | null | undefined".to_string() ); assert_eq!( print_type(&AST::Nullable(Box::new(AST::Union(SortedASTList::new( vec![AST::String, AST::Number], ))))), - "string | number | null" + "string | number | null | undefined" ) } @@ -402,7 +425,7 @@ mod tests { fn exact_object() { assert_eq!( print_type(&AST::ExactObject(ExactObject::new(Vec::new()))), - r"{}".to_string() + r"Record".to_string() ); assert_eq!( @@ -487,7 +510,7 @@ mod tests { fn inexact_object() { assert_eq!( print_type(&AST::InexactObject(InexactObject::new(Vec::new()))), - "{}".to_string() + "Record".to_string() ); assert_eq!( diff --git a/compiler/crates/relay-typegen/src/visit.rs b/compiler/crates/relay-typegen/src/visit.rs index ee976b8107487..7530167be3707 100644 --- 
a/compiler/crates/relay-typegen/src/visit.rs +++ b/compiler/crates/relay-typegen/src/visit.rs @@ -5,6 +5,7 @@ * LICENSE file in the root directory of this source tree. */ +use std::collections::HashSet; use std::hash::Hash; use std::path::PathBuf; use std::sync::Arc; @@ -16,7 +17,10 @@ use ::intern::Lookup; use common::ArgumentName; use common::DirectiveName; use common::NamedItem; +use docblock_shared::FRAGMENT_KEY_ARGUMENT_NAME; use docblock_shared::KEY_RESOLVER_ID_FIELD; +use docblock_shared::RELAY_RESOLVER_DIRECTIVE_NAME; +use docblock_shared::RELAY_RESOLVER_MODEL_INSTANCE_FIELD; use docblock_shared::RESOLVER_VALUE_SCALAR_NAME; use graphql_ir::Condition; use graphql_ir::Directive; @@ -30,9 +34,12 @@ use graphql_ir::Selection; use indexmap::map::Entry; use indexmap::IndexMap; use indexmap::IndexSet; +use itertools::Itertools; +use lazy_static::lazy_static; use relay_config::CustomScalarType; use relay_config::CustomScalarTypeImport; use relay_config::TypegenLanguage; +use relay_schema::definitions::ResolverType; use relay_schema::CUSTOM_SCALAR_DIRECTIVE_NAME; use relay_schema::EXPORT_NAME_CUSTOM_SCALAR_ARGUMENT_NAME; use relay_schema::PATH_CUSTOM_SCALAR_ARGUMENT_NAME; @@ -52,6 +59,7 @@ use relay_transforms::RELAY_ACTOR_CHANGE_DIRECTIVE_FOR_CODEGEN; use relay_transforms::UPDATABLE_DIRECTIVE_FOR_TYPEGEN; use schema::EnumID; use schema::Field; +use schema::ObjectID; use schema::SDLSchema; use schema::ScalarID; use schema::Schema; @@ -110,6 +118,13 @@ use crate::TYPE_INT; use crate::TYPE_STRING; use crate::VARIABLES; +lazy_static! 
{ + static ref THROW_ON_FIELD_ERROR_DIRECTIVE: DirectiveName = + DirectiveName("throwOnFieldError".intern()); + static ref SEMANTIC_NON_NULL_DIRECTIVE: DirectiveName = + DirectiveName("semanticNonNull".intern()); +} + #[allow(clippy::too_many_arguments)] pub(crate) fn visit_selections( typegen_context: &'_ TypegenContext<'_>, @@ -123,6 +138,7 @@ pub(crate) fn visit_selections( custom_scalars: &mut CustomScalarsImports, runtime_imports: &mut RuntimeImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) -> Vec { let mut type_selections = Vec::new(); for selection in selections { @@ -138,6 +154,7 @@ pub(crate) fn visit_selections( encountered_fragments, imported_resolvers, runtime_imports, + is_throw_on_field_error, ), Selection::InlineFragment(inline_fragment) => visit_inline_fragment( typegen_context, @@ -152,6 +169,7 @@ pub(crate) fn visit_selections( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ), Selection::LinkedField(linked_field) => { let linked_field_type = typegen_context @@ -166,7 +184,7 @@ pub(crate) fn visit_selections( Some(linked_field_type) }; gen_visit_linked_field( - typegen_context.schema, + typegen_context, &mut type_selections, linked_field, |selections| { @@ -182,8 +200,10 @@ pub(crate) fn visit_selections( custom_scalars, runtime_imports, nested_enclosing_linked_field_concrete_type, + is_throw_on_field_error, ) }, + is_throw_on_field_error, ) } Selection::ScalarField(scalar_field) => { @@ -203,6 +223,7 @@ pub(crate) fn visit_selections( resolver_metadata, RequiredMetadataDirective::find(&scalar_field.directives).is_some(), imported_resolvers, + is_throw_on_field_error, ); } else { visit_scalar_field( @@ -212,6 +233,7 @@ pub(crate) fn visit_selections( encountered_enums, custom_scalars, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ) } } @@ -228,6 +250,7 @@ pub(crate) fn visit_selections( custom_scalars, runtime_imports, 
enclosing_linked_field_concrete_type, + is_throw_on_field_error, ), } } @@ -246,6 +269,7 @@ fn visit_fragment_spread( encountered_fragments: &mut EncounteredFragments, imported_resolvers: &mut ImportedResolvers, runtime_imports: &mut RuntimeImports, + is_throw_on_field_error: bool, ) { if let Some(resolver_metadata) = RelayResolverMetadata::find(&fragment_spread.directives) { visit_relay_resolver( @@ -261,6 +285,7 @@ fn visit_fragment_spread( resolver_metadata, RequiredMetadataDirective::find(&fragment_spread.directives).is_some(), imported_resolvers, + is_throw_on_field_error, ); } else { let name = fragment_spread.fragment.item; @@ -282,12 +307,16 @@ fn visit_fragment_spread( let selection = if let Some(fragment_alias_metadata) = FragmentAliasMetadata::find(&fragment_spread.directives) { + // If/when @required is supported here, we would apply that to this type reference. + // TODO: What about plural fragments, is that just handled by the parent? + let mut node_type = TypeReference::Named(fragment_alias_metadata.selection_type); + if fragment_alias_metadata.non_nullable { + node_type = TypeReference::NonNull(Box::new(node_type)); + } // We will model the types as a linked filed containing just the fragment spread. TypeSelection::LinkedField(TypeSelectionLinkedField { field_name_or_alias: fragment_alias_metadata.alias.item, - // If/when @required is supported here, we would apply that to this type reference. - // TODO: What about plural fragments, is that just handled by the parent? - node_type: TypeReference::Named(fragment_alias_metadata.selection_type), + node_type, node_selections: selections_to_map(vec![spread_selection].into_iter(), true), conditional: false, concrete_type: None, @@ -312,7 +341,9 @@ fn generate_resolver_type( fragment_name: Option, resolver_metadata: &RelayResolverMetadata, ) -> AST { + // For the purposes of function type assertion, we always use the semantic type. 
let schema_field = resolver_metadata.field(typegen_context.schema); + let schema_field_type = schema_field.semantic_type(); let resolver_arguments = get_resolver_arguments( fragment_name, @@ -328,51 +359,37 @@ fn generate_resolver_type( let inner_ast = match &resolver_metadata.output_type_info { ResolverOutputTypeInfo::ScalarField => { if is_relay_resolver_type(typegen_context, schema_field) { - AST::Mixed + match schema_field_type.is_non_null() { + true => AST::NonNullable(Box::new(AST::Mixed)), + false => AST::Mixed, + } } else { - transform_type_reference_into_ast(&schema_field.type_, |type_| { - expect_scalar_type(typegen_context, encountered_enums, custom_scalars, type_) - }) + let type_ = &schema_field_type.inner(); + expect_scalar_type(typegen_context, encountered_enums, custom_scalars, type_) } } ResolverOutputTypeInfo::Composite(normalization_info) => { - imported_raw_response_types.0.insert( - normalization_info.normalization_operation.item.0, - Some(normalization_info.normalization_operation.location), - ); - - let type_ = AST::Nullable(Box::new(AST::RawType( - normalization_info.normalization_operation.item.0, - ))); - - let ast = if let Some(field_type) = normalization_info.weak_object_instance_field { - transform_type_reference_into_ast( - &typegen_context.schema.field(field_type).type_, - |type_| { - expect_scalar_type( - typegen_context, - encountered_enums, - custom_scalars, - type_, - ) - }, - ) + if let Some(field_id) = normalization_info.weak_object_instance_field { + let type_ = &typegen_context.schema.field(field_id).type_.inner(); + expect_scalar_type(typegen_context, encountered_enums, custom_scalars, type_) } else { - type_ - }; - - if normalization_info.plural { - AST::ReadOnlyArray(Box::new(ast)) - } else { - ast + imported_raw_response_types.0.insert( + normalization_info.normalization_operation.item.0, + Some(normalization_info.normalization_operation.location), + ); + AST::RawType(normalization_info.normalization_operation.item.0) } } 
- ResolverOutputTypeInfo::EdgeTo => { - create_edge_to_return_type_ast(schema_field, typegen_context.schema, runtime_imports) - } + ResolverOutputTypeInfo::EdgeTo => create_edge_to_return_type_ast( + &schema_field_type.inner(), + typegen_context.schema, + runtime_imports, + ), ResolverOutputTypeInfo::Legacy => AST::Mixed, }; + let ast = transform_type_reference_into_ast(&schema_field_type, |_| inner_ast); + let return_type = if matches!( typegen_context.project_config.typegen_config.language, TypegenLanguage::TypeScript @@ -383,10 +400,10 @@ fn generate_resolver_type( runtime_imports.resolver_live_state_type = true; AST::GenericType { outer: *LIVE_STATE_TYPE, - inner: Box::new(inner_ast), + inner: Box::new(ast), } } else { - inner_ast + ast }; AST::AssertFunctionType(FunctionTypeAssertion { @@ -396,6 +413,56 @@ fn generate_resolver_type( }) } +fn add_fragment_name_to_encountered_fragments( + fragment_name: FragmentDefinitionName, + encountered_fragments: &mut EncounteredFragments, +) { + encountered_fragments + .0 + .insert(EncounteredFragment::Data(fragment_name)); +} + +fn get_fragment_data_type(fragment_name: StringKey) -> Box { + Box::new(AST::RawType(format!("{}$data", fragment_name).intern())) +} + +fn add_model_argument_for_interface_resolver( + resolver_arguments: &mut Vec, + encountered_fragments: &mut EncounteredFragments, + implementing_objects: HashSet, + typegen_context: &TypegenContext<'_>, +) { + let mut model_types_for_type_assertion = vec![]; + for object_id in implementing_objects.iter().sorted() { + if !Type::Object(*object_id).is_terse_resolver_object(typegen_context.schema) { + continue; + } + let type_name = typegen_context.schema.object(*object_id).name.item.0; + let fragment_name = typegen_context + .project_config + .name + .generate_name_for_object_and_field(type_name, *RELAY_RESOLVER_MODEL_INSTANCE_FIELD) + .intern(); + add_fragment_name_to_encountered_fragments( + FragmentDefinitionName(fragment_name), + encountered_fragments, + ); + 
model_types_for_type_assertion.push(AST::PropertyType { + type_: get_fragment_data_type(fragment_name), + property_name: *RELAY_RESOLVER_MODEL_INSTANCE_FIELD, + }); + } + if !model_types_for_type_assertion.is_empty() { + let interface_union_type = AST::Union(SortedASTList::new(model_types_for_type_assertion)); + resolver_arguments.push(KeyValuePairProp { + key: "model".intern(), + optional: false, + read_only: false, + value: interface_union_type, + }); + } +} + #[allow(clippy::too_many_arguments)] fn get_resolver_arguments( fragment_name: Option, @@ -408,22 +475,42 @@ fn get_resolver_arguments( schema_field: &Field, ) -> Vec { let mut resolver_arguments = vec![]; + if let Some(Type::Interface(interface_id)) = schema_field.parent_type { + let interface = typegen_context.schema.interface(interface_id); + let implementing_objects = + interface.recursively_implementing_objects(typegen_context.schema); + let resolver_directive = schema_field + .directives + .named(*RELAY_RESOLVER_DIRECTIVE_NAME) + .unwrap(); + // Add model argument if @rootFragment is not set on the resolver field + if !resolver_directive + .arguments + .iter() + .any(|arg| arg.name.0 == FRAGMENT_KEY_ARGUMENT_NAME.0) + { + add_model_argument_for_interface_resolver( + &mut resolver_arguments, + encountered_fragments, + implementing_objects, + typegen_context, + ) + } + } if let Some(fragment_name) = fragment_name { if let Some((fragment_name, injection_mode)) = resolver_metadata.fragment_data_injection_mode { match injection_mode { FragmentDataInjectionMode::Field { name, .. 
} => { - encountered_fragments - .0 - .insert(EncounteredFragment::Data(fragment_name.item)); - + add_fragment_name_to_encountered_fragments( + fragment_name.item, + encountered_fragments, + ); resolver_arguments.push(KeyValuePairProp { key: name, value: AST::PropertyType { - type_: Box::new(AST::RawType( - format!("{}$data", fragment_name.item).intern(), - )), + type_: get_fragment_data_type(fragment_name.item.0), property_name: name, }, read_only: false, @@ -447,7 +534,7 @@ fn get_resolver_arguments( let mut args = vec![]; for field_argument in schema_field.arguments.iter() { args.push(Prop::KeyValuePair(KeyValuePairProp { - key: field_argument.name.0, + key: field_argument.name.item.0, optional: false, read_only: false, value: transform_input_type( @@ -494,9 +581,11 @@ fn import_relay_resolver_function_type( ImportedResolverName::Default(local_resolver_name) }; - let import_path = typegen_context.project_config.js_module_import_path( - typegen_context.definition_source_location, - resolver_metadata.import_path, + let import_path = typegen_context.project_config.js_module_import_identifier( + &typegen_context + .project_config + .artifact_path_for_definition(typegen_context.definition_source_location), + &PathBuf::from(resolver_metadata.import_path.lookup()), ); let imported_resolver = ImportedResolver { @@ -534,6 +623,7 @@ fn is_relay_resolver_type(typegen_context: &'_ TypegenContext<'_>, field: &Field } /// Build relay resolver field type +#[allow(clippy::too_many_arguments)] fn relay_resolver_field_type( typegen_context: &'_ TypegenContext<'_>, resolver_metadata: &RelayResolverMetadata, @@ -542,6 +632,7 @@ fn relay_resolver_field_type( local_resolver_name: StringKey, required: bool, live: bool, + is_throw_on_field_error: bool, ) -> AST { let maybe_scalar_field = if let ResolverOutputTypeInfo::ScalarField = resolver_metadata.output_type_info { @@ -559,11 +650,15 @@ fn relay_resolver_field_type( }; if let Some(field) = maybe_scalar_field { - let inner_value = 
transform_type_reference_into_ast(&field.type_, |type_| { + let type_ = match is_throw_on_field_error { + true => field.semantic_type(), + false => field.type_.clone(), + }; + let inner_value = transform_type_reference_into_ast(&type_, |type_| { expect_scalar_type(typegen_context, encountered_enums, custom_scalars, type_) }); if required { - if field.type_.is_non_null() { + if type_.is_non_null() { inner_value } else { AST::NonNullable(Box::new(inner_value)) @@ -572,13 +667,19 @@ fn relay_resolver_field_type( inner_value } } else { + let field = resolver_metadata.field(typegen_context.schema); + let field_type = match is_throw_on_field_error { + true => field.semantic_type(), + false => field.type_.clone(), + }; + let inner_value = AST::ReturnTypeOfFunctionWithName(local_resolver_name); let inner_value = if live { AST::ReturnTypeOfMethodCall(Box::new(inner_value), intern!("read")) } else { inner_value }; - if required { + if required || field_type.is_non_null() { AST::NonNullable(Box::new(inner_value)) } else { AST::Nullable(Box::new(inner_value)) @@ -600,6 +701,7 @@ fn visit_relay_resolver( resolver_metadata: &RelayResolverMetadata, required: bool, imported_resolvers: &mut ImportedResolvers, + is_throw_on_field_error: bool, ) { import_relay_resolver_function_type( typegen_context, @@ -629,6 +731,7 @@ fn visit_relay_resolver( local_resolver_name, required, live, + is_throw_on_field_error, ); type_selections.push(TypeSelection::ScalarField(TypeSelectionScalarField { @@ -654,6 +757,7 @@ fn visit_client_edge( imported_resolvers: &mut ImportedResolvers, runtime_imports: &mut RuntimeImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { let (resolver_metadata, fragment_name) = match &client_edge_metadata.backing_field { Selection::FragmentSpread(fragment_spread) => ( @@ -697,6 +801,7 @@ fn visit_client_edge( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); 
type_selections.append(&mut client_edge_selections); } @@ -715,6 +820,7 @@ fn visit_inline_fragment( custom_scalars: &mut CustomScalarsImports, runtime_imports: &mut RuntimeImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { if let Some(module_metadata) = ModuleMetadata::find(&inline_fragment.directives) { let name = module_metadata.fragment_name; @@ -758,6 +864,7 @@ fn visit_inline_fragment( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); } else if let Some(client_edge_metadata) = ClientEdgeMetadata::find(inline_fragment) { visit_client_edge( @@ -773,6 +880,7 @@ fn visit_inline_fragment( imported_resolvers, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); } else { let mut inline_selections = visit_selections( @@ -787,26 +895,27 @@ fn visit_inline_fragment( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + inline_fragment + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE) + .is_some(), ); let mut selections = if let Some(fragment_alias_metadata) = FragmentAliasMetadata::find(&inline_fragment.directives) { // We will model the types as a linked filed containing just the fragment spread. + let mut node_type = TypeReference::Named(fragment_alias_metadata.selection_type); + if fragment_alias_metadata.non_nullable { + node_type = TypeReference::NonNull(Box::new(node_type)); + } + + // With @required, null might bubble up to this synthetic field, so we need to apply that nullability here. + node_type = + apply_required_directive_nullability(&node_type, &inline_fragment.directives); vec![TypeSelection::LinkedField(TypeSelectionLinkedField { field_name_or_alias: fragment_alias_metadata.alias.item, - // We currently make inline fragment aliases always nullable - // because we want to be able to use them to be able to null - // them out in the case of missing data. 
If we choose to - // change that decision, ane make them non-nullable in the - // case where the type condition will always match, we must - // be sure to update this logic to account for the - // possibility that a `@required` has bubbled up to this - // field. - - // Additionally, if/when @required is supported _on_ aliased - // fragments, we would apply that to this type reference. - node_type: TypeReference::Named(fragment_alias_metadata.selection_type), + node_type, node_selections: selections_to_map(inline_selections.into_iter(), true), conditional: false, concrete_type: None, @@ -849,6 +958,7 @@ fn visit_actor_change( custom_scalars: &mut CustomScalarsImports, runtime_imports: &mut RuntimeImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { let linked_field = match &inline_fragment.selections[0] { Selection::LinkedField(linked_field) => linked_field, @@ -878,6 +988,7 @@ fn visit_actor_change( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); type_selections.push(TypeSelection::ScalarField(TypeSelectionScalarField { field_name_or_alias: key, @@ -913,6 +1024,7 @@ fn raw_response_visit_inline_fragment( runtime_imports: &mut RuntimeImports, custom_scalars: &mut CustomScalarsImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { let mut selections = raw_response_visit_selections( typegen_context, @@ -924,6 +1036,7 @@ fn raw_response_visit_inline_fragment( runtime_imports, custom_scalars, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); if inline_fragment .directives @@ -970,12 +1083,13 @@ fn raw_response_visit_inline_fragment( } fn gen_visit_linked_field( - schema: &SDLSchema, + typegen_context: &'_ TypegenContext<'_>, type_selections: &mut Vec, linked_field: &LinkedField, mut visit_selections_fn: impl FnMut(&[Selection]) -> Vec, + is_throw_on_field_error: bool, ) { - let field = 
schema.field(linked_field.definition.item); + let field = typegen_context.schema.field(linked_field.definition.item); let schema_name = field.name.item; let key = if let Some(alias) = linked_field.alias { alias.item @@ -984,7 +1098,10 @@ fn gen_visit_linked_field( }; let selections = visit_selections_fn(&linked_field.selections); - let node_type = apply_required_directive_nullability(&field.type_, &linked_field.directives); + let node_type = match is_throw_on_field_error { + true => apply_directive_nullability(field, &linked_field.directives), + false => apply_required_directive_nullability(&field.type_, &linked_field.directives), + }; type_selections.push(TypeSelection::LinkedField(TypeSelectionLinkedField { field_name_or_alias: key, @@ -1002,6 +1119,7 @@ fn visit_scalar_field( encountered_enums: &mut EncounteredEnums, custom_scalars: &mut CustomScalarsImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { let field = typegen_context.schema.field(scalar_field.definition.item); let schema_name = field.name.item; @@ -1010,7 +1128,10 @@ fn visit_scalar_field( } else { schema_name }; - let field_type = apply_required_directive_nullability(&field.type_, &scalar_field.directives); + let field_type = match is_throw_on_field_error { + true => apply_directive_nullability(field, &scalar_field.directives), + false => apply_required_directive_nullability(&field.type_, &scalar_field.directives), + }; let special_field = ScalarFieldSpecialSchemaField::from_schema_name( schema_name, &typegen_context.project_config.schema_config, @@ -1066,6 +1187,7 @@ fn visit_condition( custom_scalars: &mut CustomScalarsImports, runtime_imports: &mut RuntimeImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) { let mut selections = visit_selections( typegen_context, @@ -1079,6 +1201,7 @@ fn visit_condition( custom_scalars, runtime_imports, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ); for selection in 
selections.iter_mut() { selection.set_conditional(true); @@ -1139,7 +1262,7 @@ fn selections_to_babel( if let Some(concrete_type) = selection.get_enclosing_concrete_type() { by_concrete_type .entry(concrete_type) - .or_insert_with(Vec::new) + .or_default() .push(selection); } else { let key = selection.get_string_key(); @@ -1269,6 +1392,7 @@ fn get_merged_object_with_optional_fields( } } +#[allow(clippy::too_many_arguments)] fn get_discriminated_union_ast( by_concrete_type: IndexMap>, base_fields: &IndexMap, @@ -1353,7 +1477,7 @@ fn get_discriminated_union_ast( /// /// If base fields is not empty /// * if we have a type refinement to a concrete type -/// * and all fields are outside of type refinements are __typename selections +/// * and all fields outside of type refinements are __typename selections /// /// If this condition passes, we emit a discriminated union fn should_emit_discriminated_union( @@ -1383,7 +1507,7 @@ pub(crate) fn raw_response_selections_to_babel( if let Some(concrete_type) = selection.get_enclosing_concrete_type() { by_concrete_type .entry(concrete_type) - .or_insert_with(Vec::new) + .or_default() .push(selection); } else { base_fields.push(selection); @@ -1559,50 +1683,57 @@ fn make_prop( if linked_field.node_type.is_list() { AST::RawType(intern!("[]")) } else { - AST::RawType(intern!("null | void")) + match typegen_context.project_config.typegen_config.language { + TypegenLanguage::Flow | TypegenLanguage::JavaScript => { + AST::RawType(intern!("null | void")) + } + TypegenLanguage::TypeScript => { + AST::RawType(intern!("null | undefined")) + } + } } } else { let setter_parameter = AST::Union( - SortedASTList::new( - just_fragments - .iter() - .map(|fragment_spread| { - let type_condition_info = fragment_spread - .type_condition_info - .expect("Fragment spreads in updatable queries should have TypeConditionInfo"); - let (key, value) = match type_condition_info { - TypeConditionInfo::Abstract => (format!("__is{}", 
fragment_spread.fragment_name).intern(), AST::String), - TypeConditionInfo::Concrete { concrete_type } => ("__typename".intern(), AST::StringLiteral(StringLiteral(concrete_type))), - }; - let fragment_spread_or_concrete_type_marker = Prop::KeyValuePair(KeyValuePairProp { - key, - value, - read_only: true, - optional: false, - }); - let assignable_fragment_spread_ref= Prop::KeyValuePair(KeyValuePairProp { - key: *KEY_FRAGMENT_SPREADS, - value: AST::FragmentReferenceType( - fragment_spread.fragment_name.0, - ), - read_only: true, - optional: false, - }); - let client_id_field = Prop::KeyValuePair(KeyValuePairProp { - key: "__id".intern(), - value: AST::String, - read_only: true, - optional: false, - }); - - AST::InexactObject(InexactObject::new(vec![ - assignable_fragment_spread_ref, - fragment_spread_or_concrete_type_marker, - client_id_field, - ])) - }) - .collect(), - )); + SortedASTList::new( + just_fragments + .iter() + .map(|fragment_spread| { + let type_condition_info = fragment_spread + .type_condition_info + .expect("Fragment spreads in updatable queries should have TypeConditionInfo"); + let (key, value) = match type_condition_info { + TypeConditionInfo::Abstract => (format!("__is{}", fragment_spread.fragment_name).intern(), AST::String), + TypeConditionInfo::Concrete { concrete_type } => ("__typename".intern(), AST::StringLiteral(StringLiteral(concrete_type))), + }; + let fragment_spread_or_concrete_type_marker = Prop::KeyValuePair(KeyValuePairProp { + key, + value, + read_only: true, + optional: false, + }); + let assignable_fragment_spread_ref = Prop::KeyValuePair(KeyValuePairProp { + key: *KEY_FRAGMENT_SPREADS, + value: AST::FragmentReference( + SortedStringKeyList::new(vec![fragment_spread.fragment_name.0]), + ), + read_only: true, + optional: false, + }); + let client_id_field = Prop::KeyValuePair(KeyValuePairProp { + key: "__id".intern(), + value: AST::String, + read_only: true, + optional: false, + }); + + AST::InexactObject(InexactObject::new(vec![ 
+ assignable_fragment_spread_ref, + fragment_spread_or_concrete_type_marker, + client_id_field, + ])) + }) + .collect(), + )); if linked_field.node_type.is_list() { AST::ReadOnlyArray(Box::new(setter_parameter)) } else { @@ -1689,8 +1820,10 @@ fn raw_response_make_prop( runtime_imports: &mut RuntimeImports, custom_scalars: &mut CustomScalarsImports, ) -> Prop { - let optional = - !typegen_context.no_optional_fields_in_raw_response_type && type_selection.is_conditional(); + let optional = !typegen_context + .typegen_options + .no_optional_fields_in_raw_response_type + && type_selection.is_conditional(); match type_selection { TypeSelection::ModuleDirective(module_directive) => Prop::Spread(SpreadProp { value: module_directive.fragment_name.0, @@ -1806,23 +1939,30 @@ fn transform_graphql_scalar_type( let path = directive .arguments .named(ArgumentName(*PATH_CUSTOM_SCALAR_ARGUMENT_NAME)) - .expect(&format!( - "Expected @{} directive to have a path argument", - *CUSTOM_SCALAR_DIRECTIVE_NAME - )) + .unwrap_or_else(|| { + panic!( + "Expected @{} directive to have a path argument", + *CUSTOM_SCALAR_DIRECTIVE_NAME + ) + }) .expect_string_literal(); - let import_path = typegen_context - .project_config - .js_module_import_path(typegen_context.definition_source_location, path); + let import_path = typegen_context.project_config.js_module_import_identifier( + &typegen_context + .project_config + .artifact_path_for_definition(typegen_context.definition_source_location), + &PathBuf::from(path.lookup()), + ); let export_name = directive .arguments .named(ArgumentName(*EXPORT_NAME_CUSTOM_SCALAR_ARGUMENT_NAME)) - .expect(&format!( - "Expected @{} directive to have an export_name argument", - *CUSTOM_SCALAR_DIRECTIVE_NAME - )) + .unwrap_or_else(|| { + panic!( + "Expected @{} directive to have an export_name argument", + *CUSTOM_SCALAR_DIRECTIVE_NAME + ) + }) .expect_string_literal(); custom_scalars.insert((export_name, PathBuf::from(import_path.lookup()))); return 
AST::RawType(export_name); @@ -1885,13 +2025,13 @@ pub(crate) fn raw_response_visit_selections( runtime_imports: &mut RuntimeImports, custom_scalars: &mut CustomScalarsImports, enclosing_linked_field_concrete_type: Option, + is_throw_on_field_error: bool, ) -> Vec { let mut type_selections = Vec::new(); for selection in selections { match selection { Selection::FragmentSpread(spread) => { - // @relay_client_component generate fragment spreads without - // @no_inline if no_inline isn't enabled for the fragment. + // TODO: this may be stale after removal of Flight and @relay_client_component if NoInlineFragmentSpreadMetadata::find(&spread.directives).is_some() { let spread_type = spread.fragment.item.0; imported_raw_response_types.0.insert( @@ -1920,8 +2060,21 @@ pub(crate) fn raw_response_visit_selections( runtime_imports, custom_scalars, enclosing_linked_field_concrete_type, + inline_fragment + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE) + .is_some(), ), Selection::LinkedField(linked_field) => { + // Note: We intentionally use the semantic field type here + // despite the fact that we are generating a raw response type, + // which should model the _server's_ return type. + // + // While it's true that the server may return null for a semantic non-null field, + // it should only do so if that field also has an error in the errors array. Since + // raw response type is generally used to construct payloads for apis which do not + // allow the user to provide additional field level error data, we must ensure that + // only semantically valid values are allowed in the raw response type. 
let linked_field_type = typegen_context .schema .field(linked_field.definition.item) @@ -1934,7 +2087,7 @@ pub(crate) fn raw_response_visit_selections( Some(linked_field_type) }; gen_visit_linked_field( - typegen_context.schema, + typegen_context, &mut type_selections, linked_field, |selections| { @@ -1948,8 +2101,10 @@ pub(crate) fn raw_response_visit_selections( runtime_imports, custom_scalars, nested_enclosing_linked_field_concrete_type, + is_throw_on_field_error, ) }, + is_throw_on_field_error, ) } Selection::ScalarField(scalar_field) => visit_scalar_field( @@ -1959,6 +2114,7 @@ pub(crate) fn raw_response_visit_selections( encountered_enums, custom_scalars, enclosing_linked_field_concrete_type, + is_throw_on_field_error, ), Selection::Condition(condition) => { type_selections.extend(raw_response_visit_selections( @@ -1971,6 +2127,7 @@ pub(crate) fn raw_response_visit_selections( runtime_imports, custom_scalars, enclosing_linked_field_concrete_type, + is_throw_on_field_error, )); } } @@ -2012,14 +2169,15 @@ fn transform_non_nullable_input_type( .iter() .map(|field| { Prop::KeyValuePair(KeyValuePairProp { - key: field.name.0, + key: field.name.item.0, read_only: false, optional: !field.type_.is_non_null() || typegen_context .project_config .typegen_config .optional_input_fields - .contains(&field.name.0), + .contains(&field.name.item.0) + || field.default_value.is_some(), value: transform_input_type( typegen_context, &field.type_, @@ -2203,7 +2361,7 @@ fn merge_selection_maps( should_set_conditional: bool, ) { for (key, value) in b { - let item = a.remove(&key); + let item = a.swap_remove(&key); a.insert(key, merge_selection(item, value, should_set_conditional)); } } @@ -2260,19 +2418,29 @@ fn group_refs(props: impl Iterator) -> impl Iterator TypeReference { + match field.directives.named(*SEMANTIC_NON_NULL_DIRECTIVE) { + Some(_) => field.semantic_type(), + None => apply_required_directive_nullability(&field.type_, schema_field_directives), + } +} + fn 
apply_required_directive_nullability( field_type: &TypeReference, - directives: &[Directive], + schema_field_directives: &[Directive], ) -> TypeReference { // We apply bubbling before the field's own @required directive (which may // negate the effects of bubbling) because we need handle the case where // null can bubble to the _items_ in a plural field which is itself // @required. - let bubbled_type = match directives.named(*CHILDREN_CAN_BUBBLE_METADATA_KEY) { + let bubbled_type = match schema_field_directives.named(*CHILDREN_CAN_BUBBLE_METADATA_KEY) { Some(_) => field_type.with_nullable_item_type(), None => field_type.clone(), }; - match directives.named(RequiredMetadataDirective::directive_name()) { + match schema_field_directives.named(RequiredMetadataDirective::directive_name()) { Some(_) => bubbled_type.non_null(), None => bubbled_type, } @@ -2280,15 +2448,15 @@ fn apply_required_directive_nullability( fn get_type_condition_info(fragment_spread: &FragmentSpread) -> Option { fragment_spread - .directives - .named(*ASSIGNABLE_DIRECTIVE_FOR_TYPEGEN) - .map(|directive| { - directive - .data - .as_ref() - .and_then(|data| data.downcast_ref().copied()) - .expect("If a fragment spread contains an __updatable directive, the associated data should be present and have type TypeConditionInfo") - }) + .directives + .named(*ASSIGNABLE_DIRECTIVE_FOR_TYPEGEN) + .map(|directive| { + directive + .data + .as_ref() + .and_then(|data| data.downcast_ref().copied()) + .expect("If a fragment spread contains an __updatable directive, the associated data should be present and have type TypeConditionInfo") + }) } /// Returns the type of the generated query. 
This is the type parameter that you would have @@ -2325,16 +2493,13 @@ fn has_typename_selection(selections: &[TypeSelection]) -> bool { } fn create_edge_to_return_type_ast( - schema_field: &Field, + inner_type: &Type, schema: &SDLSchema, runtime_imports: &mut RuntimeImports, ) -> AST { // Mark that the DataID type is used, and must be imported. runtime_imports.data_id_type = true; - let schema_type_reference = &schema_field.type_; - let inner_type = schema_type_reference.inner(); - let mut fields = vec![Prop::KeyValuePair(KeyValuePairProp { // TODO consider reading the id field from the config. This must be done // in conjunction with runtime changes. @@ -2343,10 +2508,10 @@ fn create_edge_to_return_type_ast( read_only: true, optional: false, })]; - if inner_type.is_abstract_type() && schema.is_extension_type(inner_type) { + if inner_type.is_abstract_type() && schema.is_extension_type(*inner_type) { // Note: there is currently no way to create a resolver that returns an abstract // client type, so this branch will not be hit until we enable that feature. - let interface_id = schema_field.type_.inner().get_interface_id().expect( + let interface_id = inner_type.get_interface_id().expect( "Only interfaces are supported here. 
This indicates a bug in the Relay compiler.", ); let valid_typenames = schema @@ -2369,9 +2534,7 @@ fn create_edge_to_return_type_ast( })) } - transform_type_reference_into_ast(schema_type_reference, |_| { - AST::ExactObject(ExactObject::new(fields)) - }) + AST::ExactObject(ExactObject::new(fields)) } fn expect_scalar_type( diff --git a/compiler/crates/relay-typegen/src/write.rs b/compiler/crates/relay-typegen/src/write.rs index 07791d08fb41c..c4557d62325bc 100644 --- a/compiler/crates/relay-typegen/src/write.rs +++ b/compiler/crates/relay-typegen/src/write.rs @@ -13,6 +13,7 @@ use ::intern::intern; use ::intern::string_key::Intern; use ::intern::string_key::StringKey; use ::intern::Lookup; +use common::DirectiveName; use common::InputObjectName; use common::NamedItem; use graphql_ir::FragmentDefinition; @@ -22,6 +23,7 @@ use graphql_ir::ProvidedVariableMetadata; use graphql_ir::Selection; use indexmap::IndexMap; use itertools::Itertools; +use lazy_static::lazy_static; use relay_config::JsModuleFormat; use relay_config::TypegenLanguage; use relay_transforms::RefetchableDerivedFromMetadata; @@ -68,18 +70,23 @@ use crate::KEY_FRAGMENT_TYPE; use crate::KEY_RAW_RESPONSE; use crate::KEY_TYPENAME; use crate::KEY_UPDATABLE_FRAGMENT_SPREADS; -use crate::PROVIDED_VARIABLE_TYPE; use crate::RAW_RESPONSE_TYPE_DIRECTIVE_NAME; use crate::REACT_RELAY_MULTI_ACTOR; use crate::VALIDATOR_EXPORT_NAME; pub(crate) type CustomScalarsImports = HashSet<(StringKey, PathBuf)>; +lazy_static! 
{ + static ref THROW_ON_FIELD_ERROR_DIRECTIVE: DirectiveName = + DirectiveName("throwOnFieldError".intern()); +} + pub(crate) fn write_operation_type_exports_section( typegen_context: &'_ TypegenContext<'_>, typegen_operation: &OperationDefinition, normalization_operation: &OperationDefinition, writer: &mut Box, + maybe_provided_variables_object: Option, ) -> FmtResult { let mut encountered_enums = Default::default(); let mut encountered_fragments = Default::default(); @@ -90,6 +97,11 @@ pub(crate) fn write_operation_type_exports_section( let mut input_object_types = Default::default(); let mut imported_raw_response_types = Default::default(); + let is_throw_on_field_error = typegen_operation + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE) + .is_some(); + let type_selections = visit_selections( typegen_context, &typegen_operation.selections, @@ -102,6 +114,7 @@ pub(crate) fn write_operation_type_exports_section( &mut custom_scalars, &mut runtime_imports, None, + is_throw_on_field_error, ); let data_type = get_data_type( @@ -132,6 +145,7 @@ pub(crate) fn write_operation_type_exports_section( &mut runtime_imports, &mut custom_scalars, None, + is_throw_on_field_error, ); Some(( raw_response_selections_to_babel( @@ -164,11 +178,11 @@ pub(crate) fn write_operation_type_exports_section( write_import_actor_change_point(actor_change_status, writer)?; runtime_imports.write_runtime_imports(writer)?; write_fragment_imports(typegen_context, None, encountered_fragments, writer)?; - write_relay_resolver_imports(typegen_context, imported_resolvers, writer)?; + write_relay_resolver_imports(imported_resolvers, writer)?; write_split_raw_response_type_imports(typegen_context, imported_raw_response_types, writer)?; let mut input_object_types = IndexMap::default(); - let provided_variables_object = generate_provided_variables_type( + let expected_provided_variables_type = generate_provided_variables_type( typegen_context, normalization_operation, &mut input_object_types, @@ 
-215,8 +229,17 @@ pub(crate) fn write_operation_type_exports_section( &query_wrapper_type.into(), )?; - if let Some(provided_variables) = provided_variables_object { - writer.write_local_type(PROVIDED_VARIABLE_TYPE, &provided_variables)?; + if let Some(provided_variables_type) = expected_provided_variables_type { + let actual_provided_variables_object = maybe_provided_variables_object.unwrap_or_else(|| { + panic!("Expected the provided variables object. If you see this error, it is most likely a bug in the compiler."); + }); + + // Assert that expected type of provided variables matches + // the flow/typescript types of functions with providers. + writer.write_type_assertion( + actual_provided_variables_object.as_str(), + &provided_variables_type, + )?; } Ok(()) @@ -258,6 +281,11 @@ pub(crate) fn write_split_operation_type_exports_section( let mut runtime_imports = RuntimeImports::default(); let mut custom_scalars = CustomScalarsImports::default(); + let is_throw_on_field_error = typegen_operation + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE) + .is_some(); + let raw_response_selections = raw_response_visit_selections( typegen_context, &normalization_operation.selections, @@ -268,6 +296,7 @@ &mut runtime_imports, &mut custom_scalars, None, + is_throw_on_field_error, ); let raw_response_type = raw_response_selections_to_babel( typegen_context, @@ -305,6 +334,11 @@ pub(crate) fn write_fragment_type_exports_section( .named(*ASSIGNABLE_DIRECTIVE) .is_some(); + let is_throw_on_field_error = fragment_definition + .directives + .named(*THROW_ON_FIELD_ERROR_DIRECTIVE) + .is_some(); + let mut encountered_enums = Default::default(); let mut encountered_fragments = Default::default(); let mut imported_resolvers = Default::default(); @@ -329,6 +363,7 @@ &mut custom_scalars, &mut runtime_imports, None, + is_throw_on_field_error, ); if
!fragment_definition.type_condition.is_abstract_type() { let num_concrete_selections = type_selections @@ -414,7 +449,7 @@ pub(crate) fn write_fragment_type_exports_section( write_custom_scalar_imports(custom_scalars, writer)?; runtime_imports.write_runtime_imports(writer)?; - write_relay_resolver_imports(typegen_context, imported_resolvers, writer)?; + write_relay_resolver_imports(imported_resolvers, writer)?; let refetchable_metadata = RefetchableMetadata::find(&fragment_definition.directives); let fragment_type_name = format!("{}$fragmentType", fragment_name); @@ -444,6 +479,11 @@ pub(crate) fn write_fragment_type_exports_section( if !is_assignable_fragment { writer.write_export_type(&data_type_name, &data_type)?; writer.write_export_type(&format!("{}$key", fragment_definition.name.item), &ref_type)?; + } else if typegen_context + .typegen_options + .is_extra_artifact_branch_module + { + writer.write_export_type(&data_type_name, &data_type)?; } Ok(()) @@ -498,16 +538,15 @@ fn write_fragment_imports( ) }); - let path_for_artifact = - typegen_context.project_config.create_path_for_artifact( - fragment_location.source_location(), - current_referenced_fragment.to_string(), - ); - let fragment_import_path = - typegen_context.project_config.js_module_import_path( - typegen_context.definition_source_location, - path_for_artifact.to_str().unwrap().intern(), + typegen_context.project_config.js_module_import_identifier( + &typegen_context.project_config.artifact_path_for_definition( + typegen_context.definition_source_location, + ), + &typegen_context.project_config.create_path_for_artifact( + fragment_location.source_location(), + current_referenced_fragment.to_string(), + ), ); writer.write_import_fragment_type( @@ -539,19 +578,9 @@ fn write_import_actor_change_point( } fn write_relay_resolver_imports( - typegen_context: &'_ TypegenContext<'_>, mut imported_resolvers: ImportedResolvers, writer: &mut Box, ) -> FmtResult { - // We don't need to import resolver modules in 
the type-generation - // they should be imported in the codegen. - if matches!( - typegen_context.project_config.typegen_config.language, - TypegenLanguage::TypeScript - ) { - return Ok(()); - } - imported_resolvers.0.sort_keys(); for resolver in imported_resolvers.0.values() { match resolver.resolver_name { @@ -566,7 +595,9 @@ fn write_relay_resolver_imports( )?; } } - writer.write(&resolver.resolver_type)?; + if let AST::AssertFunctionType(_) = &resolver.resolver_type { + writer.write(&resolver.resolver_type)?; + } } Ok(()) } @@ -594,16 +625,15 @@ fn write_split_raw_response_type_imports( } else if let Some(imported_raw_response_document_location) = imported_raw_response_document_location { - let path_for_artifact = - typegen_context.project_config.create_path_for_artifact( - imported_raw_response_document_location.source_location(), - imported_raw_response_type.to_string(), - ); - let artifact_import_path = - typegen_context.project_config.js_module_import_path( - typegen_context.definition_source_location, - path_for_artifact.to_str().unwrap().intern(), + typegen_context.project_config.js_module_import_identifier( + &typegen_context.project_config.artifact_path_for_definition( + typegen_context.definition_source_location, + ), + &typegen_context.project_config.create_path_for_artifact( + imported_raw_response_document_location.source_location(), + imported_raw_response_type.to_string(), + ), ); writer.write_import_fragment_type( @@ -632,16 +662,18 @@ fn write_enum_definitions( writer: &mut Box, ) -> FmtResult { let enum_ids = encountered_enums.into_sorted_vec(typegen_context.schema); + let maybe_suffix = &typegen_context + .project_config + .typegen_config + .enum_module_suffix; for enum_id in enum_ids { let enum_type = typegen_context.schema.enum_(enum_id); - if let Some(enum_module_suffix) = &typegen_context - .project_config - .typegen_config - .enum_module_suffix - { + if !enum_type.is_extension && maybe_suffix.is_some() { + // We can't chain `if let` 
statements, so we need to unwrap here. + let suffix = maybe_suffix.as_ref().unwrap(); writer.write_import_type( &[enum_type.name.item.lookup()], - &format!("{}{}", enum_type.name.item, enum_module_suffix), + &format!("{}{}", enum_type.name.item, suffix), )?; } else { let mut members: Vec = enum_type @@ -650,11 +682,21 @@ fn write_enum_definitions( .map(|enum_value| AST::StringLiteral(StringLiteral(enum_value.value))) .collect(); - if !typegen_context - .project_config - .typegen_config - .flow_typegen - .no_future_proof_enums + // Users can specify a config option to disable the inclusion of + // FUTURE_ENUM_VALUE in the enum union. Additionally we want to avoid + // emitting FUTURE_ENUM_VALUE if the enum is actually defined on the + // client. For example in Client Schema Extensions or (some day) + // Relay Resolvers. + // + // In the case of a client defined enum, we don't need to enforce + // the breaking change semantics dictated by the GraphQL spec + // because new fields added to the client schema will simply result + // in fixable Flow/TypeScript errors elsewhere in the codebase. + if !(enum_type.is_extension + || typegen_context + .project_config + .typegen_config + .no_future_proof_enums) { members.push(AST::StringLiteral(StringLiteral(*FUTURE_ENUM_VALUE))); } @@ -834,13 +876,23 @@ fn write_abstract_validator_function( writer.write(&return_type)?; write!( writer, - "{} {{\n return value.{} != null ? (value{}: ", + "{} {{\n return value.{} != null ? 
", &close_comment, abstract_fragment_spread_marker.lookup(), - open_comment )?; - writer.write(&AST::Any)?; - write!(writer, "{}) : false;\n}}", &close_comment)?; + + match language { + TypegenLanguage::Flow | TypegenLanguage::JavaScript => { + write!(writer, "(value{}: ", &open_comment)?; + writer.write(&AST::Any)?; + write!(writer, "{}) ", &close_comment)?; + } + TypegenLanguage::TypeScript => { + write!(writer, "value ")?; + } + } + + write!(writer, ": false;\n}}")?; Ok(()) } @@ -907,8 +959,8 @@ fn write_concrete_validator_function( AST::RawType(intern!("false")), ])); - let (open_comment, close_comment) = match typegen_context.project_config.typegen_config.language - { + let typegen_language = typegen_context.project_config.typegen_config.language; + let (open_comment, close_comment) = match typegen_language { TypegenLanguage::Flow | TypegenLanguage::JavaScript => ("/*", "*/"), TypegenLanguage::TypeScript => ("", ""), }; @@ -923,14 +975,24 @@ fn write_concrete_validator_function( writer.write(&return_type)?; write!( writer, - "{} {{\n return value.{} === '{}' ? (value{}: ", + "{} {{\n return value.{} === '{}' ? 
", &close_comment, KEY_TYPENAME.lookup(), - concrete_typename.lookup(), - open_comment + concrete_typename.lookup() )?; - writer.write(&AST::Any)?; - write!(writer, "{}) : false;\n}}", &close_comment)?; + + match typegen_language { + TypegenLanguage::Flow | TypegenLanguage::JavaScript => { + write!(writer, "(value{}: ", &open_comment)?; + writer.write(&AST::Any)?; + write!(writer, "{}) ", &close_comment)?; + } + TypegenLanguage::TypeScript => { + write!(writer, "value ")?; + } + } + + write!(writer, ": false;\n}}")?; Ok(()) } diff --git a/compiler/crates/relay-typegen/src/writer.rs b/compiler/crates/relay-typegen/src/writer.rs index f791f3b9054f2..a067b111cd4ac 100644 --- a/compiler/crates/relay-typegen/src/writer.rs +++ b/compiler/crates/relay-typegen/src/writer.rs @@ -357,7 +357,7 @@ pub trait Writer: Write { fn write(&mut self, ast: &AST) -> FmtResult; - fn write_local_type(&mut self, name: &str, ast: &AST) -> FmtResult; + fn write_type_assertion(&mut self, name: &str, ast: &AST) -> FmtResult; fn write_export_type(&mut self, name: &str, ast: &AST) -> FmtResult; @@ -376,6 +376,7 @@ pub trait Writer: Write { fn write_export_fragment_type(&mut self, name: &str) -> FmtResult; + #[allow(dead_code)] fn write_export_fragment_types( &mut self, fragment_type_name_1: &str, @@ -385,6 +386,14 @@ pub trait Writer: Write { fn write_any_type_definition(&mut self, name: &str) -> FmtResult; } +pub(crate) fn new_writer_from_config(config: &TypegenConfig) -> Box { + match config.language { + TypegenLanguage::JavaScript => Box::::default(), + TypegenLanguage::Flow => Box::new(FlowPrinter::new()), + TypegenLanguage::TypeScript => Box::new(TypeScriptPrinter::new(config)), + } +} + #[cfg(test)] mod tests { use graphql_ir::reexport::Intern; @@ -430,11 +439,3 @@ mod tests { ) } } - -pub(crate) fn new_writer_from_config(config: &TypegenConfig) -> Box { - match config.language { - TypegenLanguage::JavaScript => Box::new(JavaScriptPrinter::default()), - TypegenLanguage::Flow => 
Box::new(FlowPrinter::new()), - TypegenLanguage::TypeScript => Box::new(TypeScriptPrinter::new(config)), - } -} diff --git a/compiler/crates/relay-typegen/tests/generate_flow.rs b/compiler/crates/relay-typegen/tests/generate_flow.rs new file mode 100644 index 0000000000000..f68603ab598d7 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow.rs @@ -0,0 +1,160 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ConsoleLogger; +use common::FeatureFlag; +use common::FeatureFlags; +use common::ScalarName; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use fnv::FnvBuildHasher; +use fnv::FnvHashMap; +use graphql_ir::build_ir_in_relay_mode; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use graphql_test_helpers::diagnostics_to_sorted_string; +use indexmap::IndexMap; +use intern::string_key::Intern; +use relay_codegen::print_provided_variables; +use relay_codegen::JsModuleFormat; +use relay_config::CustomScalarType; +use relay_config::CustomScalarTypeImport; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::apply_transforms; +use relay_typegen::FragmentLocations; +use relay_typegen::TypegenConfig; +use relay_typegen::TypegenLanguage; + +type FnvIndexMap = IndexMap; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts = fixture.content.split("%extensions%").collect::>(); + let (source, schema) = match parts.as_slice() { + [source, extensions] => (source, get_test_schema_with_extensions(extensions)), + [source] => (source, get_test_schema()), + _ => panic!(), + }; + + let source_location = SourceLocationKey::standalone(fixture.file_name); + + 
let mut sources = FnvHashMap::default(); + sources.insert(source_location, source); + let ast = parse_executable(source, source_location) + .map_err(|diagnostics| diagnostics_to_sorted_string(source, &diagnostics))?; + let feature_flags = FeatureFlags { + no_inline: FeatureFlag::Limited { + allowlist: [ + "noInlineFragment_address".intern(), + "noInlineFragment_user".intern(), + "MarkdownUserNameRenderer_name".intern(), + "Test_userRenderer".intern(), + "PlainUserNameRenderer_name".intern(), + ] + .into_iter() + .collect(), + }, + enable_relay_resolver_transform: true, + relay_resolver_enable_interface_output_type: FeatureFlag::Enabled, + actor_change_support: FeatureFlag::Enabled, + enable_fragment_aliases: FeatureFlag::Enabled, + ..Default::default() + }; + let ir = build_ir_in_relay_mode(&schema, &ast.definitions, &feature_flags) + .map_err(|diagnostics| diagnostics_to_sorted_string(source, &diagnostics))?; + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let mut custom_scalar_types = FnvIndexMap::default(); + + custom_scalar_types.insert( + ScalarName("Boolean".intern()), + CustomScalarType::Name("CustomBoolean".intern()), + ); + custom_scalar_types.insert( + ScalarName("JSON".intern()), + CustomScalarType::Path(CustomScalarTypeImport { + name: "JSON".intern(), + path: "TypeDefsFile".into(), + }), + ); + let project_config = ProjectConfig { + name: ProjectName::default(), + js_module_format: JsModuleFormat::Haste, + feature_flags: Arc::new(feature_flags), + typegen_config: TypegenConfig { + language: TypegenLanguage::Flow, + custom_scalar_types, + experimental_emit_semantic_nullability_types: fixture + .content + .contains("# relay:experimental_emit_semantic_nullability_types"), + ..Default::default() + }, + ..Default::default() + }; + + let programs = apply_transforms( + &project_config, + Arc::new(program), + Default::default(), + Arc::new(ConsoleLogger), + None, + None, + ) + .map_err(|diagnostics| diagnostics_to_sorted_string(source, 
&diagnostics))?; + + let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); + let mut operations: Vec<_> = programs.typegen.operations().collect(); + operations.sort_by_key(|op| op.name.item.0); + let operation_strings = operations.into_iter().map(|typegen_operation| { + // `normalization` ASTs are present unless we are processing an updatable query + // In that case, `reader` ASTs are present. + let op: &Arc = programs + .normalization + .operation(OperationDefinitionName(typegen_operation.name.item.0)) + .unwrap_or_else(|| { + programs + .reader + .operation(OperationDefinitionName(typegen_operation.name.item.0)) + .unwrap_or_else(|| { + panic!( + "Couldn't find normalization or reader operations for {}", + typegen_operation.name.item + ) + }) + }); + + relay_typegen::generate_operation_type_exports_section( + typegen_operation, + op, + &schema, + &project_config, + &fragment_locations, + print_provided_variables(&schema, op, &project_config), + ) + }); + + let mut fragments: Vec<_> = programs.typegen.fragments().collect(); + fragments.sort_by_key(|frag| frag.name.item); + let fragment_strings = fragments.into_iter().map(|frag| { + relay_typegen::generate_fragment_type_exports_section( + frag, + &schema, + &project_config, + &fragment_locations, + ) + }); + + let mut result: Vec = operation_strings.collect(); + result.extend(fragment_strings); + Ok(result + .join("-------------------------------------------------------------------------------\n")) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-raw-response-type.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-raw-response-type.expected index bfec513f9ac87..6593c2ffccb27 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-raw-response-type.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-raw-response-type.expected @@ -16,10 +16,10 
@@ import type { MyUserFragment$fragmentType } from "MyUserFragment.graphql"; export type MyQuery$variables = {||}; export type MyQuery$data = {| +me: ?{| - +my_inline_fragment: ?{| + +my_inline_fragment: {| +name: ?string, |}, - +my_user: ?{| + +my_user: {| +$fragmentSpreads: MyUserFragment$fragmentType, |}, |}, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-spread.expected index 52b1e61b4f239..387d161ece382 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-spread.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-fragment-spread.expected @@ -13,7 +13,7 @@ import type { RelayReaderNamedFragmentsTest_user$fragmentType } from "RelayReade export type RelayReaderNamedFragmentsTest2Query$variables = {||}; export type RelayReaderNamedFragmentsTest2Query$data = {| +me: ?{| - +aliased_fragment: ?{| + +aliased_fragment: {| +$fragmentSpreads: RelayReaderNamedFragmentsTest_user$fragmentType, |}, |}, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected index ae484f282d574..a5589fd90c248 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected @@ -8,7 +8,7 @@ fragment Foo on User { import type { FragmentType } from "relay-runtime"; declare export opaque type Foo$fragmentType: FragmentType; export type Foo$data = {| - +aliased_fragment: ?{| + +aliased_fragment: {| +name: ?string, |}, +$fragmentType: Foo$fragmentType, 
diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected index bba8eded810bb..32ca2c25ca8b8 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected @@ -11,7 +11,7 @@ query RelayReaderNamedFragmentsTest2Query { export type RelayReaderNamedFragmentsTest2Query$variables = {||}; export type RelayReaderNamedFragmentsTest2Query$data = {| +me: ?{| - +aliased_fragment: ?{| + +aliased_fragment: {| +name: ?string, |}, +id: string, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected index dcbc5451f630d..b9bead36b4461 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected @@ -10,7 +10,7 @@ query RelayReaderNamedFragmentsTest2Query { ==================================== OUTPUT =================================== export type RelayReaderNamedFragmentsTest2Query$variables = {||}; export type RelayReaderNamedFragmentsTest2Query$data = {| - +aliased_fragment: ?{| + +aliased_fragment: {| +me: ?{| +id: string, +name: ?string, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread.expected 
b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread.expected index afebc0c1c73cb..ce0c6580f2847 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/aliased-inline-fragment-spread.expected @@ -11,7 +11,7 @@ query RelayReaderNamedFragmentsTest2Query { export type RelayReaderNamedFragmentsTest2Query$variables = {||}; export type RelayReaderNamedFragmentsTest2Query$data = {| +me: ?{| - +aliased_fragment: ?{| + +aliased_fragment: {| +name: ?string, |}, +id: string, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.expected new file mode 100644 index 0000000000000..cd9d85ceeec7c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +mutation feedbackUnLikeMutation($input: FeedbackUnLikeInput) { + feedbackUnLike(input: $input) { + feedback { + id + } + } +} +==================================== OUTPUT =================================== +export type FeedbackUnLikeInput = {| + feedbackId?: ?string, + silent?: CustomBoolean, +|}; +export type feedbackUnLikeMutation$variables = {| + input?: ?FeedbackUnLikeInput, +|}; +export type feedbackUnLikeMutation$data = {| + +feedbackUnLike: ?{| + +feedback: ?{| + +id: string, + |}, + |}, +|}; +export type feedbackUnLikeMutation = {| + response: feedbackUnLikeMutation$data, + variables: feedbackUnLikeMutation$variables, +|}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.graphql new file mode 100644 index 0000000000000..8abff1aa7048e --- /dev/null +++ 
b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/default-input.graphql @@ -0,0 +1,7 @@ +mutation feedbackUnLikeMutation($input: FeedbackUnLikeInput) { + feedbackUnLike(input: $input) { + feedback { + id + } + } +} \ No newline at end of file diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-mixed-provided-variables.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-mixed-provided-variables.expected index 172d353d967c4..52937dab47b8a 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-mixed-provided-variables.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-mixed-provided-variables.expected @@ -63,7 +63,11 @@ export type queryMixedProvidedVar_MultiFragment = {| response: queryMixedProvidedVar_MultiFragment$data, variables: queryMixedProvidedVar_MultiFragment$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__includeNameProvider": require('includeNameProvider'), + "__relay_internal__pv__numberOfFriendsProvider": require('numberOfFriendsProvider'), + "__relay_internal__pv__skipFirstnameProvider": require('skipFirstnameProvider') +}: {| +__relay_internal__pv__includeNameProvider: {| +get: () => CustomBoolean, |}, @@ -73,7 +77,7 @@ type ProvidedVariablesType = {| +__relay_internal__pv__skipFirstnameProvider: {| +get: () => CustomBoolean, |}, -|}; +|}); ------------------------------------------------------------------------------- import type { FragmentOneProvidedVar$fragmentType } from "FragmentOneProvidedVar.graphql"; export type queryMixedProvidedVar_OneFragment$variables = {| @@ -88,11 +92,13 @@ export type queryMixedProvidedVar_OneFragment = {| response: queryMixedProvidedVar_OneFragment$data, variables: queryMixedProvidedVar_OneFragment$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__includeNameProvider": require('includeNameProvider') +}: {| +__relay_internal__pv__includeNameProvider: {| 
+get: () => CustomBoolean, |}, -|}; +|}); ------------------------------------------------------------------------------- import type { FragmentType } from "relay-runtime"; declare export opaque type FragmentMultiProvidedVar$fragmentType: FragmentType; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-only-provided-variables.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-only-provided-variables.expected index 4c82f5e43ad42..5fd6e655a0fd9 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-only-provided-variables.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-only-provided-variables.expected @@ -46,14 +46,17 @@ export type queryOnlyProvidedVar_MultiFragment = {| response: queryOnlyProvidedVar_MultiFragment$data, variables: queryOnlyProvidedVar_MultiFragment$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__includeNameProvider": require('includeNameProvider'), + "__relay_internal__pv__numberOfFriendsProvider": require('numberOfFriendsProvider') +}: {| +__relay_internal__pv__includeNameProvider: {| +get: () => CustomBoolean, |}, +__relay_internal__pv__numberOfFriendsProvider: {| +get: () => number, |}, -|}; +|}); ------------------------------------------------------------------------------- import type { FragmentOneProvidedVar$fragmentType } from "FragmentOneProvidedVar.graphql"; export type queryOnlyProvidedVar_OneFragment$variables = {||}; @@ -66,11 +69,13 @@ export type queryOnlyProvidedVar_OneFragment = {| response: queryOnlyProvidedVar_OneFragment$data, variables: queryOnlyProvidedVar_OneFragment$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__includeNameProvider": require('includeNameProvider') +}: {| +__relay_internal__pv__includeNameProvider: {| +get: () => CustomBoolean, |}, -|}; +|}); ------------------------------------------------------------------------------- import type { 
FragmentType } from "relay-runtime"; declare export opaque type FragmentMultiProvidedVar$fragmentType: FragmentType; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-provided-variables-custom-scalar.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-provided-variables-custom-scalar.expected index bf5396773e0a1..da660832c81ab 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-provided-variables-custom-scalar.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query-provided-variables-custom-scalar.expected @@ -26,11 +26,13 @@ export type testQuery = {| response: testQuery$data, variables: testQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__includeNameProvider": require('includeNameProvider') +}: {| +__relay_internal__pv__includeNameProvider: {| +get: () => ?JSON, |}, -|}; +|}); ------------------------------------------------------------------------------- import type { FragmentType } from "relay-runtime"; declare export opaque type FragmentWithJSONProvidedVar$fragmentType: FragmentType; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.expected deleted file mode 100644 index 5c86702963661..0000000000000 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.expected +++ /dev/null @@ -1,66 +0,0 @@ -==================================== INPUT ==================================== -query queryWithRelayClientComponentQuery @raw_response_type{ - viewer { - ...queryWithRelayClientComponent_f1 @relay_client_component - ...queryWithRelayClientComponent_f2 @relay_client_component - } -} - -fragment queryWithRelayClientComponent_f1 on Viewer { - __typename -} - -fragment queryWithRelayClientComponent_f2 on 
Viewer { - __typename -} -==================================== OUTPUT =================================== -import type { queryWithRelayClientComponent_f1$fragmentType } from "queryWithRelayClientComponent_f1.graphql"; -import type { queryWithRelayClientComponent_f2$fragmentType } from "queryWithRelayClientComponent_f2.graphql"; -export type queryWithRelayClientComponentQuery$variables = {||}; -export type queryWithRelayClientComponentQuery$data = {| - +viewer: ?{| - +$fragmentSpreads: queryWithRelayClientComponent_f1$fragmentType & queryWithRelayClientComponent_f2$fragmentType, - |}, -|}; -export type queryWithRelayClientComponentQuery$rawResponse = {| - +viewer: ?{||}, -|}; -export type queryWithRelayClientComponentQuery = {| - rawResponse: queryWithRelayClientComponentQuery$rawResponse, - response: queryWithRelayClientComponentQuery$data, - variables: queryWithRelayClientComponentQuery$variables, -|}; -------------------------------------------------------------------------------- -import type { FragmentType } from "relay-runtime"; -declare export opaque type queryWithRelayClientComponent_f1$fragmentType: FragmentType; -export type queryWithRelayClientComponent_f1$data = {| - +__typename: "Viewer", - +$fragmentType: queryWithRelayClientComponent_f1$fragmentType, -|} | {| - // This will never be '%other', but we need some - // value in case none of the concrete values match. - +__typename: "%other", - +$fragmentType: queryWithRelayClientComponent_f1$fragmentType, -|}; -export type queryWithRelayClientComponent_f1$key = { - +$data?: queryWithRelayClientComponent_f1$data, - +$fragmentSpreads: queryWithRelayClientComponent_f1$fragmentType, - ... 
-}; -------------------------------------------------------------------------------- -import type { FragmentType } from "relay-runtime"; -declare export opaque type queryWithRelayClientComponent_f2$fragmentType: FragmentType; -export type queryWithRelayClientComponent_f2$data = {| - +__typename: "Viewer", - +$fragmentType: queryWithRelayClientComponent_f2$fragmentType, -|} | {| - // This will never be '%other', but we need some - // value in case none of the concrete values match. - +__typename: "%other", - +$fragmentType: queryWithRelayClientComponent_f2$fragmentType, -|}; -export type queryWithRelayClientComponent_f2$key = { - +$data?: queryWithRelayClientComponent_f2$data, - +$fragmentSpreads: queryWithRelayClientComponent_f2$fragmentType, - ... -}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.graphql deleted file mode 100644 index 93990986d6918..0000000000000 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/query_with_raw_response_and_client_components.graphql +++ /dev/null @@ -1,14 +0,0 @@ -query queryWithRelayClientComponentQuery @raw_response_type{ - viewer { - ...queryWithRelayClientComponent_f1 @relay_client_component - ...queryWithRelayClientComponent_f2 @relay_client_component - } -} - -fragment queryWithRelayClientComponent_f1 on Viewer { - __typename -} - -fragment queryWithRelayClientComponent_f2 on Viewer { - __typename -} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-no-fragment.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-no-fragment.expected index f5e950dae3b9b..32f198108bde7 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-no-fragment.expected +++ 
b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-no-fragment.expected @@ -15,15 +15,15 @@ extend type User { ) } ==================================== OUTPUT =================================== -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import userPopStarNameResolverType from "PopStarNameResolver"; // Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. -(userPopStarNameResolverType: () => LiveState); +(userPopStarNameResolverType: () => LiveState); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>["read"]>, + +pop_star_name: ?ReturnType["read"]>, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected index 5503adf38e9d5..ba73d2e45fd65 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected @@ -19,7 +19,7 @@ extend type User { ) } ==================================== OUTPUT =================================== -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import userPopStarNameResolverType from "PopStarNameResolver"; // Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. 
// A type error here indicates that the type signature of the resolver module is incorrect. @@ -28,11 +28,11 @@ import userPopStarNameResolverType from "PopStarNameResolver"; count: ?number, greeting: string, |}, -) => LiveState); +) => LiveState); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>["read"]>, + +pop_star_name: ?ReturnType["read"]>, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args.expected index 1eb089632c628..3ebfcc159a796 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver-with-field-args.expected @@ -20,7 +20,7 @@ extend type User { ) } ==================================== OUTPUT =================================== -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { relayResolver_PopStarNameResolverFragment_name$key } from "relayResolver_PopStarNameResolverFragment_name.graphql"; import userPopStarNameResolverType from "PopStarNameResolver"; // Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. 
@@ -31,11 +31,11 @@ import userPopStarNameResolverType from "PopStarNameResolver"; count: ?number, greeting: string, |}, -) => LiveState); +) => LiveState); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>["read"]>, + +pop_star_name: ?ReturnType["read"]>, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver.expected index 792616cd8f66e..462f24cfabefc 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-live-resolver.expected @@ -20,18 +20,18 @@ extend type User { ) } ==================================== OUTPUT =================================== -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { relayResolver_PopStarNameResolverFragment_name$key } from "relayResolver_PopStarNameResolverFragment_name.graphql"; import userPopStarNameResolverType from "PopStarNameResolver"; // Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => LiveState); +) => LiveState); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>["read"]>, + +pop_star_name: ?ReturnType["read"]>, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-in-fragment.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-in-fragment.expected index 6f709c1be3eda..d727c3adf96bb 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-in-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-in-fragment.expected @@ -36,10 +36,10 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); declare export opaque type relayResolver_consumer$fragmentType: FragmentType; export type relayResolver_consumer$data = {| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, +$fragmentType: relayResolver_consumer$fragmentType, |}; export type relayResolver_consumer$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-inject-fragment-data.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-inject-fragment-data.expected index ffd2b403c1399..3f1152c1dc7d7 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-inject-fragment-data.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-inject-fragment-data.expected @@ -21,11 +21,11 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( name: relayResolver_PopStarNameResolverFragment_name$data['name'], -) => mixed); +) => ?mixed); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-live-client-edge.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-live-client-edge.expected index 0dbd9252c91ff..10e0840c3eaed 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-live-client-edge.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-live-client-edge.expected @@ -31,8 +31,7 @@ export type ClientEdgeQuery_relayResolver_Query_me__best_friend = {| variables: ClientEdgeQuery_relayResolver_Query_me__best_friend$variables, |}; ------------------------------------------------------------------------------- -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { DataID } from "relay-runtime"; +import type { LiveState, DataID } from "relay-runtime"; import type { relayResolver_BestFriendResolverFragment_name$key } from "relayResolver_BestFriendResolverFragment_name.graphql"; import userBestFriendResolverType from "BestFriendResolver"; // Type assertion validating that `userBestFriendResolverType` resolver is correctly implemented. 
diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-multiple-consumers.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-multiple-consumers.expected index 9fa01ea5e75fb..9024d4afc6f53 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-multiple-consumers.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-multiple-consumers.expected @@ -28,14 +28,14 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. (userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| +parents: $ReadOnlyArray<{| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, |}>, - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-named-import.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-named-import.expected index b3e908e2a979d..4bbe5d468b8e8 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-named-import.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-named-import.expected @@ -26,11 +26,11 @@ import {pop_star_name as userPopStarNameResolverType} from "PopStarNameResolver" // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolverNamedImport_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); export type relayResolverNamedImport_Query$variables = {||}; export type relayResolverNamedImport_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, |}, |}; export type relayResolverNamedImport_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-raw-response.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-raw-response.expected index 5c4633e02e982..00cd81ef72abb 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-raw-response.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-raw-response.expected @@ -63,10 +63,10 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); declare export opaque type relayResolver_user$fragmentType: FragmentType; export type relayResolver_user$data = {| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, +$fragmentType: relayResolver_user$fragmentType, |}; export type relayResolver_user$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-required.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-required.expected index becd60e35f871..cadb487e33b50 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-required.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-required.expected @@ -25,11 +25,11 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: $NonMaybeType<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>>, + +pop_star_name: $NonMaybeType>, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.expected new file mode 100644 index 0000000000000..0714a76024ea1 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.expected @@ -0,0 +1,50 @@ +==================================== INPUT ==================================== +fragment relayResolver_PopStarNameResolverFragment_name on User { + name +} + +query relayResolver_Query { + me { + pop_star_name + } +} + +# %extensions% + +extend type User { + pop_star_name: [RelayResolverValue] + @relay_resolver( + fragment_name: "relayResolver_PopStarNameResolverFragment_name" + import_path: "./foo/bar/baz/PopStarNameResolver.js" + ) +} +==================================== OUTPUT =================================== +import type { relayResolver_PopStarNameResolverFragment_name$key } from "relayResolver_PopStarNameResolverFragment_name.graphql"; +import userPopStarNameResolverType from "PopStarNameResolver"; +// Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userPopStarNameResolverType: ( + rootKey: relayResolver_PopStarNameResolverFragment_name$key, +) => ?$ReadOnlyArray); +export type relayResolver_Query$variables = {||}; +export type relayResolver_Query$data = {| + +me: ?{| + +pop_star_name: ?ReturnType, + |}, +|}; +export type relayResolver_Query = {| + response: relayResolver_Query$data, + variables: relayResolver_Query$variables, +|}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type relayResolver_PopStarNameResolverFragment_name$fragmentType: FragmentType; +export type relayResolver_PopStarNameResolverFragment_name$data = {| + +name: ?string, + +$fragmentType: relayResolver_PopStarNameResolverFragment_name$fragmentType, +|}; +export type relayResolver_PopStarNameResolverFragment_name$key = { + +$data?: relayResolver_PopStarNameResolverFragment_name$data, + +$fragmentSpreads: relayResolver_PopStarNameResolverFragment_name$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.graphql new file mode 100644 index 0000000000000..de716eea41c10 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-scalar-plural.graphql @@ -0,0 +1,19 @@ +fragment relayResolver_PopStarNameResolverFragment_name on User { + name +} + +query relayResolver_Query { + me { + pop_star_name + } +} + +# %extensions% + +extend type User { + pop_star_name: [RelayResolverValue] + @relay_resolver( + fragment_name: "relayResolver_PopStarNameResolverFragment_name" + import_path: "./foo/bar/baz/PopStarNameResolver.js" + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected index f8fe3a10d05ed..272236b150b4e 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected @@ -25,6 +25,7 @@ interface ClientInterface { type ClientType implements ClientInterface { name: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "ClientTypeResolver" fragment_name: "ClientType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientType") } type ClientTypeWithNestedInterface { @@ -32,8 +33,18 @@ type ClientTypeWithNestedInterface { } extend type User { - pop_star_name: ClientInterface! @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) - pop_star_game: ClientTypeWithNestedInterface! 
@relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) + pop_star_name: ClientInterface + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) + pop_star_game: ClientTypeWithNestedInterface + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) } ==================================== OUTPUT =================================== export type User__pop_star_game$normalization$variables = {||}; @@ -77,7 +88,7 @@ import userPopStarNameResolverType from "PopStarNameResolver"; ) => ?User__pop_star_name$normalization); declare export opaque type Foo_user$fragmentType: FragmentType; export type Foo_user$data = {| - +poppy: {| + +poppy: ?{| +name: ?string, |}, +$fragmentType: Foo_user$fragmentType, diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.graphql index ee85e6e6155fc..82aa40aac3901 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.graphql +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-interface.graphql @@ -24,6 +24,7 @@ interface ClientInterface { type ClientType implements ClientInterface { name: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "ClientTypeResolver" fragment_name: "ClientType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientType") } type ClientTypeWithNestedInterface { @@ -31,6 +32,16 @@ type ClientTypeWithNestedInterface { } extend type User { - pop_star_name: ClientInterface! 
@relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) - pop_star_game: ClientTypeWithNestedInterface! @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) + pop_star_name: ClientInterface + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) + pop_star_game: ClientTypeWithNestedInterface + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) } diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.expected new file mode 100644 index 0000000000000..0a3c8df2fd92b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.expected @@ -0,0 +1,80 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_name { + __typename + } +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +type ClientUser { + name: String +} + +extend type User { + pop_star_name: [ClientUser] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} +==================================== OUTPUT =================================== +export type User__pop_star_name$normalization$variables = {||}; +export type User__pop_star_name$normalization$data = {| + +name: ?string, +|}; +export type User__pop_star_name$normalization = {| + response: User__pop_star_name$normalization$data, + variables: 
User__pop_star_name$normalization$variables, +|}; +------------------------------------------------------------------------------- +import type { PopStarNameResolverFragment_name$key } from "PopStarNameResolverFragment_name.graphql"; +import type { User__pop_star_name$normalization } from "User__pop_star_name$normalization.graphql"; +import type { FragmentType } from "relay-runtime"; +import userPopStarNameResolverType from "PopStarNameResolver"; +// Type assertion validating that `userPopStarNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userPopStarNameResolverType: ( + rootKey: PopStarNameResolverFragment_name$key, +) => ?$ReadOnlyArray); +declare export opaque type Foo_user$fragmentType: FragmentType; +export type Foo_user$data = {| + +poppy: ?$ReadOnlyArray, + +$fragmentType: Foo_user$fragmentType, +|}; +export type Foo_user$key = { + +$data?: Foo_user$data, + +$fragmentSpreads: Foo_user$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type PopStarNameResolverFragment_name$fragmentType: FragmentType; +export type PopStarNameResolverFragment_name$data = {| + +address: ?{| + +street: ?string, + |}, + +name: ?string, + +parents: $ReadOnlyArray<{| + +lastName: ?string, + |}>, + +$fragmentType: PopStarNameResolverFragment_name$fragmentType, +|}; +export type PopStarNameResolverFragment_name$key = { + +$data?: PopStarNameResolverFragment_name$data, + +$fragmentSpreads: PopStarNameResolverFragment_name$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.graphql new file mode 100644 index 0000000000000..b3c369666e1ce --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.graphql @@ -0,0 +1,30 @@ +fragment Foo_user on User { + poppy: pop_star_name { + __typename + } +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +type ClientUser { + name: String +} + +extend type User { + pop_star_name: [ClientUser] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.expected new file mode 100644 index 0000000000000..96b15fee7cb84 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.expected @@ -0,0 +1,63 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [TestEnums] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} +==================================== OUTPUT =================================== +import type { PopStarNameResolverFragment_name$key } from 
"PopStarNameResolverFragment_name.graphql"; +export type TestEnums = "mark" | "zuck" | "%future added value"; +import type { FragmentType } from "relay-runtime"; +import userPopStarNamesResolverType from "PopStarNameResolver"; +// Type assertion validating that `userPopStarNamesResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userPopStarNamesResolverType: ( + rootKey: PopStarNameResolverFragment_name$key, +) => ?$ReadOnlyArray); +declare export opaque type Foo_user$fragmentType: FragmentType; +export type Foo_user$data = {| + +poppy: ?$ReadOnlyArray, + +$fragmentType: Foo_user$fragmentType, +|}; +export type Foo_user$key = { + +$data?: Foo_user$data, + +$fragmentSpreads: Foo_user$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type PopStarNameResolverFragment_name$fragmentType: FragmentType; +export type PopStarNameResolverFragment_name$data = {| + +address: ?{| + +street: ?string, + |}, + +name: ?string, + +parents: $ReadOnlyArray<{| + +lastName: ?string, + |}>, + +$fragmentType: PopStarNameResolverFragment_name$fragmentType, +|}; +export type PopStarNameResolverFragment_name$key = { + +$data?: PopStarNameResolverFragment_name$data, + +$fragmentSpreads: PopStarNameResolverFragment_name$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.graphql new file mode 100644 index 0000000000000..fa5e0781c0efd --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.graphql @@ -0,0 +1,24 @@ +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [TestEnums] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.expected new file mode 100644 index 0000000000000..97261490cb0a7 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.expected @@ -0,0 +1,62 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [RelayResolverValue] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} +==================================== OUTPUT =================================== +import type { PopStarNameResolverFragment_name$key } from "PopStarNameResolverFragment_name.graphql"; +import type { 
FragmentType } from "relay-runtime"; +import userPopStarNamesResolverType from "PopStarNameResolver"; +// Type assertion validating that `userPopStarNamesResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userPopStarNamesResolverType: ( + rootKey: PopStarNameResolverFragment_name$key, +) => ?$ReadOnlyArray); +declare export opaque type Foo_user$fragmentType: FragmentType; +export type Foo_user$data = {| + +poppy: ?ReturnType, + +$fragmentType: Foo_user$fragmentType, +|}; +export type Foo_user$key = { + +$data?: Foo_user$data, + +$fragmentSpreads: Foo_user$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type PopStarNameResolverFragment_name$fragmentType: FragmentType; +export type PopStarNameResolverFragment_name$data = {| + +address: ?{| + +street: ?string, + |}, + +name: ?string, + +parents: $ReadOnlyArray<{| + +lastName: ?string, + |}>, + +$fragmentType: PopStarNameResolverFragment_name$fragmentType, +|}; +export type PopStarNameResolverFragment_name$key = { + +$data?: PopStarNameResolverFragment_name$data, + +$fragmentSpreads: PopStarNameResolverFragment_name$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.graphql new file mode 100644 index 0000000000000..a55769bc58af2 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.graphql @@ -0,0 +1,24 @@ +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [RelayResolverValue] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected index 753c613f5b211..3d970186cc8db 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected @@ -26,10 +26,10 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); declare export opaque type Foo_user$fragmentType: FragmentType; export type Foo_user$data = {| - +poppy: $NonMaybeType<$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>>, + +poppy: $NonMaybeType>, +$fragmentType: Foo_user$fragmentType, |}; export type Foo_user$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected index 23fdbbb3f2baa..eb2432c4cdb80 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected @@ -26,10 +26,10 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); declare export opaque type Foo_user$fragmentType: FragmentType; export type Foo_user$data = {| - +poppy: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +poppy: ?ReturnType, +$fragmentType: Foo_user$fragmentType, |}; export type Foo_user$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.expected new file mode 100644 index 0000000000000..3a47195d2d21a --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.expected @@ -0,0 +1,62 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [String] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} +==================================== OUTPUT =================================== +import type { PopStarNameResolverFragment_name$key } from "PopStarNameResolverFragment_name.graphql"; +import type { FragmentType } from "relay-runtime"; +import userPopStarNamesResolverType from "PopStarNameResolver"; +// Type assertion validating that `userPopStarNamesResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userPopStarNamesResolverType: ( + rootKey: PopStarNameResolverFragment_name$key, +) => ?$ReadOnlyArray); +declare export opaque type Foo_user$fragmentType: FragmentType; +export type Foo_user$data = {| + +poppy: ?$ReadOnlyArray, + +$fragmentType: Foo_user$fragmentType, +|}; +export type Foo_user$key = { + +$data?: Foo_user$data, + +$fragmentSpreads: Foo_user$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type PopStarNameResolverFragment_name$fragmentType: FragmentType; +export type PopStarNameResolverFragment_name$data = {| + +address: ?{| + +street: ?string, + |}, + +name: ?string, + +parents: $ReadOnlyArray<{| + +lastName: ?string, + |}>, + +$fragmentType: PopStarNameResolverFragment_name$fragmentType, +|}; +export type PopStarNameResolverFragment_name$key = { + +$data?: PopStarNameResolverFragment_name$data, + +$fragmentSpreads: PopStarNameResolverFragment_name$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.graphql new file mode 100644 index 0000000000000..fcf08b3fac8e4 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.graphql @@ -0,0 +1,24 @@ +fragment Foo_user on User { + poppy: pop_star_names +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_names: [String] + @relay_resolver( + fragment_name: "PopStarNameResolverFragment_name" + import_path: "PopStarNameResolver" + has_output_type: true + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver.expected index 443f012efb3f6..078036327e151 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver.expected +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/relay-resolver.expected @@ -25,11 +25,11 @@ import userPopStarNameResolverType from "PopStarNameResolver"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userPopStarNameResolverType: ( rootKey: relayResolver_PopStarNameResolverFragment_name$key, -) => mixed); +) => ?mixed); export type relayResolver_Query$variables = {||}; export type relayResolver_Query$data = {| +me: ?{| - +pop_star_name: ?$Call<((...empty[]) => R) => R, typeof userPopStarNameResolverType>, + +pop_star_name: ?ReturnType, |}, |}; export type relayResolver_Query = {| diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.expected new file mode 100644 index 0000000000000..a951ddfb5cda1 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.expected @@ -0,0 +1,41 @@ +==================================== INPUT ==================================== +fragment resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: "id") +} + +type Persian implements Cat { + id: ID! + description: String +} +==================================== OUTPUT =================================== +import type { Tabby____relay_model_instance$data } from "Tabby____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import catDescriptionResolverType from "CatResolver"; +// Type assertion validating that `catDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(catDescriptionResolverType: ( + model: Tabby____relay_model_instance$data['__relay_model_instance'], +) => ?mixed); +declare export opaque type resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$fragmentType: FragmentType; +export type resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$data = {| + +description: ?ReturnType, + +$fragmentType: resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$fragmentType, +|}; +export type resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$key = { + +$data?: resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$data, + +$fragmentSpreads: resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.graphql new file mode 100644 index 0000000000000..d80e4cfd5d9b5 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.graphql @@ -0,0 +1,21 @@ +fragment resolverOnInterfaceOfAllStrongModelTypeWithExtensionFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: "id") +} + +type Persian implements Cat { + id: ID! 
+ description: String +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.expected new file mode 100644 index 0000000000000..072eca9fb431e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.expected @@ -0,0 +1,62 @@ +==================================== INPUT ==================================== +fragment resolverOnInterfaceOfAllStrongModelTypeWithRootFragment on Cat { + description +} + +fragment description_Fragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} + +type Tabby implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} + +type Persian implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +import catDescriptionResolverType from "CatResolver"; +// Type assertion validating that `catDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(catDescriptionResolverType: ( + rootKey: description_Fragment$key, +) => ?mixed); +declare export opaque type description_Fragment$fragmentType: FragmentType; +export type description_Fragment$data = {| + +description: ?ReturnType, + +$fragmentType: description_Fragment$fragmentType, +|}; +export type description_Fragment$key = { + +$data?: description_Fragment$data, + +$fragmentSpreads: description_Fragment$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { description_Fragment$key } from "description_Fragment.graphql"; +import type { FragmentType } from "relay-runtime"; +import catDescriptionResolverType from "CatResolver"; +// Type assertion validating that `catDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(catDescriptionResolverType: ( + rootKey: description_Fragment$key, +) => ?mixed); +declare export opaque type resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$fragmentType: FragmentType; +export type resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$data = {| + +description: ?ReturnType, + +$fragmentType: resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$fragmentType, +|}; +export type resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$key = { + +$data?: resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$data, + +$fragmentSpreads: resolverOnInterfaceOfAllStrongModelTypeWithRootFragment$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.graphql new file mode 100644 index 0000000000000..aa173582dce6c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.graphql @@ -0,0 +1,24 @@ +fragment resolverOnInterfaceOfAllStrongModelTypeWithRootFragment on Cat { + description +} + +fragment description_Fragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} + +type Tabby implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} + +type Persian implements Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver", fragment_name: "description_Fragment") +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.expected new file mode 100644 index 0000000000000..270ea7438b8ad --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.expected @@ -0,0 +1,43 @@ +==================================== INPUT ==================================== +fragment resolverOnInterfaceOfAllStrongModelTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + id: ID! 
+ description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: "id") +} + +type Persian implements Cat { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Persian__id", import_path: "PersianResolver", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +import type { Persian____relay_model_instance$data } from "Persian____relay_model_instance.graphql"; +import type { Tabby____relay_model_instance$data } from "Tabby____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import catDescriptionResolverType from "CatResolver"; +// Type assertion validating that `catDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(catDescriptionResolverType: ( + model: Persian____relay_model_instance$data['__relay_model_instance'] | Tabby____relay_model_instance$data['__relay_model_instance'], +) => ?mixed); +declare export opaque type resolverOnInterfaceOfAllStrongModelTypeFragment$fragmentType: FragmentType; +export type resolverOnInterfaceOfAllStrongModelTypeFragment$data = {| + +description: ?ReturnType, + +$fragmentType: resolverOnInterfaceOfAllStrongModelTypeFragment$fragmentType, +|}; +export type resolverOnInterfaceOfAllStrongModelTypeFragment$key = { + +$data?: resolverOnInterfaceOfAllStrongModelTypeFragment$data, + +$fragmentSpreads: resolverOnInterfaceOfAllStrongModelTypeFragment$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.graphql new file mode 100644 index 0000000000000..6be155ae471ea --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.graphql @@ -0,0 +1,22 @@ +fragment resolverOnInterfaceOfAllStrongModelTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + id: ID! + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: "id") +} + +type Persian implements Cat { + id: ID! + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Persian__id", import_path: "PersianResolver", inject_fragment_data: "id") +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.expected new file mode 100644 index 0000000000000..a9c8504ec1c95 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +fragment resolverOnInterfaceOfAllWeakModelTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: 
"id") +} + +type Persian implements Cat { + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Persian__id", import_path: "PersianResolver", inject_fragment_data: "id") +} +==================================== OUTPUT =================================== +import type { Persian____relay_model_instance$data } from "Persian____relay_model_instance.graphql"; +import type { Tabby____relay_model_instance$data } from "Tabby____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import catDescriptionResolverType from "CatResolver"; +// Type assertion validating that `catDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(catDescriptionResolverType: ( + model: Persian____relay_model_instance$data['__relay_model_instance'] | Tabby____relay_model_instance$data['__relay_model_instance'], +) => ?mixed); +declare export opaque type resolverOnInterfaceOfAllWeakModelTypeFragment$fragmentType: FragmentType; +export type resolverOnInterfaceOfAllWeakModelTypeFragment$data = {| + +description: ?ReturnType, + +$fragmentType: resolverOnInterfaceOfAllWeakModelTypeFragment$fragmentType, +|}; +export type resolverOnInterfaceOfAllWeakModelTypeFragment$key = { + +$data?: resolverOnInterfaceOfAllWeakModelTypeFragment$data, + +$fragmentSpreads: resolverOnInterfaceOfAllWeakModelTypeFragment$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.graphql new file mode 100644 index 0000000000000..2afeafd3e4902 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.graphql @@ -0,0 +1,19 @@ +fragment resolverOnInterfaceOfAllWeakModelTypeFragment on Cat { + description +} + +# %extensions% + +interface Cat { + description: String @relay_resolver(import_path: "CatResolver") +} + +type Tabby implements Cat { + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Tabby__id", import_path: "TabbyResolver", inject_fragment_data: "id") +} + +type Persian implements Cat { + description: String + __relay_model_instance: RelayResolverValue @relay_resolver(fragment_name: "Persian__id", import_path: "PersianResolver", inject_fragment_data: "id") +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.expected new file mode 100644 index 0000000000000..02cfb1b856ee6 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.expected @@ -0,0 +1,50 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +query MyQuery @raw_response_type @throwOnFieldError { + opera { + composer { + name + } + cast { + singer { + name + } + character + } + } +} +==================================== OUTPUT =================================== +export type MyQuery$variables = {||}; +export type MyQuery$data = {| + +opera: ?{| + +cast: $ReadOnlyArray<{| + +character: string, + +singer: {| + +name: ?string, + |}, + |}>, 
+ +composer: {| + +name: ?string, + |}, + |}, +|}; +export type MyQuery$rawResponse = {| + +opera?: ?{| + +cast: ?$ReadOnlyArray, + +composer: ?{| + +id: string, + +name: ?string, + |}, + |}, +|}; +export type MyQuery = {| + rawResponse: MyQuery$rawResponse, + response: MyQuery$data, + variables: MyQuery$variables, +|}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.graphql new file mode 100644 index 0000000000000..9467ca18b3f1b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_in_raw_response.graphql @@ -0,0 +1,14 @@ +# relay:experimental_emit_semantic_nullability_types +query MyQuery @raw_response_type @throwOnFieldError { + opera { + composer { + name + } + cast { + singer { + name + } + character + } + } +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.expected new file mode 100644 index 0000000000000..08bf50318c9d9 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.expected @@ -0,0 +1,23 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on Screen @throwOnFieldError { + pixels +} + +%extensions% + +type Screen { + pixels: [[Int]] @semanticNonNull(levels: [2]) +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +pixels: ?$ReadOnlyArray>, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + 
+$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.graphql new file mode 100644 index 0000000000000..b47dcf2474175 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_items_in_matrix.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on Screen @throwOnFieldError { + pixels +} + +%extensions% + +type Screen { + pixels: [[Int]] @semanticNonNull(levels: [2]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.expected new file mode 100644 index 0000000000000..325fbef507f2e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.expected @@ -0,0 +1,63 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend @waterfall { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} +==================================== OUTPUT =================================== +import type { RefetchableClientEdgeQuery_MyFragment_best_friend$fragmentType } from "RefetchableClientEdgeQuery_MyFragment_best_friend.graphql"; +export type ClientEdgeQuery_MyFragment_best_friend$variables = {| + id: string, +|}; +export type ClientEdgeQuery_MyFragment_best_friend$data = {| + +node: ?{| + +$fragmentSpreads: RefetchableClientEdgeQuery_MyFragment_best_friend$fragmentType, + |}, +|}; +export type ClientEdgeQuery_MyFragment_best_friend = {| + 
response: ClientEdgeQuery_MyFragment_best_friend$data, + variables: ClientEdgeQuery_MyFragment_best_friend$variables, +|}; +------------------------------------------------------------------------------- +import type { FragmentType, DataID } from "relay-runtime"; +import clientUserBestFriendResolverType from "bar"; +// Type assertion validating that `clientUserBestFriendResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(clientUserBestFriendResolverType: () => {| + +id: DataID, +|}); +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +best_friend: {| + +name: ?string, + |}, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; +------------------------------------------------------------------------------- +import type { FragmentType } from "relay-runtime"; +declare export opaque type RefetchableClientEdgeQuery_MyFragment_best_friend$fragmentType: FragmentType; +import type { ClientEdgeQuery_MyFragment_best_friend$variables } from "ClientEdgeQuery_MyFragment_best_friend.graphql"; +export type RefetchableClientEdgeQuery_MyFragment_best_friend$data = {| + +id: string, + +name: ?string, + +$fragmentType: RefetchableClientEdgeQuery_MyFragment_best_friend$fragmentType, +|}; +export type RefetchableClientEdgeQuery_MyFragment_best_friend$key = { + +$data?: RefetchableClientEdgeQuery_MyFragment_best_friend$data, + +$fragmentSpreads: RefetchableClientEdgeQuery_MyFragment_best_friend$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.graphql new file mode 100644 index 0000000000000..cf8fb71f1bd2e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_resolver.graphql @@ -0,0 +1,14 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend @waterfall { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.expected new file mode 100644 index 0000000000000..afc16969dc600 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.expected @@ -0,0 +1,48 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + blob { + data + } +} + +%extensions% + +type ClientUser { + blob: Blob @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + has_output_type: true + ) +} + +type Blob { + data: String +} +==================================== OUTPUT =================================== +export type ClientUser__blob$normalization$variables = {||}; +export type ClientUser__blob$normalization$data = {| + +data: ?string, +|}; +export type ClientUser__blob$normalization = {| + response: ClientUser__blob$normalization$data, + variables: ClientUser__blob$normalization$variables, +|}; 
+------------------------------------------------------------------------------- +import type { ClientUser__blob$normalization } from "ClientUser__blob$normalization.graphql"; +import type { FragmentType } from "relay-runtime"; +import clientUserBlobResolverType from "bar"; +// Type assertion validating that `clientUserBlobResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(clientUserBlobResolverType: () => ClientUser__blob$normalization); +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +blob: {| + +data: ?string, + |}, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.graphql new file mode 100644 index 0000000000000..195659e6e9ef8 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.graphql @@ -0,0 +1,19 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + blob { + data + } +} + +%extensions% + +type ClientUser { + blob: Blob @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + has_output_type: true + ) +} + +type Blob { + data: String +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.expected new file mode 100644 index 0000000000000..5842f6d34efef --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.expected @@ -0,0 +1,27 @@ 
+==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +best_friend: {| + +name: ?string, + |}, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.graphql new file mode 100644 index 0000000000000..ef205348eb5ba --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_linked_field.graphql @@ -0,0 +1,12 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.expected new file mode 100644 index 0000000000000..61b3a35e2c638 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.expected @@ -0,0 +1,23 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + 
+%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [0, 1]) +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +favorite_numbers: $ReadOnlyArray, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.graphql new file mode 100644 index 0000000000000..9a0128042baa9 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_and_list_item.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [0, 1]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.expected new file mode 100644 index 0000000000000..bfd6b44773a57 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.expected @@ -0,0 +1,23 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [1]) +} +==================================== OUTPUT =================================== +import type { FragmentType } 
from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +favorite_numbers: ?$ReadOnlyArray, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.graphql new file mode 100644 index 0000000000000..e9c0a45b573ad --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_list_item.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [1]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.expected new file mode 100644 index 0000000000000..4b095cbc73196 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.expected @@ -0,0 +1,23 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +name: string, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: 
MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.graphql new file mode 100644 index 0000000000000..4ff85acbc9368 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.expected new file mode 100644 index 0000000000000..a5f0034010bc3 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.expected @@ -0,0 +1,23 @@ +==================================== INPUT ==================================== +# Note: No comment here enabling `experimental_emit_semantic_nullability_types` +fragment MyFragment on ClientUser { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +name: ?string, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... 
+}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.graphql new file mode 100644 index 0000000000000..c0d5cee2e7273 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.graphql @@ -0,0 +1,10 @@ +# Note: No comment here enabling `experimental_emit_semantic_nullability_types` +fragment MyFragment on ClientUser { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.expected new file mode 100644 index 0000000000000..f9434201982e5 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.expected @@ -0,0 +1,29 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +import clientUserNameResolverType from "bar"; +// Type assertion validating that `clientUserNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(clientUserNameResolverType: () => mixed); +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = {| + +name: $NonMaybeType>, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.graphql new file mode 100644 index 0000000000000..ad5aacd8f6e8c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_non_null_scalar_resolver.graphql @@ -0,0 +1,12 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.expected b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.expected new file mode 100644 index 0000000000000..c69ffd11cf3d2 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.expected @@ -0,0 +1,30 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + nullable_name @required(action: LOG) + resolver_name @required(action: LOG) +} + +%extensions% + +type ClientUser { + nullable_name: String + resolver_name: RelayResolverValue @relay_resolver(import_path: "CatResolver") +} +==================================== OUTPUT =================================== +import type { FragmentType } from "relay-runtime"; +import 
clientUserResolverNameResolverType from "CatResolver"; +// Type assertion validating that `clientUserResolverNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(clientUserResolverNameResolverType: () => ?mixed); +declare export opaque type MyFragment$fragmentType: FragmentType; +export type MyFragment$data = ?{| + +nullable_name: string, + +resolver_name: $NonMaybeType>, + +$fragmentType: MyFragment$fragmentType, +|}; +export type MyFragment$key = { + +$data?: MyFragment$data, + +$fragmentSpreads: MyFragment$fragmentType, + ... +}; diff --git a/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.graphql b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.graphql new file mode 100644 index 0000000000000..2f263396d941c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow/fixtures/semantic_null_required_throw_on_error.graphql @@ -0,0 +1,12 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + nullable_name @required(action: LOG) + resolver_name @required(action: LOG) +} + +%extensions% + +type ClientUser { + nullable_name: String + resolver_name: RelayResolverValue @relay_resolver(import_path: "CatResolver") +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow/mod.rs b/compiler/crates/relay-typegen/tests/generate_flow/mod.rs deleted file mode 100644 index 4dcfdd65f42fd..0000000000000 --- a/compiler/crates/relay-typegen/tests/generate_flow/mod.rs +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::ConsoleLogger; -use common::FeatureFlag; -use common::FeatureFlags; -use common::ScalarName; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use fnv::FnvBuildHasher; -use fnv::FnvHashMap; -use graphql_ir::build_ir_in_relay_mode; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use graphql_test_helpers::diagnostics_to_sorted_string; -use indexmap::IndexMap; -use intern::string_key::Intern; -use relay_codegen::JsModuleFormat; -use relay_config::CustomScalarType; -use relay_config::CustomScalarTypeImport; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::apply_transforms; -use relay_typegen::FragmentLocations; -use relay_typegen::TypegenConfig; -use relay_typegen::TypegenLanguage; - -type FnvIndexMap = IndexMap; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts = fixture.content.split("%extensions%").collect::>(); - let (source, schema) = match parts.as_slice() { - [source, extensions] => (source, get_test_schema_with_extensions(extensions)), - [source] => (source, get_test_schema()), - _ => panic!(), - }; - - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let mut sources = FnvHashMap::default(); - sources.insert(source_location, source); - let ast = parse_executable(source, source_location) - .map_err(|diagnostics| diagnostics_to_sorted_string(source, &diagnostics))?; - let feature_flags = FeatureFlags { - no_inline: FeatureFlag::Limited { - allowlist: [ - "noInlineFragment_address".intern(), - "noInlineFragment_user".intern(), - "MarkdownUserNameRenderer_name".intern(), - "Test_userRenderer".intern(), - "PlainUserNameRenderer_name".intern(), - ] - .into_iter() - .collect(), - }, - enable_flight_transform: true, - enable_relay_resolver_transform: true, - actor_change_support: FeatureFlag::Enabled, 
- enable_fragment_aliases: FeatureFlag::Enabled, - ..Default::default() - }; - let ir = build_ir_in_relay_mode(&schema, &ast.definitions) - .map_err(|diagnostics| diagnostics_to_sorted_string(source, &diagnostics))?; - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let mut custom_scalar_types = FnvIndexMap::default(); - - custom_scalar_types.insert( - ScalarName("Boolean".intern()), - CustomScalarType::Name("CustomBoolean".intern()), - ); - custom_scalar_types.insert( - ScalarName("JSON".intern()), - CustomScalarType::Path(CustomScalarTypeImport { - name: "JSON".intern(), - path: "TypeDefsFile".into(), - }), - ); - let project_config = ProjectConfig { - name: "test".intern(), - js_module_format: JsModuleFormat::Haste, - feature_flags: Arc::new(feature_flags), - typegen_config: TypegenConfig { - language: TypegenLanguage::Flow, - custom_scalar_types, - ..Default::default() - }, - ..Default::default() - }; - - let programs = apply_transforms( - &project_config, - Arc::new(program), - Default::default(), - Arc::new(ConsoleLogger), - None, - None, - ) - .map_err(|diagnostics| diagnostics_to_sorted_string(source, &diagnostics))?; - - let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); - let mut operations: Vec<_> = programs.typegen.operations().collect(); - operations.sort_by_key(|op| op.name.item.0); - let operation_strings = operations.into_iter().map(|typegen_operation| { - // `normalization` ASTs are present unless we are processing an updatable query - // In that case, `reader` ASTs are present. 
- let op = programs - .normalization - .operation(OperationDefinitionName(typegen_operation.name.item.0)) - .unwrap_or_else(|| { - programs - .reader - .operation(OperationDefinitionName(typegen_operation.name.item.0)) - .unwrap_or_else(|| { - panic!( - "Couldn't find normalization or reader operations for {}", - typegen_operation.name.item - ) - }) - }); - - relay_typegen::generate_operation_type_exports_section( - typegen_operation, - op, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut fragments: Vec<_> = programs.typegen.fragments().collect(); - fragments.sort_by_key(|frag| frag.name.item); - let fragment_strings = fragments.into_iter().map(|frag| { - relay_typegen::generate_fragment_type_exports_section( - frag, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut result: Vec = operation_strings.collect(); - result.extend(fragment_strings); - Ok(result - .join("-------------------------------------------------------------------------------\n")) -} diff --git a/compiler/crates/relay-typegen/tests/generate_flow_test.rs b/compiler/crates/relay-typegen/tests/generate_flow_test.rs index 2264196f311af..57fe70aa83c75 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow_test.rs +++ b/compiler/crates/relay-typegen/tests/generate_flow_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<> + * @generated SignedSource<> */ mod generate_flow; @@ -12,772 +12,912 @@ mod generate_flow; use generate_flow::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn actor_change() { +#[tokio::test] +async fn actor_change() { let input = include_str!("generate_flow/fixtures/actor-change.graphql"); let expected = include_str!("generate_flow/fixtures/actor-change.expected"); - test_fixture(transform_fixture, "actor-change.graphql", "generate_flow/fixtures/actor-change.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change.graphql", "generate_flow/fixtures/actor-change.expected", input, expected).await; } -#[test] -fn actor_change_with_query() { +#[tokio::test] +async fn actor_change_with_query() { let input = include_str!("generate_flow/fixtures/actor-change-with-query.graphql"); let expected = include_str!("generate_flow/fixtures/actor-change-with-query.expected"); - test_fixture(transform_fixture, "actor-change-with-query.graphql", "generate_flow/fixtures/actor-change-with-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "actor-change-with-query.graphql", "generate_flow/fixtures/actor-change-with-query.expected", input, expected).await; } -#[test] -fn aliased_fragment_raw_response_type() { +#[tokio::test] +async fn aliased_fragment_raw_response_type() { let input = include_str!("generate_flow/fixtures/aliased-fragment-raw-response-type.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-fragment-raw-response-type.expected"); - test_fixture(transform_fixture, "aliased-fragment-raw-response-type.graphql", "generate_flow/fixtures/aliased-fragment-raw-response-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-raw-response-type.graphql", "generate_flow/fixtures/aliased-fragment-raw-response-type.expected", input, expected).await; } -#[test] -fn aliased_fragment_spread() { +#[tokio::test] +async fn 
aliased_fragment_spread() { let input = include_str!("generate_flow/fixtures/aliased-fragment-spread.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-fragment-spread.expected"); - test_fixture(transform_fixture, "aliased-fragment-spread.graphql", "generate_flow/fixtures/aliased-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-spread.graphql", "generate_flow/fixtures/aliased-fragment-spread.expected", input, expected).await; } -#[test] -fn aliased_fragment_spread_in_abstract_selection() { +#[tokio::test] +async fn aliased_fragment_spread_in_abstract_selection() { let input = include_str!("generate_flow/fixtures/aliased-fragment-spread-in-abstract-selection.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-fragment-spread-in-abstract-selection.expected"); - test_fixture(transform_fixture, "aliased-fragment-spread-in-abstract-selection.graphql", "generate_flow/fixtures/aliased-fragment-spread-in-abstract-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-spread-in-abstract-selection.graphql", "generate_flow/fixtures/aliased-fragment-spread-in-abstract-selection.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread() { +#[tokio::test] +async fn aliased_inline_fragment_spread() { let input = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread_without_type_condition_fragment_root() { +#[tokio::test] 
+async fn aliased_inline_fragment_spread_without_type_condition_fragment_root() { let input = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread_without_type_condition_linked_field() { +#[tokio::test] +async fn aliased_inline_fragment_spread_without_type_condition_linked_field() { let input = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-linked-field.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-linked-field.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread_without_type_condition_query_root() { +#[tokio::test] +async fn aliased_inline_fragment_spread_without_type_condition_query_root() { let input = 
include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.graphql"); let expected = include_str!("generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-query-root.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-query-root.graphql", "generate_flow/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected", input, expected).await; } -#[test] -fn conditional() { +#[tokio::test] +async fn conditional() { let input = include_str!("generate_flow/fixtures/conditional.graphql"); let expected = include_str!("generate_flow/fixtures/conditional.expected"); - test_fixture(transform_fixture, "conditional.graphql", "generate_flow/fixtures/conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "conditional.graphql", "generate_flow/fixtures/conditional.expected", input, expected).await; } -#[test] -fn custom_scalar_type_import() { +#[tokio::test] +async fn custom_scalar_type_import() { let input = include_str!("generate_flow/fixtures/custom-scalar-type-import.graphql"); let expected = include_str!("generate_flow/fixtures/custom-scalar-type-import.expected"); - test_fixture(transform_fixture, "custom-scalar-type-import.graphql", "generate_flow/fixtures/custom-scalar-type-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "custom-scalar-type-import.graphql", "generate_flow/fixtures/custom-scalar-type-import.expected", input, expected).await; } -#[test] -fn fragment_spread() { +#[tokio::test] +async fn default_input() { + let input = include_str!("generate_flow/fixtures/default-input.graphql"); + let expected = 
include_str!("generate_flow/fixtures/default-input.expected"); + test_fixture(transform_fixture, file!(), "default-input.graphql", "generate_flow/fixtures/default-input.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_spread() { let input = include_str!("generate_flow/fixtures/fragment-spread.graphql"); let expected = include_str!("generate_flow/fixtures/fragment-spread.expected"); - test_fixture(transform_fixture, "fragment-spread.graphql", "generate_flow/fixtures/fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-spread.graphql", "generate_flow/fixtures/fragment-spread.expected", input, expected).await; } -#[test] -fn inline_fragment() { +#[tokio::test] +async fn inline_fragment() { let input = include_str!("generate_flow/fixtures/inline-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/inline-fragment.expected"); - test_fixture(transform_fixture, "inline-fragment.graphql", "generate_flow/fixtures/inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment.graphql", "generate_flow/fixtures/inline-fragment.expected", input, expected).await; } -#[test] -fn linked_field() { +#[tokio::test] +async fn linked_field() { let input = include_str!("generate_flow/fixtures/linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/linked-field.expected"); - test_fixture(transform_fixture, "linked-field.graphql", "generate_flow/fixtures/linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field.graphql", "generate_flow/fixtures/linked-field.expected", input, expected).await; } -#[test] -fn match_field() { +#[tokio::test] +async fn match_field() { let input = include_str!("generate_flow/fixtures/match-field.graphql"); let expected = include_str!("generate_flow/fixtures/match-field.expected"); - test_fixture(transform_fixture, "match-field.graphql", 
"generate_flow/fixtures/match-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field.graphql", "generate_flow/fixtures/match-field.expected", input, expected).await; } -#[test] -fn match_field_in_query() { +#[tokio::test] +async fn match_field_in_query() { let input = include_str!("generate_flow/fixtures/match-field-in-query.graphql"); let expected = include_str!("generate_flow/fixtures/match-field-in-query.expected"); - test_fixture(transform_fixture, "match-field-in-query.graphql", "generate_flow/fixtures/match-field-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field-in-query.graphql", "generate_flow/fixtures/match-field-in-query.expected", input, expected).await; } -#[test] -fn mutation() { +#[tokio::test] +async fn mutation() { let input = include_str!("generate_flow/fixtures/mutation.graphql"); let expected = include_str!("generate_flow/fixtures/mutation.expected"); - test_fixture(transform_fixture, "mutation.graphql", "generate_flow/fixtures/mutation.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation.graphql", "generate_flow/fixtures/mutation.expected", input, expected).await; } -#[test] -fn mutation_input_has_array() { +#[tokio::test] +async fn mutation_input_has_array() { let input = include_str!("generate_flow/fixtures/mutation-input-has-array.graphql"); let expected = include_str!("generate_flow/fixtures/mutation-input-has-array.expected"); - test_fixture(transform_fixture, "mutation-input-has-array.graphql", "generate_flow/fixtures/mutation-input-has-array.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-input-has-array.graphql", "generate_flow/fixtures/mutation-input-has-array.expected", input, expected).await; } -#[test] -fn mutation_with_client_extension() { +#[tokio::test] +async fn mutation_with_client_extension() { let input = include_str!("generate_flow/fixtures/mutation-with-client-extension.graphql"); let 
expected = include_str!("generate_flow/fixtures/mutation-with-client-extension.expected"); - test_fixture(transform_fixture, "mutation-with-client-extension.graphql", "generate_flow/fixtures/mutation-with-client-extension.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-client-extension.graphql", "generate_flow/fixtures/mutation-with-client-extension.expected", input, expected).await; } -#[test] -fn mutation_with_enums_on_fragment() { +#[tokio::test] +async fn mutation_with_enums_on_fragment() { let input = include_str!("generate_flow/fixtures/mutation-with-enums-on-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/mutation-with-enums-on-fragment.expected"); - test_fixture(transform_fixture, "mutation-with-enums-on-fragment.graphql", "generate_flow/fixtures/mutation-with-enums-on-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-enums-on-fragment.graphql", "generate_flow/fixtures/mutation-with-enums-on-fragment.expected", input, expected).await; } -#[test] -fn mutation_with_nested_fragments() { +#[tokio::test] +async fn mutation_with_nested_fragments() { let input = include_str!("generate_flow/fixtures/mutation-with-nested-fragments.graphql"); let expected = include_str!("generate_flow/fixtures/mutation-with-nested-fragments.expected"); - test_fixture(transform_fixture, "mutation-with-nested-fragments.graphql", "generate_flow/fixtures/mutation-with-nested-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-nested-fragments.graphql", "generate_flow/fixtures/mutation-with-nested-fragments.expected", input, expected).await; } -#[test] -fn mutation_with_response_on_inline_fragments() { +#[tokio::test] +async fn mutation_with_response_on_inline_fragments() { let input = include_str!("generate_flow/fixtures/mutation-with-response-on-inline-fragments.graphql"); let expected = 
include_str!("generate_flow/fixtures/mutation-with-response-on-inline-fragments.expected"); - test_fixture(transform_fixture, "mutation-with-response-on-inline-fragments.graphql", "generate_flow/fixtures/mutation-with-response-on-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-response-on-inline-fragments.graphql", "generate_flow/fixtures/mutation-with-response-on-inline-fragments.expected", input, expected).await; } -#[test] -fn no_inline_fragment() { +#[tokio::test] +async fn no_inline_fragment() { let input = include_str!("generate_flow/fixtures/no-inline-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/no-inline-fragment.expected"); - test_fixture(transform_fixture, "no-inline-fragment.graphql", "generate_flow/fixtures/no-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-inline-fragment.graphql", "generate_flow/fixtures/no-inline-fragment.expected", input, expected).await; } -#[test] -fn plural_fragment() { +#[tokio::test] +async fn plural_fragment() { let input = include_str!("generate_flow/fixtures/plural-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/plural-fragment.expected"); - test_fixture(transform_fixture, "plural-fragment.graphql", "generate_flow/fixtures/plural-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "plural-fragment.graphql", "generate_flow/fixtures/plural-fragment.expected", input, expected).await; } -#[test] -fn query_mixed_provided_variables() { +#[tokio::test] +async fn query_mixed_provided_variables() { let input = include_str!("generate_flow/fixtures/query-mixed-provided-variables.graphql"); let expected = include_str!("generate_flow/fixtures/query-mixed-provided-variables.expected"); - test_fixture(transform_fixture, "query-mixed-provided-variables.graphql", "generate_flow/fixtures/query-mixed-provided-variables.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "query-mixed-provided-variables.graphql", "generate_flow/fixtures/query-mixed-provided-variables.expected", input, expected).await; } -#[test] -fn query_only_provided_variables() { +#[tokio::test] +async fn query_only_provided_variables() { let input = include_str!("generate_flow/fixtures/query-only-provided-variables.graphql"); let expected = include_str!("generate_flow/fixtures/query-only-provided-variables.expected"); - test_fixture(transform_fixture, "query-only-provided-variables.graphql", "generate_flow/fixtures/query-only-provided-variables.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-only-provided-variables.graphql", "generate_flow/fixtures/query-only-provided-variables.expected", input, expected).await; } -#[test] -fn query_provided_variables_custom_scalar() { +#[tokio::test] +async fn query_provided_variables_custom_scalar() { let input = include_str!("generate_flow/fixtures/query-provided-variables-custom-scalar.graphql"); let expected = include_str!("generate_flow/fixtures/query-provided-variables-custom-scalar.expected"); - test_fixture(transform_fixture, "query-provided-variables-custom-scalar.graphql", "generate_flow/fixtures/query-provided-variables-custom-scalar.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-provided-variables-custom-scalar.graphql", "generate_flow/fixtures/query-provided-variables-custom-scalar.expected", input, expected).await; } -#[test] -fn query_with_handles() { +#[tokio::test] +async fn query_with_handles() { let input = include_str!("generate_flow/fixtures/query-with-handles.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-handles.expected"); - test_fixture(transform_fixture, "query-with-handles.graphql", "generate_flow/fixtures/query-with-handles.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-handles.graphql", 
"generate_flow/fixtures/query-with-handles.expected", input, expected).await; } -#[test] -fn query_with_match_fields() { +#[tokio::test] +async fn query_with_match_fields() { let input = include_str!("generate_flow/fixtures/query-with-match-fields.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-match-fields.expected"); - test_fixture(transform_fixture, "query-with-match-fields.graphql", "generate_flow/fixtures/query-with-match-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-fields.graphql", "generate_flow/fixtures/query-with-match-fields.expected", input, expected).await; } -#[test] -fn query_with_module_field() { +#[tokio::test] +async fn query_with_module_field() { let input = include_str!("generate_flow/fixtures/query-with-module-field.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-module-field.expected"); - test_fixture(transform_fixture, "query-with-module-field.graphql", "generate_flow/fixtures/query-with-module-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-module-field.graphql", "generate_flow/fixtures/query-with-module-field.expected", input, expected).await; } -#[test] -fn query_with_multiple_match_fields() { +#[tokio::test] +async fn query_with_multiple_match_fields() { let input = include_str!("generate_flow/fixtures/query-with-multiple-match-fields.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-multiple-match-fields.expected"); - test_fixture(transform_fixture, "query-with-multiple-match-fields.graphql", "generate_flow/fixtures/query-with-multiple-match-fields.expected", input, expected); -} - -#[test] -fn query_with_raw_response_and_client_components() { - let input = include_str!("generate_flow/fixtures/query_with_raw_response_and_client_components.graphql"); - let expected = include_str!("generate_flow/fixtures/query_with_raw_response_and_client_components.expected"); - 
test_fixture(transform_fixture, "query_with_raw_response_and_client_components.graphql", "generate_flow/fixtures/query_with_raw_response_and_client_components.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-multiple-match-fields.graphql", "generate_flow/fixtures/query-with-multiple-match-fields.expected", input, expected).await; } -#[test] -fn query_with_raw_response_on_conditional() { +#[tokio::test] +async fn query_with_raw_response_on_conditional() { let input = include_str!("generate_flow/fixtures/query-with-raw-response-on-conditional.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-raw-response-on-conditional.expected"); - test_fixture(transform_fixture, "query-with-raw-response-on-conditional.graphql", "generate_flow/fixtures/query-with-raw-response-on-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-raw-response-on-conditional.graphql", "generate_flow/fixtures/query-with-raw-response-on-conditional.expected", input, expected).await; } -#[test] -fn query_with_raw_response_on_literal_conditional() { +#[tokio::test] +async fn query_with_raw_response_on_literal_conditional() { let input = include_str!("generate_flow/fixtures/query-with-raw-response-on-literal-conditional.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-raw-response-on-literal-conditional.expected"); - test_fixture(transform_fixture, "query-with-raw-response-on-literal-conditional.graphql", "generate_flow/fixtures/query-with-raw-response-on-literal-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-raw-response-on-literal-conditional.graphql", "generate_flow/fixtures/query-with-raw-response-on-literal-conditional.expected", input, expected).await; } -#[test] -fn query_with_stream() { +#[tokio::test] +async fn query_with_stream() { let input = include_str!("generate_flow/fixtures/query-with-stream.graphql"); let expected 
= include_str!("generate_flow/fixtures/query-with-stream.expected"); - test_fixture(transform_fixture, "query-with-stream.graphql", "generate_flow/fixtures/query-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-stream.graphql", "generate_flow/fixtures/query-with-stream.expected", input, expected).await; } -#[test] -fn query_with_stream_connection() { +#[tokio::test] +async fn query_with_stream_connection() { let input = include_str!("generate_flow/fixtures/query-with-stream-connection.graphql"); let expected = include_str!("generate_flow/fixtures/query-with-stream-connection.expected"); - test_fixture(transform_fixture, "query-with-stream-connection.graphql", "generate_flow/fixtures/query-with-stream-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-stream-connection.graphql", "generate_flow/fixtures/query-with-stream-connection.expected", input, expected).await; } -#[test] -fn recursive_fragments() { +#[tokio::test] +async fn recursive_fragments() { let input = include_str!("generate_flow/fixtures/recursive-fragments.graphql"); let expected = include_str!("generate_flow/fixtures/recursive-fragments.expected"); - test_fixture(transform_fixture, "recursive-fragments.graphql", "generate_flow/fixtures/recursive-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "recursive-fragments.graphql", "generate_flow/fixtures/recursive-fragments.expected", input, expected).await; } -#[test] -fn refetchable() { +#[tokio::test] +async fn refetchable() { let input = include_str!("generate_flow/fixtures/refetchable.graphql"); let expected = include_str!("generate_flow/fixtures/refetchable.expected"); - test_fixture(transform_fixture, "refetchable.graphql", "generate_flow/fixtures/refetchable.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable.graphql", "generate_flow/fixtures/refetchable.expected", input, expected).await; } 
-#[test] -fn refetchable_fragment() { +#[tokio::test] +async fn refetchable_fragment() { let input = include_str!("generate_flow/fixtures/refetchable-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/refetchable-fragment.expected"); - test_fixture(transform_fixture, "refetchable-fragment.graphql", "generate_flow/fixtures/refetchable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment.graphql", "generate_flow/fixtures/refetchable-fragment.expected", input, expected).await; } -#[test] -fn regular_query_with_assignable_fragment_spread() { +#[tokio::test] +async fn regular_query_with_assignable_fragment_spread() { let input = include_str!("generate_flow/fixtures/regular-query-with-assignable-fragment-spread.graphql"); let expected = include_str!("generate_flow/fixtures/regular-query-with-assignable-fragment-spread.expected"); - test_fixture(transform_fixture, "regular-query-with-assignable-fragment-spread.graphql", "generate_flow/fixtures/regular-query-with-assignable-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "regular-query-with-assignable-fragment-spread.graphql", "generate_flow/fixtures/regular-query-with-assignable-fragment-spread.expected", input, expected).await; } -#[test] -fn relay_client_id_field() { +#[tokio::test] +async fn relay_client_id_field() { let input = include_str!("generate_flow/fixtures/relay-client-id-field.graphql"); let expected = include_str!("generate_flow/fixtures/relay-client-id-field.expected"); - test_fixture(transform_fixture, "relay-client-id-field.graphql", "generate_flow/fixtures/relay-client-id-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-client-id-field.graphql", "generate_flow/fixtures/relay-client-id-field.expected", input, expected).await; } -#[test] -fn relay_live_resolver() { +#[tokio::test] +async fn relay_live_resolver() { let input = 
include_str!("generate_flow/fixtures/relay-live-resolver.graphql"); let expected = include_str!("generate_flow/fixtures/relay-live-resolver.expected"); - test_fixture(transform_fixture, "relay-live-resolver.graphql", "generate_flow/fixtures/relay-live-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver.graphql", "generate_flow/fixtures/relay-live-resolver.expected", input, expected).await; } -#[test] -fn relay_live_resolver_no_fragment() { +#[tokio::test] +async fn relay_live_resolver_no_fragment() { let input = include_str!("generate_flow/fixtures/relay-live-resolver-no-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/relay-live-resolver-no-fragment.expected"); - test_fixture(transform_fixture, "relay-live-resolver-no-fragment.graphql", "generate_flow/fixtures/relay-live-resolver-no-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver-no-fragment.graphql", "generate_flow/fixtures/relay-live-resolver-no-fragment.expected", input, expected).await; } -#[test] -fn relay_live_resolver_with_field_args() { +#[tokio::test] +async fn relay_live_resolver_with_field_args() { let input = include_str!("generate_flow/fixtures/relay-live-resolver-with-field-args.graphql"); let expected = include_str!("generate_flow/fixtures/relay-live-resolver-with-field-args.expected"); - test_fixture(transform_fixture, "relay-live-resolver-with-field-args.graphql", "generate_flow/fixtures/relay-live-resolver-with-field-args.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver-with-field-args.graphql", "generate_flow/fixtures/relay-live-resolver-with-field-args.expected", input, expected).await; } -#[test] -fn relay_live_resolver_with_field_args_no_fragment() { +#[tokio::test] +async fn relay_live_resolver_with_field_args_no_fragment() { let input = 
include_str!("generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected"); - test_fixture(transform_fixture, "relay-live-resolver-with-field-args-no-fragment.graphql", "generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-live-resolver-with-field-args-no-fragment.graphql", "generate_flow/fixtures/relay-live-resolver-with-field-args-no-fragment.expected", input, expected).await; } -#[test] -fn relay_resolver() { +#[tokio::test] +async fn relay_resolver() { let input = include_str!("generate_flow/fixtures/relay-resolver.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver.expected"); - test_fixture(transform_fixture, "relay-resolver.graphql", "generate_flow/fixtures/relay-resolver.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver.graphql", "generate_flow/fixtures/relay-resolver.expected", input, expected).await; } -#[test] -fn relay_resolver_client_edge() { +#[tokio::test] +async fn relay_resolver_client_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-client-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-client-edge.graphql", "generate_flow/fixtures/relay-resolver-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-client-edge.graphql", "generate_flow/fixtures/relay-resolver-client-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_client_edge_required() { +#[tokio::test] +async fn relay_resolver_client_edge_required() { let input = include_str!("generate_flow/fixtures/relay-resolver-client-edge-required.graphql"); let expected = 
include_str!("generate_flow/fixtures/relay-resolver-client-edge-required.expected"); - test_fixture(transform_fixture, "relay-resolver-client-edge-required.graphql", "generate_flow/fixtures/relay-resolver-client-edge-required.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-client-edge-required.graphql", "generate_flow/fixtures/relay-resolver-client-edge-required.expected", input, expected).await; } -#[test] -fn relay_resolver_client_edge_required_edge() { +#[tokio::test] +async fn relay_resolver_client_edge_required_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-client-edge-required-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-client-edge-required-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-client-edge-required-edge.graphql", "generate_flow/fixtures/relay-resolver-client-edge-required-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-client-edge-required-edge.graphql", "generate_flow/fixtures/relay-resolver-client-edge-required-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_in_fragment() { +#[tokio::test] +async fn relay_resolver_in_fragment() { let input = include_str!("generate_flow/fixtures/relay-resolver-in-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-in-fragment.expected"); - test_fixture(transform_fixture, "relay-resolver-in-fragment.graphql", "generate_flow/fixtures/relay-resolver-in-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-in-fragment.graphql", "generate_flow/fixtures/relay-resolver-in-fragment.expected", input, expected).await; } -#[test] -fn relay_resolver_inject_fragment_data() { +#[tokio::test] +async fn relay_resolver_inject_fragment_data() { let input = include_str!("generate_flow/fixtures/relay-resolver-inject-fragment-data.graphql"); let expected = 
include_str!("generate_flow/fixtures/relay-resolver-inject-fragment-data.expected"); - test_fixture(transform_fixture, "relay-resolver-inject-fragment-data.graphql", "generate_flow/fixtures/relay-resolver-inject-fragment-data.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-inject-fragment-data.graphql", "generate_flow/fixtures/relay-resolver-inject-fragment-data.expected", input, expected).await; } -#[test] -fn relay_resolver_live_client_edge() { +#[tokio::test] +async fn relay_resolver_live_client_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-live-client-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-live-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-live-client-edge.graphql", "generate_flow/fixtures/relay-resolver-live-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-live-client-edge.graphql", "generate_flow/fixtures/relay-resolver-live-client-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_multiple_consumers() { +#[tokio::test] +async fn relay_resolver_multiple_consumers() { let input = include_str!("generate_flow/fixtures/relay-resolver-multiple-consumers.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-multiple-consumers.expected"); - test_fixture(transform_fixture, "relay-resolver-multiple-consumers.graphql", "generate_flow/fixtures/relay-resolver-multiple-consumers.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-multiple-consumers.graphql", "generate_flow/fixtures/relay-resolver-multiple-consumers.expected", input, expected).await; } -#[test] -fn relay_resolver_named_import() { +#[tokio::test] +async fn relay_resolver_named_import() { let input = include_str!("generate_flow/fixtures/relay-resolver-named-import.graphql"); let expected = 
include_str!("generate_flow/fixtures/relay-resolver-named-import.expected"); - test_fixture(transform_fixture, "relay-resolver-named-import.graphql", "generate_flow/fixtures/relay-resolver-named-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-named-import.graphql", "generate_flow/fixtures/relay-resolver-named-import.expected", input, expected).await; } -#[test] -fn relay_resolver_plural_client_edge() { +#[tokio::test] +async fn relay_resolver_plural_client_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-plural-client-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-plural-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-plural-client-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-plural-client-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-client-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_plural_client_edge_with_required_edge() { +#[tokio::test] +async fn relay_resolver_plural_client_edge_with_required_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-plural-client-edge-with-required-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-plural-client-edge-with-required-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-plural-client-edge-with-required-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-client-edge-with-required-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-plural-client-edge-with-required-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-client-edge-with-required-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_plural_required_client_edge() { +#[tokio::test] +async fn relay_resolver_plural_required_client_edge() { 
let input = include_str!("generate_flow/fixtures/relay-resolver-plural-required-client-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-plural-required-client-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-plural-required-client-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-required-client-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-plural-required-client-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-required-client-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_plural_required_client_edge_with_required_edge() { +#[tokio::test] +async fn relay_resolver_plural_required_client_edge_with_required_edge() { let input = include_str!("generate_flow/fixtures/relay-resolver-plural-required-client-edge-with-required-edge.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-plural-required-client-edge-with-required-edge.expected"); - test_fixture(transform_fixture, "relay-resolver-plural-required-client-edge-with-required-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-required-client-edge-with-required-edge.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-plural-required-client-edge-with-required-edge.graphql", "generate_flow/fixtures/relay-resolver-plural-required-client-edge-with-required-edge.expected", input, expected).await; } -#[test] -fn relay_resolver_raw_response() { +#[tokio::test] +async fn relay_resolver_raw_response() { let input = include_str!("generate_flow/fixtures/relay-resolver-raw-response.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-raw-response.expected"); - test_fixture(transform_fixture, "relay-resolver-raw-response.graphql", "generate_flow/fixtures/relay-resolver-raw-response.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-raw-response.graphql", 
"generate_flow/fixtures/relay-resolver-raw-response.expected", input, expected).await; } -#[test] -fn relay_resolver_required() { +#[tokio::test] +async fn relay_resolver_required() { let input = include_str!("generate_flow/fixtures/relay-resolver-required.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-required.expected"); - test_fixture(transform_fixture, "relay-resolver-required.graphql", "generate_flow/fixtures/relay-resolver-required.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-required.graphql", "generate_flow/fixtures/relay-resolver-required.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_scalar_plural() { + let input = include_str!("generate_flow/fixtures/relay-resolver-scalar-plural.graphql"); + let expected = include_str!("generate_flow/fixtures/relay-resolver-scalar-plural.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-scalar-plural.graphql", "generate_flow/fixtures/relay-resolver-scalar-plural.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_client_interface() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_interface() { let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-interface.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-client-interface.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-client-interface.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-client-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_client_object() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_object() 
{ let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-object.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-object.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-client-object.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-client-object.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_enum() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_object_plural() { + let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.graphql"); + let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-client-object-plural.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-client-object-plural.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_output_type_enum() { let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-enum.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-enum.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-enum.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-enum.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-enum.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-enum.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_relay_resolver_value() { +#[tokio::test] +async fn relay_resolver_with_output_type_enum_plural() { + let input = 
include_str!("generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.graphql"); + let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-enum-plural.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-enum-plural.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_output_type_relay_resolver_value() { let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-relay-resolver-value.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-relay-resolver-value.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_output_type_relay_resolver_value_plural() { + let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.graphql"); + let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-relay-resolver-value-plural.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-plural.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_relay_resolver_value_required() { +#[tokio::test] +async fn relay_resolver_with_output_type_relay_resolver_value_required() { let input = 
include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-relay-resolver-value-required.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-relay-resolver-value-required.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_scalar() { +#[tokio::test] +async fn relay_resolver_with_output_type_scalar() { let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-scalar.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-scalar.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-scalar.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_output_type_scalar_plural() { + let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.graphql"); + let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-scalar-plural.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-scalar-plural.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_scalar_required() { +#[tokio::test] +async fn 
relay_resolver_with_output_type_scalar_required() { let input = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar-required.graphql"); let expected = include_str!("generate_flow/fixtures/relay-resolver-with-output-type-scalar-required.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-scalar-required.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-scalar-required.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-scalar-required.graphql", "generate_flow/fixtures/relay-resolver-with-output-type-scalar-required.expected", input, expected).await; } -#[test] -fn relay_weak_client_type() { +#[tokio::test] +async fn relay_weak_client_type() { let input = include_str!("generate_flow/fixtures/relay-weak-client-type.graphql"); let expected = include_str!("generate_flow/fixtures/relay-weak-client-type.expected"); - test_fixture(transform_fixture, "relay-weak-client-type.graphql", "generate_flow/fixtures/relay-weak-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-weak-client-type.graphql", "generate_flow/fixtures/relay-weak-client-type.expected", input, expected).await; } -#[test] -fn required() { +#[tokio::test] +async fn required() { let input = include_str!("generate_flow/fixtures/required.graphql"); let expected = include_str!("generate_flow/fixtures/required.expected"); - test_fixture(transform_fixture, "required.graphql", "generate_flow/fixtures/required.expected", input, expected); + test_fixture(transform_fixture, file!(), "required.graphql", "generate_flow/fixtures/required.expected", input, expected).await; } -#[test] -fn required_bubbles_through_inline_fragments_to_fragment() { +#[tokio::test] +async fn required_bubbles_through_inline_fragments_to_fragment() { let input = include_str!("generate_flow/fixtures/required-bubbles-through-inline-fragments-to-fragment.graphql"); let expected = 
include_str!("generate_flow/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected"); - test_fixture(transform_fixture, "required-bubbles-through-inline-fragments-to-fragment.graphql", "generate_flow/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-through-inline-fragments-to-fragment.graphql", "generate_flow/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected", input, expected).await; } -#[test] -fn required_bubbles_to_fragment() { +#[tokio::test] +async fn required_bubbles_to_fragment() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-fragment.expected"); - test_fixture(transform_fixture, "required-bubbles-to-fragment.graphql", "generate_flow/fixtures/required-bubbles-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-fragment.graphql", "generate_flow/fixtures/required-bubbles-to-fragment.expected", input, expected).await; } -#[test] -fn required_bubbles_to_item_in_plural_field() { +#[tokio::test] +async fn required_bubbles_to_item_in_plural_field() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-item-in-plural-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-item-in-plural-field.expected"); - test_fixture(transform_fixture, "required-bubbles-to-item-in-plural-field.graphql", "generate_flow/fixtures/required-bubbles-to-item-in-plural-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-item-in-plural-field.graphql", "generate_flow/fixtures/required-bubbles-to-item-in-plural-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_item_in_required_plural_field() { +#[tokio::test] +async fn 
required_bubbles_to_item_in_required_plural_field() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-item-in-required-plural-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-item-in-required-plural-field.expected"); - test_fixture(transform_fixture, "required-bubbles-to-item-in-required-plural-field.graphql", "generate_flow/fixtures/required-bubbles-to-item-in-required-plural-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-item-in-required-plural-field.graphql", "generate_flow/fixtures/required-bubbles-to-item-in-required-plural-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_non_null_item_in_non_null_plural_linked_field() { +#[tokio::test] +async fn required_bubbles_to_non_null_item_in_non_null_plural_linked_field() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-item-in-non-null-plural-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-item-in-non-null-plural-linked-field.expected"); - test_fixture(transform_fixture, "required-bubbles-to-non-null-item-in-non-null-plural-linked-field.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-item-in-non-null-plural-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-non-null-item-in-non-null-plural-linked-field.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-item-in-non-null-plural-linked-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_non_null_linked_field() { +#[tokio::test] +async fn required_bubbles_to_non_null_linked_field() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-linked-field.expected"); - test_fixture(transform_fixture, 
"required-bubbles-to-non-null-linked-field.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-non-null-linked-field.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-linked-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_non_null_linked_field_through_inline_fragment() { +#[tokio::test] +async fn required_bubbles_to_non_null_linked_field_through_inline_fragment() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-linked-field-through-inline-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-linked-field-through-inline-fragment.expected"); - test_fixture(transform_fixture, "required-bubbles-to-non-null-linked-field-through-inline-fragment.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-linked-field-through-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-non-null-linked-field-through-inline-fragment.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-linked-field-through-inline-fragment.expected", input, expected).await; } -#[test] -fn required_bubbles_to_non_null_plural_linked_field() { +#[tokio::test] +async fn required_bubbles_to_non_null_plural_linked_field() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-plural-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-non-null-plural-linked-field.expected"); - test_fixture(transform_fixture, "required-bubbles-to-non-null-plural-linked-field.graphql", "generate_flow/fixtures/required-bubbles-to-non-null-plural-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-non-null-plural-linked-field.graphql", 
"generate_flow/fixtures/required-bubbles-to-non-null-plural-linked-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_plural_fragment_root() { +#[tokio::test] +async fn required_bubbles_to_plural_fragment_root() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-plural-fragment-root.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-plural-fragment-root.expected"); - test_fixture(transform_fixture, "required-bubbles-to-plural-fragment-root.graphql", "generate_flow/fixtures/required-bubbles-to-plural-fragment-root.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-plural-fragment-root.graphql", "generate_flow/fixtures/required-bubbles-to-plural-fragment-root.expected", input, expected).await; } -#[test] -fn required_bubbles_to_query() { +#[tokio::test] +async fn required_bubbles_to_query() { let input = include_str!("generate_flow/fixtures/required-bubbles-to-query.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-to-query.expected"); - test_fixture(transform_fixture, "required-bubbles-to-query.graphql", "generate_flow/fixtures/required-bubbles-to-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-query.graphql", "generate_flow/fixtures/required-bubbles-to-query.expected", input, expected).await; } -#[test] -fn required_bubbles_up_to_mutation_response() { +#[tokio::test] +async fn required_bubbles_up_to_mutation_response() { let input = include_str!("generate_flow/fixtures/required-bubbles-up-to-mutation-response.graphql"); let expected = include_str!("generate_flow/fixtures/required-bubbles-up-to-mutation-response.expected"); - test_fixture(transform_fixture, "required-bubbles-up-to-mutation-response.graphql", "generate_flow/fixtures/required-bubbles-up-to-mutation-response.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"required-bubbles-up-to-mutation-response.graphql", "generate_flow/fixtures/required-bubbles-up-to-mutation-response.expected", input, expected).await; } -#[test] -fn required_chain_bubbles_to_non_null_linked_field_through_inline_fragment() { +#[tokio::test] +async fn required_chain_bubbles_to_non_null_linked_field_through_inline_fragment() { let input = include_str!("generate_flow/fixtures/required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.expected"); - test_fixture(transform_fixture, "required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.graphql", "generate_flow/fixtures/required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.graphql", "generate_flow/fixtures/required-chain-bubbles-to-non-null-linked-field-through-inline-fragment.expected", input, expected).await; } -#[test] -fn required_isolates_concrete_inline_fragments() { +#[tokio::test] +async fn required_isolates_concrete_inline_fragments() { let input = include_str!("generate_flow/fixtures/required-isolates-concrete-inline-fragments.graphql"); let expected = include_str!("generate_flow/fixtures/required-isolates-concrete-inline-fragments.expected"); - test_fixture(transform_fixture, "required-isolates-concrete-inline-fragments.graphql", "generate_flow/fixtures/required-isolates-concrete-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-isolates-concrete-inline-fragments.graphql", "generate_flow/fixtures/required-isolates-concrete-inline-fragments.expected", input, expected).await; } -#[test] -fn required_raw_response_type() { +#[tokio::test] +async fn required_raw_response_type() { let input = 
include_str!("generate_flow/fixtures/required-raw-response-type.graphql"); let expected = include_str!("generate_flow/fixtures/required-raw-response-type.expected"); - test_fixture(transform_fixture, "required-raw-response-type.graphql", "generate_flow/fixtures/required-raw-response-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-raw-response-type.graphql", "generate_flow/fixtures/required-raw-response-type.expected", input, expected).await; } -#[test] -fn required_throw_doesnt_bubbles_to_fragment() { +#[tokio::test] +async fn required_throw_doesnt_bubbles_to_fragment() { let input = include_str!("generate_flow/fixtures/required-throw-doesnt-bubbles-to-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/required-throw-doesnt-bubbles-to-fragment.expected"); - test_fixture(transform_fixture, "required-throw-doesnt-bubbles-to-fragment.graphql", "generate_flow/fixtures/required-throw-doesnt-bubbles-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throw-doesnt-bubbles-to-fragment.graphql", "generate_flow/fixtures/required-throw-doesnt-bubbles-to-fragment.expected", input, expected).await; } -#[test] -fn required_throw_doesnt_bubbles_to_query() { +#[tokio::test] +async fn required_throw_doesnt_bubbles_to_query() { let input = include_str!("generate_flow/fixtures/required-throw-doesnt-bubbles-to-query.graphql"); let expected = include_str!("generate_flow/fixtures/required-throw-doesnt-bubbles-to-query.expected"); - test_fixture(transform_fixture, "required-throw-doesnt-bubbles-to-query.graphql", "generate_flow/fixtures/required-throw-doesnt-bubbles-to-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throw-doesnt-bubbles-to-query.graphql", "generate_flow/fixtures/required-throw-doesnt-bubbles-to-query.expected", input, expected).await; } -#[test] -fn required_throws_nested() { +#[tokio::test] +async fn required_throws_nested() { let 
input = include_str!("generate_flow/fixtures/required-throws-nested.graphql"); let expected = include_str!("generate_flow/fixtures/required-throws-nested.expected"); - test_fixture(transform_fixture, "required-throws-nested.graphql", "generate_flow/fixtures/required-throws-nested.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throws-nested.graphql", "generate_flow/fixtures/required-throws-nested.expected", input, expected).await; } -#[test] -fn required_throws_within_non_null_linked_field() { +#[tokio::test] +async fn required_throws_within_non_null_linked_field() { let input = include_str!("generate_flow/fixtures/required-throws-within-non-null-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-throws-within-non-null-linked-field.expected"); - test_fixture(transform_fixture, "required-throws-within-non-null-linked-field.graphql", "generate_flow/fixtures/required-throws-within-non-null-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throws-within-non-null-linked-field.graphql", "generate_flow/fixtures/required-throws-within-non-null-linked-field.expected", input, expected).await; } -#[test] -fn required_throws_within_non_null_plural_linked_field() { +#[tokio::test] +async fn required_throws_within_non_null_plural_linked_field() { let input = include_str!("generate_flow/fixtures/required-throws-within-non-null-plural-linked-field.graphql"); let expected = include_str!("generate_flow/fixtures/required-throws-within-non-null-plural-linked-field.expected"); - test_fixture(transform_fixture, "required-throws-within-non-null-plural-linked-field.graphql", "generate_flow/fixtures/required-throws-within-non-null-plural-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throws-within-non-null-plural-linked-field.graphql", "generate_flow/fixtures/required-throws-within-non-null-plural-linked-field.expected", input, 
expected).await; } -#[test] -fn required_within_aliased_inline_fragment() { +#[tokio::test] +async fn required_within_aliased_inline_fragment() { let input = include_str!("generate_flow/fixtures/required-within-aliased-inline-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/required-within-aliased-inline-fragment.expected"); - test_fixture(transform_fixture, "required-within-aliased-inline-fragment.graphql", "generate_flow/fixtures/required-within-aliased-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-within-aliased-inline-fragment.graphql", "generate_flow/fixtures/required-within-aliased-inline-fragment.expected", input, expected).await; } -#[test] -fn required_within_aliased_inline_fragment_on_abstract() { +#[tokio::test] +async fn required_within_aliased_inline_fragment_on_abstract() { let input = include_str!("generate_flow/fixtures/required-within-aliased-inline-fragment-on-abstract.graphql"); let expected = include_str!("generate_flow/fixtures/required-within-aliased-inline-fragment-on-abstract.expected"); - test_fixture(transform_fixture, "required-within-aliased-inline-fragment-on-abstract.graphql", "generate_flow/fixtures/required-within-aliased-inline-fragment-on-abstract.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-within-aliased-inline-fragment-on-abstract.graphql", "generate_flow/fixtures/required-within-aliased-inline-fragment-on-abstract.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type() { + let input = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.graphql"); + let expected = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.expected"); + test_fixture(transform_fixture, file!(), "resolver-on-interface-of-all-strong-model-type.graphql", 
"generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type_with_extension() { + let input = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.graphql"); + let expected = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.expected"); + test_fixture(transform_fixture, file!(), "resolver-on-interface-of-all-strong-model-type-with-extension.graphql", "generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-extension.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_strong_model_type_with_root_fragment() { + let input = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.graphql"); + let expected = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.expected"); + test_fixture(transform_fixture, file!(), "resolver-on-interface-of-all-strong-model-type-with-root-fragment.graphql", "generate_flow/fixtures/resolver-on-interface-of-all-strong-model-type-with-root-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn resolver_on_interface_of_all_weak_model_type() { + let input = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.graphql"); + let expected = include_str!("generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.expected"); + test_fixture(transform_fixture, file!(), "resolver-on-interface-of-all-weak-model-type.graphql", "generate_flow/fixtures/resolver-on-interface-of-all-weak-model-type.expected", input, expected).await; } -#[test] -fn roots() { +#[tokio::test] +async fn roots() { let input = include_str!("generate_flow/fixtures/roots.graphql"); let expected = include_str!("generate_flow/fixtures/roots.expected"); - 
test_fixture(transform_fixture, "roots.graphql", "generate_flow/fixtures/roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "roots.graphql", "generate_flow/fixtures/roots.expected", input, expected).await; } -#[test] -fn scalar_field() { +#[tokio::test] +async fn scalar_field() { let input = include_str!("generate_flow/fixtures/scalar-field.graphql"); let expected = include_str!("generate_flow/fixtures/scalar-field.expected"); - test_fixture(transform_fixture, "scalar-field.graphql", "generate_flow/fixtures/scalar-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field.graphql", "generate_flow/fixtures/scalar-field.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_in_raw_response() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_in_raw_response.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_in_raw_response.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_in_raw_response.graphql", "generate_flow/fixtures/semantic_non_null_in_raw_response.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_items_in_matrix() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_items_in_matrix.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_items_in_matrix.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_items_in_matrix.graphql", "generate_flow/fixtures/semantic_non_null_items_in_matrix.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_liked_field_resolver() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_liked_field_resolver.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_liked_field_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_liked_field_resolver.graphql", 
"generate_flow/fixtures/semantic_non_null_liked_field_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_liked_field_weak_resolver() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_liked_field_weak_resolver.graphql", "generate_flow/fixtures/semantic_non_null_liked_field_weak_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_linked_field() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_linked_field.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_linked_field.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_linked_field.graphql", "generate_flow/fixtures/semantic_non_null_linked_field.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_list_and_list_item() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_list_and_list_item.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_list_and_list_item.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_list_and_list_item.graphql", "generate_flow/fixtures/semantic_non_null_list_and_list_item.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_list_item() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_list_item.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_list_item.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_list_item.graphql", "generate_flow/fixtures/semantic_non_null_list_item.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_scalar() { + let input = 
include_str!("generate_flow/fixtures/semantic_non_null_scalar.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_scalar.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar.graphql", "generate_flow/fixtures/semantic_non_null_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_scalar_feature_disabled() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar_feature_disabled.graphql", "generate_flow/fixtures/semantic_non_null_scalar_feature_disabled.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_scalar_resolver() { + let input = include_str!("generate_flow/fixtures/semantic_non_null_scalar_resolver.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_non_null_scalar_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar_resolver.graphql", "generate_flow/fixtures/semantic_non_null_scalar_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_null_required_throw_on_error() { + let input = include_str!("generate_flow/fixtures/semantic_null_required_throw_on_error.graphql"); + let expected = include_str!("generate_flow/fixtures/semantic_null_required_throw_on_error.expected"); + test_fixture(transform_fixture, file!(), "semantic_null_required_throw_on_error.graphql", "generate_flow/fixtures/semantic_null_required_throw_on_error.expected", input, expected).await; } -#[test] -fn simple() { +#[tokio::test] +async fn simple() { let input = include_str!("generate_flow/fixtures/simple.graphql"); let expected = include_str!("generate_flow/fixtures/simple.expected"); - test_fixture(transform_fixture, "simple.graphql", 
"generate_flow/fixtures/simple.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple.graphql", "generate_flow/fixtures/simple.expected", input, expected).await; } -#[test] -fn typename_in_union_with_other_fields() { +#[tokio::test] +async fn typename_in_union_with_other_fields() { let input = include_str!("generate_flow/fixtures/typename-in-union-with-other-fields.graphql"); let expected = include_str!("generate_flow/fixtures/typename-in-union-with-other-fields.expected"); - test_fixture(transform_fixture, "typename-in-union-with-other-fields.graphql", "generate_flow/fixtures/typename-in-union-with-other-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-in-union-with-other-fields.graphql", "generate_flow/fixtures/typename-in-union-with-other-fields.expected", input, expected).await; } -#[test] -fn typename_inside_with_overlapping_fields() { +#[tokio::test] +async fn typename_inside_with_overlapping_fields() { let input = include_str!("generate_flow/fixtures/typename-inside-with-overlapping-fields.graphql"); let expected = include_str!("generate_flow/fixtures/typename-inside-with-overlapping-fields.expected"); - test_fixture(transform_fixture, "typename-inside-with-overlapping-fields.graphql", "generate_flow/fixtures/typename-inside-with-overlapping-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-inside-with-overlapping-fields.graphql", "generate_flow/fixtures/typename-inside-with-overlapping-fields.expected", input, expected).await; } -#[test] -fn typename_on_union() { +#[tokio::test] +async fn typename_on_union() { let input = include_str!("generate_flow/fixtures/typename-on-union.graphql"); let expected = include_str!("generate_flow/fixtures/typename-on-union.expected"); - test_fixture(transform_fixture, "typename-on-union.graphql", "generate_flow/fixtures/typename-on-union.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"typename-on-union.graphql", "generate_flow/fixtures/typename-on-union.expected", input, expected).await; } -#[test] -fn typename_on_union_with_non_matching_aliases() { +#[tokio::test] +async fn typename_on_union_with_non_matching_aliases() { let input = include_str!("generate_flow/fixtures/typename-on-union-with-non-matching-aliases.graphql"); let expected = include_str!("generate_flow/fixtures/typename-on-union-with-non-matching-aliases.expected"); - test_fixture(transform_fixture, "typename-on-union-with-non-matching-aliases.graphql", "generate_flow/fixtures/typename-on-union-with-non-matching-aliases.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-union-with-non-matching-aliases.graphql", "generate_flow/fixtures/typename-on-union-with-non-matching-aliases.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads() { +#[tokio::test] +async fn unmasked_fragment_spreads() { let input = include_str!("generate_flow/fixtures/unmasked-fragment-spreads.graphql"); let expected = include_str!("generate_flow/fixtures/unmasked-fragment-spreads.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads.graphql", "generate_flow/fixtures/unmasked-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads.graphql", "generate_flow/fixtures/unmasked-fragment-spreads.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread() { +#[tokio::test] +async fn updatable_fragment_spread() { let input = include_str!("generate_flow/fixtures/updatable-fragment-spread.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-fragment-spread.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread.graphql", "generate_flow/fixtures/updatable-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread.graphql", 
"generate_flow/fixtures/updatable-fragment-spread.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_and_regular_spread() { +#[tokio::test] +async fn updatable_fragment_spread_and_regular_spread() { let input = include_str!("generate_flow/fixtures/updatable-fragment-spread-and-regular-spread.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-fragment-spread-and-regular-spread.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-and-regular-spread.graphql", "generate_flow/fixtures/updatable-fragment-spread-and-regular-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-and-regular-spread.graphql", "generate_flow/fixtures/updatable-fragment-spread-and-regular-spread.expected", input, expected).await; } -#[test] -fn updatable_fragment_spread_multiple() { +#[tokio::test] +async fn updatable_fragment_spread_multiple() { let input = include_str!("generate_flow/fixtures/updatable-fragment-spread-multiple.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-fragment-spread-multiple.expected"); - test_fixture(transform_fixture, "updatable-fragment-spread-multiple.graphql", "generate_flow/fixtures/updatable-fragment-spread-multiple.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-multiple.graphql", "generate_flow/fixtures/updatable-fragment-spread-multiple.expected", input, expected).await; } -#[test] -fn updatable_operation() { +#[tokio::test] +async fn updatable_operation() { let input = include_str!("generate_flow/fixtures/updatable-operation.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation.expected"); - test_fixture(transform_fixture, "updatable-operation.graphql", "generate_flow/fixtures/updatable-operation.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation.graphql", 
"generate_flow/fixtures/updatable-operation.expected", input, expected).await; } -#[test] -fn updatable_operation_assignable_fragment() { +#[tokio::test] +async fn updatable_operation_assignable_fragment() { let input = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragment.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragment.expected"); - test_fixture(transform_fixture, "updatable-operation-assignable-fragment.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragment.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragment.expected", input, expected).await; } -#[test] -fn updatable_operation_assignable_fragment_plural() { +#[tokio::test] +async fn updatable_operation_assignable_fragment_plural() { let input = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragment-plural.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragment-plural.expected"); - test_fixture(transform_fixture, "updatable-operation-assignable-fragment-plural.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragment-plural.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragment-plural.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragment-plural.expected", input, expected).await; } -#[test] -fn updatable_operation_assignable_fragments_within_narrowing() { +#[tokio::test] +async fn updatable_operation_assignable_fragments_within_narrowing() { let input = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragments-within-narrowing.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected"); - test_fixture(transform_fixture, 
"updatable-operation-assignable-fragments-within-narrowing.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragments-within-narrowing.graphql", "generate_flow/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected", input, expected).await; } -#[test] -fn updatable_operation_multiple_assignable_fragments() { +#[tokio::test] +async fn updatable_operation_multiple_assignable_fragments() { let input = include_str!("generate_flow/fixtures/updatable-operation-multiple-assignable-fragments.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-multiple-assignable-fragments.expected"); - test_fixture(transform_fixture, "updatable-operation-multiple-assignable-fragments.graphql", "generate_flow/fixtures/updatable-operation-multiple-assignable-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-multiple-assignable-fragments.graphql", "generate_flow/fixtures/updatable-operation-multiple-assignable-fragments.expected", input, expected).await; } -#[test] -fn updatable_operation_plural_field_no_spreads() { +#[tokio::test] +async fn updatable_operation_plural_field_no_spreads() { let input = include_str!("generate_flow/fixtures/updatable-operation-plural-field-no-spreads.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-plural-field-no-spreads.expected"); - test_fixture(transform_fixture, "updatable-operation-plural-field-no-spreads.graphql", "generate_flow/fixtures/updatable-operation-plural-field-no-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-plural-field-no-spreads.graphql", "generate_flow/fixtures/updatable-operation-plural-field-no-spreads.expected", input, expected).await; } -#[test] -fn updatable_operation_plural_field_with_spreads() { 
+#[tokio::test] +async fn updatable_operation_plural_field_with_spreads() { let input = include_str!("generate_flow/fixtures/updatable-operation-plural-field-with-spreads.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-plural-field-with-spreads.expected"); - test_fixture(transform_fixture, "updatable-operation-plural-field-with-spreads.graphql", "generate_flow/fixtures/updatable-operation-plural-field-with-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-plural-field-with-spreads.graphql", "generate_flow/fixtures/updatable-operation-plural-field-with-spreads.expected", input, expected).await; } -#[test] -fn updatable_operation_special_fields() { +#[tokio::test] +async fn updatable_operation_special_fields() { let input = include_str!("generate_flow/fixtures/updatable-operation-special-fields.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-special-fields.expected"); - test_fixture(transform_fixture, "updatable-operation-special-fields.graphql", "generate_flow/fixtures/updatable-operation-special-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-special-fields.graphql", "generate_flow/fixtures/updatable-operation-special-fields.expected", input, expected).await; } -#[test] -fn updatable_operation_type_refinement() { +#[tokio::test] +async fn updatable_operation_type_refinement() { let input = include_str!("generate_flow/fixtures/updatable-operation-type-refinement.graphql"); let expected = include_str!("generate_flow/fixtures/updatable-operation-type-refinement.expected"); - test_fixture(transform_fixture, "updatable-operation-type-refinement.graphql", "generate_flow/fixtures/updatable-operation-type-refinement.expected", input, expected); + test_fixture(transform_fixture, file!(), "updatable-operation-type-refinement.graphql", "generate_flow/fixtures/updatable-operation-type-refinement.expected", 
input, expected).await; } diff --git a/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id.rs b/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id.rs new file mode 100644 index 0000000000000..6f4bc558dc017 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id.rs @@ -0,0 +1,132 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ConsoleLogger; +use common::FeatureFlag; +use common::FeatureFlags; +use common::ScalarName; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use fnv::FnvBuildHasher; +use fnv::FnvHashMap; +use graphql_ir::build_ir_in_relay_mode; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use indexmap::IndexMap; +use intern::string_key::Intern; +use relay_codegen::print_provided_variables; +use relay_codegen::JsModuleFormat; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use relay_config::SchemaConfig; +use relay_test_schema::get_test_schema_with_custom_id; +use relay_test_schema::get_test_schema_with_custom_id_with_extensions; +use relay_transforms::apply_transforms; +use relay_typegen::FragmentLocations; +use relay_typegen::TypegenConfig; +use relay_typegen::TypegenLanguage; + +type FnvIndexMap = IndexMap; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts = fixture.content.split("%extensions%").collect::>(); + let (source, schema) = match parts.as_slice() { + [source, extensions] => ( + source, + get_test_schema_with_custom_id_with_extensions(extensions), + ), + [source] => (source, get_test_schema_with_custom_id()), + _ => panic!(), + }; + + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let mut sources = FnvHashMap::default(); + sources.insert(source_location, 
source); + let ast = parse_executable(source, source_location).unwrap_or_else(|e| { + panic!("Encountered error building AST: {:?}", e); + }); + let feature_flags = FeatureFlags { + no_inline: FeatureFlag::Enabled, + enable_relay_resolver_transform: true, + actor_change_support: FeatureFlag::Enabled, + ..Default::default() + }; + let ir = + build_ir_in_relay_mode(&schema, &ast.definitions, &feature_flags).unwrap_or_else(|e| { + panic!("Encountered error building IR {:?}", e); + }); + let program = Program::from_definitions(Arc::clone(&schema), ir); + + let mut custom_scalar_types = FnvIndexMap::default(); + custom_scalar_types.insert( + ScalarName("Boolean".intern()), + relay_config::CustomScalarType::Name("CustomBoolean".intern()), + ); + let project_config = ProjectConfig { + name: ProjectName::default(), + js_module_format: JsModuleFormat::Haste, + feature_flags: Arc::new(feature_flags), + schema_config: SchemaConfig { + node_interface_id_field: "global_id".intern(), + ..Default::default() + }, + typegen_config: TypegenConfig { + language: TypegenLanguage::Flow, + custom_scalar_types, + ..Default::default() + }, + ..Default::default() + }; + + let programs = apply_transforms( + &project_config, + Arc::new(program), + Default::default(), + Arc::new(ConsoleLogger), + None, + None, + ) + .unwrap(); + + let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); + let mut operations: Vec<_> = programs.typegen.operations().collect(); + operations.sort_by_key(|op| op.name.item.0); + let operation_strings = operations.into_iter().map(|typegen_operation| { + let normalization_operation = programs + .normalization + .operation(OperationDefinitionName(typegen_operation.name.item.0)) + .unwrap(); + relay_typegen::generate_operation_type_exports_section( + typegen_operation, + normalization_operation, + &schema, + &project_config, + &fragment_locations, + print_provided_variables(&schema, normalization_operation, &project_config), + ) + }); + + let mut 
fragments: Vec<_> = programs.typegen.fragments().collect(); + fragments.sort_by_key(|frag| frag.name.item); + let fragment_strings = fragments.into_iter().map(|frag| { + relay_typegen::generate_fragment_type_exports_section( + frag, + &schema, + &project_config, + &fragment_locations, + ) + }); + + let mut result: Vec = operation_strings.collect(); + result.extend(fragment_strings); + Ok(result + .join("-------------------------------------------------------------------------------\n")) +} diff --git a/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id/mod.rs b/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id/mod.rs deleted file mode 100644 index c3e91622f650d..0000000000000 --- a/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id/mod.rs +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use std::sync::Arc; - -use common::ConsoleLogger; -use common::FeatureFlag; -use common::FeatureFlags; -use common::ScalarName; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use fnv::FnvBuildHasher; -use fnv::FnvHashMap; -use graphql_ir::build_ir_in_relay_mode; -use graphql_ir::OperationDefinitionName; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use indexmap::IndexMap; -use intern::string_key::Intern; -use relay_codegen::JsModuleFormat; -use relay_config::ProjectConfig; -use relay_config::SchemaConfig; -use relay_test_schema::get_test_schema_with_custom_id; -use relay_test_schema::get_test_schema_with_custom_id_with_extensions; -use relay_transforms::apply_transforms; -use relay_typegen::FragmentLocations; -use relay_typegen::TypegenConfig; -use relay_typegen::TypegenLanguage; - -type FnvIndexMap = IndexMap; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts = fixture.content.split("%extensions%").collect::>(); - let (source, schema) = match parts.as_slice() { - [source, extensions] => ( - source, - get_test_schema_with_custom_id_with_extensions(extensions), - ), - [source] => (source, get_test_schema_with_custom_id()), - _ => panic!(), - }; - - let source_location = SourceLocationKey::standalone(fixture.file_name); - - let mut sources = FnvHashMap::default(); - sources.insert(source_location, source); - let ast = parse_executable(source, source_location).unwrap_or_else(|e| { - panic!("Encountered error building AST: {:?}", e); - }); - let feature_flags = FeatureFlags { - no_inline: FeatureFlag::Enabled, - enable_relay_resolver_transform: true, - actor_change_support: FeatureFlag::Enabled, - ..Default::default() - }; - let ir = build_ir_in_relay_mode(&schema, &ast.definitions).unwrap_or_else(|e| { - panic!("Encountered error building IR {:?}", e); - }); - let program = Program::from_definitions(Arc::clone(&schema), ir); - - let mut custom_scalar_types = FnvIndexMap::default(); - 
custom_scalar_types.insert( - ScalarName("Boolean".intern()), - relay_config::CustomScalarType::Name("CustomBoolean".intern()), - ); - let project_config = ProjectConfig { - name: "test".intern(), - js_module_format: JsModuleFormat::Haste, - feature_flags: Arc::new(feature_flags), - schema_config: SchemaConfig { - node_interface_id_field: "global_id".intern(), - ..Default::default() - }, - typegen_config: TypegenConfig { - language: TypegenLanguage::Flow, - custom_scalar_types, - flow_typegen: Default::default(), - ..Default::default() - }, - ..Default::default() - }; - - let programs = apply_transforms( - &project_config, - Arc::new(program), - Default::default(), - Arc::new(ConsoleLogger), - None, - None, - ) - .unwrap(); - - let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); - let mut operations: Vec<_> = programs.typegen.operations().collect(); - operations.sort_by_key(|op| op.name.item.0); - let operation_strings = operations.into_iter().map(|typegen_operation| { - let normalization_operation = programs - .normalization - .operation(OperationDefinitionName(typegen_operation.name.item.0)) - .unwrap(); - relay_typegen::generate_operation_type_exports_section( - typegen_operation, - normalization_operation, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut fragments: Vec<_> = programs.typegen.fragments().collect(); - fragments.sort_by_key(|frag| frag.name.item); - let fragment_strings = fragments.into_iter().map(|frag| { - relay_typegen::generate_fragment_type_exports_section( - frag, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut result: Vec = operation_strings.collect(); - result.extend(fragment_strings); - Ok(result - .join("-------------------------------------------------------------------------------\n")) -} diff --git a/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id_test.rs b/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id_test.rs index 
c83eeb0046d19..579b4cd5215cd 100644 --- a/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id_test.rs +++ b/compiler/crates/relay-typegen/tests/generate_flow_with_custom_id_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<404c5d236c3730337a395ae6aefa158c>> + * @generated SignedSource<> */ mod generate_flow_with_custom_id; @@ -12,16 +12,16 @@ mod generate_flow_with_custom_id; use generate_flow_with_custom_id::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn relay_client_id_field() { +#[tokio::test] +async fn relay_client_id_field() { let input = include_str!("generate_flow_with_custom_id/fixtures/relay-client-id-field.graphql"); let expected = include_str!("generate_flow_with_custom_id/fixtures/relay-client-id-field.expected"); - test_fixture(transform_fixture, "relay-client-id-field.graphql", "generate_flow_with_custom_id/fixtures/relay-client-id-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-client-id-field.graphql", "generate_flow_with_custom_id/fixtures/relay-client-id-field.expected", input, expected).await; } -#[test] -fn simple() { +#[tokio::test] +async fn simple() { let input = include_str!("generate_flow_with_custom_id/fixtures/simple.graphql"); let expected = include_str!("generate_flow_with_custom_id/fixtures/simple.expected"); - test_fixture(transform_fixture, "simple.graphql", "generate_flow_with_custom_id/fixtures/simple.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple.graphql", "generate_flow_with_custom_id/fixtures/simple.expected", input, expected).await; } diff --git a/compiler/crates/relay-typegen/tests/generate_typescript.rs b/compiler/crates/relay-typegen/tests/generate_typescript.rs new file mode 100644 index 0000000000000..3252ba944a62f --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript.rs @@ 
-0,0 +1,143 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use std::sync::Arc; + +use common::ConsoleLogger; +use common::FeatureFlag; +use common::FeatureFlags; +use common::ScalarName; +use common::SourceLocationKey; +use fixture_tests::Fixture; +use fnv::FnvBuildHasher; +use fnv::FnvHashMap; +use graphql_ir::build; +use graphql_ir::OperationDefinitionName; +use graphql_ir::Program; +use graphql_syntax::parse_executable; +use indexmap::IndexMap; +use intern::string_key::Intern; +use relay_codegen::print_provided_variables; +use relay_codegen::JsModuleFormat; +use relay_config::CustomScalarType; +use relay_config::CustomScalarTypeImport; +use relay_config::ProjectConfig; +use relay_config::ProjectName; +use relay_test_schema::get_test_schema; +use relay_test_schema::get_test_schema_with_extensions; +use relay_transforms::apply_transforms; +use relay_typegen::FragmentLocations; +use relay_typegen::TypegenConfig; +use relay_typegen::TypegenLanguage; + +type FnvIndexMap = IndexMap; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts = fixture.content.split("%extensions%").collect::>(); + let (source, schema) = match parts.as_slice() { + [source, extensions] => (source, get_test_schema_with_extensions(extensions)), + [source] => (source, get_test_schema()), + _ => panic!(), + }; + + let source_location = SourceLocationKey::standalone(fixture.file_name); + + let mut sources = FnvHashMap::default(); + sources.insert(source_location, source); + let ast = parse_executable(source, source_location).unwrap_or_else(|e| { + panic!("Encountered error building AST: {:?}", e); + }); + let ir = build(&schema, &ast.definitions).unwrap_or_else(|e| { + panic!("Encountered error building IR {:?}", e); + }); + let program = Program::from_definitions(Arc::clone(&schema), ir); + let mut custom_scalar_types 
= FnvIndexMap::default(); + custom_scalar_types.insert( + ScalarName("JSON".intern()), + CustomScalarType::Path(CustomScalarTypeImport { + name: "JSON".intern(), + path: "TypeDefsFile".into(), + }), + ); + let project_config = ProjectConfig { + name: ProjectName::default(), + js_module_format: JsModuleFormat::Haste, + typegen_config: TypegenConfig { + language: TypegenLanguage::TypeScript, + custom_scalar_types, + use_import_type_syntax: fixture + .content + .contains("# typegen_config.use_import_type_syntax = true"), + experimental_emit_semantic_nullability_types: fixture + .content + .contains("# relay:experimental_emit_semantic_nullability_types"), + ..Default::default() + }, + feature_flags: Arc::new(FeatureFlags { + enable_fragment_aliases: FeatureFlag::Enabled, + enable_relay_resolver_transform: true, + ..Default::default() + }), + ..Default::default() + }; + let programs = apply_transforms( + &project_config, + Arc::new(program), + Default::default(), + Arc::new(ConsoleLogger), + None, + None, + ) + .unwrap(); + + let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); + let mut operations: Vec<_> = programs.typegen.operations().collect(); + operations.sort_by_key(|op| op.name.item.0); + let operation_strings = operations.into_iter().map(|typegen_operation| { + // `normalization` ASTs are present unless we are processing an updatable query + // In that case, `reader` ASTs are present. 
+ let op = programs + .normalization + .operation(OperationDefinitionName(typegen_operation.name.item.0)) + .unwrap_or_else(|| { + programs + .reader + .operation(OperationDefinitionName(typegen_operation.name.item.0)) + .unwrap_or_else(|| { + panic!( + "Couldn't find normalization or reader operations for {}", + typegen_operation.name.item + ) + }) + }); + + relay_typegen::generate_operation_type_exports_section( + typegen_operation, + op, + &schema, + &project_config, + &fragment_locations, + print_provided_variables(&schema, typegen_operation, &project_config), + ) + }); + + let mut fragments: Vec<_> = programs.typegen.fragments().collect(); + fragments.sort_by_key(|frag| frag.name.item); + let fragment_strings = fragments.into_iter().map(|frag| { + relay_typegen::generate_fragment_type_exports_section( + frag, + &schema, + &project_config, + &fragment_locations, + ) + }); + + let mut result: Vec = operation_strings.collect(); + result.extend(fragment_strings); + Ok(result + .join("-------------------------------------------------------------------------------\n")) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-raw-response-type.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-raw-response-type.expected index bf6999b61878f..21a507439f13c 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-raw-response-type.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-raw-response-type.expected @@ -13,22 +13,22 @@ fragment MyUserFragment on User { } ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; -export type MyQuery$variables = {}; +export type MyQuery$variables = Record; export type MyQuery$data = { readonly me: { readonly my_inline_fragment: { - readonly name: string | null; - } | null; + readonly name: string | 
null | undefined; + }; readonly my_user: { readonly " $fragmentSpreads": FragmentRefs<"MyUserFragment">; - } | null; - } | null; + }; + } | null | undefined; }; export type MyQuery$rawResponse = { readonly me: { readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; }; export type MyQuery = { rawResponse: MyQuery$rawResponse; @@ -38,7 +38,7 @@ export type MyQuery = { ------------------------------------------------------------------------------- import { FragmentRefs } from "relay-runtime"; export type MyUserFragment$data = { - readonly name: string | null; + readonly name: string | null | undefined; readonly " $fragmentType": "MyUserFragment"; }; export type MyUserFragment$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected index 246a14f087f82..ee52ca1cce108 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected @@ -10,13 +10,13 @@ query RelayReaderNamedFragmentsTest2Query { } ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly node: { readonly named_fragment: { readonly " $fragmentSpreads": FragmentRefs<"RelayReaderNamedFragmentsTest_maybe_node_interface">; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: 
RelayReaderNamedFragmentsTest2Query$data; @@ -25,7 +25,7 @@ export type RelayReaderNamedFragmentsTest2Query = { ------------------------------------------------------------------------------- import { FragmentRefs } from "relay-runtime"; export type RelayReaderNamedFragmentsTest_maybe_node_interface$data = { - readonly name: string | null; + readonly name: string | null | undefined; readonly " $fragmentType": "RelayReaderNamedFragmentsTest_maybe_node_interface"; }; export type RelayReaderNamedFragmentsTest_maybe_node_interface$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread.expected index bce6b59e6a84a..ce41250e1f381 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-fragment-spread.expected @@ -10,13 +10,13 @@ query RelayReaderNamedFragmentsTest2Query { } ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly me: { readonly named_fragment: { readonly " $fragmentSpreads": FragmentRefs<"RelayReaderNamedFragmentsTest_user">; - } | null; - } | null; + }; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; @@ -25,7 +25,7 @@ export type RelayReaderNamedFragmentsTest2Query = { ------------------------------------------------------------------------------- import { FragmentRefs } from "relay-runtime"; export type RelayReaderNamedFragmentsTest_user$data = { - readonly name: string | null; + readonly name: string | null | undefined; 
readonly " $fragmentType": "RelayReaderNamedFragmentsTest_user"; }; export type RelayReaderNamedFragmentsTest_user$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected index 44fabd657fda3..fae15f9a17bf6 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected @@ -8,8 +8,8 @@ fragment Foo on User { import { FragmentRefs } from "relay-runtime"; export type Foo$data = { readonly named_fragment: { - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + }; readonly " $fragmentType": "Foo"; }; export type Foo$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected index 062d12b43ae8a..753a13d1374ef 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected @@ -8,14 +8,14 @@ query RelayReaderNamedFragmentsTest2Query { } } ==================================== OUTPUT =================================== -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type 
RelayReaderNamedFragmentsTest2Query$data = { readonly me: { readonly id: string; readonly named_fragment: { - readonly name: string | null; - } | null; - } | null; + readonly name: string | null | undefined; + }; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected index 1a9841fc87258..b76d1dcd524e9 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected @@ -8,14 +8,14 @@ query RelayReaderNamedFragmentsTest2Query { } } ==================================== OUTPUT =================================== -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly named_fragment: { readonly me: { readonly id: string; - readonly name: string | null; - } | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; + }; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread.expected index 2a14f21138395..1979e86100c10 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread.expected +++ 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/aliased-inline-fragment-spread.expected @@ -8,14 +8,14 @@ query RelayReaderNamedFragmentsTest2Query { } } ==================================== OUTPUT =================================== -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly me: { readonly id: string; readonly named_fragment: { - readonly name: string | null; - } | null; - } | null; + readonly name: string | null | undefined; + }; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/custom-scalar-type-import.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/custom-scalar-type-import.expected index d16050c74ffb2..f3b2016ec0a1e 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/custom-scalar-type-import.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/custom-scalar-type-import.expected @@ -13,16 +13,16 @@ query Viewer($params: JSON) { ==================================== OUTPUT =================================== import { JSON } from "TypeDefsFile"; export type Viewer$variables = { - params?: JSON | null; + params?: JSON | null | undefined; }; export type Viewer$data = { readonly viewer: { readonly actor: { readonly profilePicture2?: { readonly __typename: "Image"; - } | null; - } | null; - } | null; + } | null | undefined; + } | null | undefined; + } | null | undefined; }; export type Viewer = { response: Viewer$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.expected new file mode 100644 index 
0000000000000..3f77d194dd0f6 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +mutation feedbackUnLikeMutation($input: FeedbackUnLikeInput) { + feedbackUnLike(input: $input) { + feedback { + id + } + } +} +==================================== OUTPUT =================================== +export type FeedbackUnLikeInput = { + feedbackId?: string | null | undefined; + silent?: boolean; +}; +export type feedbackUnLikeMutation$variables = { + input?: FeedbackUnLikeInput | null | undefined; +}; +export type feedbackUnLikeMutation$data = { + readonly feedbackUnLike: { + readonly feedback: { + readonly id: string; + } | null | undefined; + } | null | undefined; +}; +export type feedbackUnLikeMutation = { + response: feedbackUnLikeMutation$data; + variables: feedbackUnLikeMutation$variables; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.graphql new file mode 100644 index 0000000000000..8abff1aa7048e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/default-input.graphql @@ -0,0 +1,7 @@ +mutation feedbackUnLikeMutation($input: FeedbackUnLikeInput) { + feedbackUnLike(input: $input) { + feedback { + id + } + } +} \ No newline at end of file diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/fragment-spread.expected index 6972092c662a4..c97ede67cd471 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/fragment-spread.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/fragment-spread.expected @@ -56,12 +56,12 @@ export type ConcreateTypes$data = { readonly " $fragmentSpreads": 
FragmentRefs<"PageFragment">; } | { readonly __typename: "User"; - readonly name: string | null; + readonly name: string | null | undefined; } | { // This will never be '%other', but we need some // value in case none of the concrete values match. readonly __typename: "%other"; - } | null; + } | null | undefined; readonly " $fragmentType": "ConcreateTypes"; }; export type ConcreateTypes$key = { @@ -72,13 +72,13 @@ export type ConcreateTypes$key = { import { FragmentRefs } from "relay-runtime"; export type FragmentSpread$data = { readonly fragAndField: { - readonly uri: string | null; + readonly uri: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"PictureFragment">; - } | null; + } | null | undefined; readonly id: string; readonly justFrag: { readonly " $fragmentSpreads": FragmentRefs<"PictureFragment">; - } | null; + } | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"OtherFragment" | "UserFrag1" | "UserFrag2">; readonly " $fragmentType": "FragmentSpread"; }; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/inline-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/inline-fragment.expected index c604753353db8..a68de1296e88c 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/inline-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/inline-fragment.expected @@ -68,9 +68,9 @@ import { FragmentRefs } from "relay-runtime"; export type InlineFragment$data = { readonly id: string; readonly message?: { - readonly text: string | null; - } | null; - readonly name?: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly name?: string | null | undefined; readonly " $fragmentType": "InlineFragment"; }; export type InlineFragment$key = { @@ -81,7 +81,7 @@ export type InlineFragment$key = { import { FragmentRefs } from "relay-runtime"; export type InlineFragmentConditionalID$data 
= { readonly id?: string; - readonly name?: string | null; + readonly name?: string | null | undefined; readonly " $fragmentType": "InlineFragmentConditionalID"; }; export type InlineFragmentConditionalID$key = { @@ -93,14 +93,14 @@ import { FragmentRefs } from "relay-runtime"; export type InlineFragmentKitchenSink$data = { readonly actor: { readonly id: string; - readonly name?: string | null; + readonly name?: string | null | undefined; readonly profilePicture: { - readonly height?: number | null; - readonly uri: string | null; - readonly width?: number | null; - } | null; + readonly height?: number | null | undefined; + readonly uri: string | null | undefined; + readonly width?: number | null | undefined; + } | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"SomeFragment">; - } | null; + } | null | undefined; readonly " $fragmentType": "InlineFragmentKitchenSink"; }; export type InlineFragmentKitchenSink$key = { @@ -113,11 +113,11 @@ export type InlineFragmentWithOverlappingFields$data = { readonly hometown?: { readonly id: string; readonly message?: { - readonly text: string | null; - } | null; - readonly name: string | null; - } | null; - readonly name?: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly name: string | null | undefined; + } | null | undefined; + readonly name?: string | null | undefined; readonly " $fragmentType": "InlineFragmentWithOverlappingFields"; }; export type InlineFragmentWithOverlappingFields$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/linked-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/linked-field.expected index e7d1d82c7ca8e..a66a53fe7bd4d 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/linked-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/linked-field.expected @@ -27,7 +27,7 @@ query UnionTypeTest { } } 
==================================== OUTPUT =================================== -export type UnionTypeTest$variables = {}; +export type UnionTypeTest$variables = Record; export type UnionTypeTest$data = { readonly neverNode: { readonly __typename: "FakeNode"; @@ -36,7 +36,7 @@ export type UnionTypeTest$data = { // This will never be '%other', but we need some // value in case none of the concrete values match. readonly __typename: "%other"; - } | null; + } | null | undefined; }; export type UnionTypeTest = { response: UnionTypeTest$data; @@ -47,18 +47,18 @@ import { FragmentRefs } from "relay-runtime"; export type LinkedField$data = { readonly actor: { readonly id: string; - } | null; + } | null | undefined; readonly hometown: { readonly id: string; readonly profilePicture: { - readonly uri: string | null; - } | null; - } | null; + readonly uri: string | null | undefined; + } | null | undefined; + } | null | undefined; readonly profilePicture: { - readonly height: number | null; - readonly uri: string | null; - readonly width: number | null; - } | null; + readonly height: number | null | undefined; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; + } | null | undefined; readonly " $fragmentType": "LinkedField"; }; export type LinkedField$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field-in-query.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field-in-query.expected index 5f6c893c76ef6..16ef10bb16b0c 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field-in-query.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field-in-query.expected @@ -24,15 +24,15 @@ fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { } ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; -export type 
NameRendererQuery$variables = {}; +export type NameRendererQuery$variables = Record; export type NameRendererQuery$data = { readonly me: { readonly nameRenderer: { - readonly __fragmentPropName?: string | null; - readonly __module_component?: string | null; + readonly __fragmentPropName?: string | null | undefined; + readonly __module_component?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"MarkdownUserNameRenderer_name" | "PlainUserNameRenderer_name">; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type NameRendererQuery = { response: NameRendererQuery$data; @@ -42,9 +42,9 @@ export type NameRendererQuery = { import { FragmentRefs } from "relay-runtime"; export type MarkdownUserNameRenderer_name$data = { readonly data: { - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; readonly " $fragmentType": "MarkdownUserNameRenderer_name"; }; export type MarkdownUserNameRenderer_name$key = { @@ -55,9 +55,9 @@ export type MarkdownUserNameRenderer_name$key = { import { FragmentRefs } from "relay-runtime"; export type PlainUserNameRenderer_name$data = { readonly data: { - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: string | null | undefined; readonly " $fragmentType": "PlainUserNameRenderer_name"; }; export type PlainUserNameRenderer_name$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field.expected index 5f09c34582ba6..04259fbc12e7b 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field.expected +++ 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/match-field.expected @@ -25,9 +25,9 @@ fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { import { FragmentRefs } from "relay-runtime"; export type MarkdownUserNameRenderer_name$data = { readonly data: { - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; readonly " $fragmentType": "MarkdownUserNameRenderer_name"; }; export type MarkdownUserNameRenderer_name$key = { @@ -39,10 +39,10 @@ import { FragmentRefs } from "relay-runtime"; export type NameRendererFragment$data = { readonly id: string; readonly nameRenderer: { - readonly __fragmentPropName?: string | null; - readonly __module_component?: string | null; + readonly __fragmentPropName?: string | null | undefined; + readonly __module_component?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"MarkdownUserNameRenderer_name" | "PlainUserNameRenderer_name">; - } | null; + } | null | undefined; readonly " $fragmentType": "NameRendererFragment"; }; export type NameRendererFragment$key = { @@ -53,9 +53,9 @@ export type NameRendererFragment$key = { import { FragmentRefs } from "relay-runtime"; export type PlainUserNameRenderer_name$data = { readonly data: { - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: string | null | undefined; readonly " $fragmentType": "PlainUserNameRenderer_name"; }; export type PlainUserNameRenderer_name$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-input-has-array.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-input-has-array.expected index a309563f6f915..75485c94b3573 100644 --- 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-input-has-array.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-input-has-array.expected @@ -8,25 +8,25 @@ mutation InputHasArray($input: UpdateAllSeenStateInput) @raw_response_type { } ==================================== OUTPUT =================================== export type UpdateAllSeenStateInput = { - storyIds?: ReadonlyArray | null; + storyIds?: ReadonlyArray | null | undefined; }; export type InputHasArray$variables = { - input?: UpdateAllSeenStateInput | null; + input?: UpdateAllSeenStateInput | null | undefined; }; export type InputHasArray$data = { readonly viewerNotificationsUpdateAllSeenState: { readonly stories: ReadonlyArray<{ - readonly actorCount: number | null; - } | null> | null; - } | null; + readonly actorCount: number | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; }; export type InputHasArray$rawResponse = { readonly viewerNotificationsUpdateAllSeenState: { readonly stories: ReadonlyArray<{ - readonly actorCount: number | null; + readonly actorCount: number | null | undefined; readonly id: string; - } | null> | null; - } | null; + } | null | undefined> | null | undefined; + } | null | undefined; }; export type InputHasArray = { rawResponse: InputHasArray$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-client-extension.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-client-extension.expected index 1fe8bf40a3a29..c2aaac845d0ea 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-client-extension.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-client-extension.expected @@ -20,29 +20,29 @@ type Foo { } ==================================== OUTPUT =================================== export type 
UpdateAllSeenStateInput = { - storyIds?: ReadonlyArray | null; + storyIds?: ReadonlyArray | null | undefined; }; export type Test$variables = { - input?: UpdateAllSeenStateInput | null; + input?: UpdateAllSeenStateInput | null | undefined; }; export type Test$data = { readonly viewerNotificationsUpdateAllSeenState: { readonly stories: ReadonlyArray<{ readonly foos: ReadonlyArray<{ - readonly bar: string | null; - } | null> | null; - } | null> | null; - } | null; + readonly bar: string | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; }; export type Test$rawResponse = { readonly viewerNotificationsUpdateAllSeenState: { readonly stories: ReadonlyArray<{ readonly foos?: ReadonlyArray<{ - readonly bar: string | null; - } | null> | null; + readonly bar: string | null | undefined; + } | null | undefined> | null | undefined; readonly id: string; - } | null> | null; - } | null; + } | null | undefined> | null | undefined; + } | null | undefined; }; export type Test = { rawResponse: Test$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-enums-on-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-enums-on-fragment.expected index 3220cbb6d2de5..d91bfde5546d2 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-enums-on-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-enums-on-fragment.expected @@ -31,19 +31,19 @@ import { FragmentRefs } from "relay-runtime"; export type TestEnums = "mark" | "zuck" | "%future added value"; export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - 
comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type CommentCreateMutation$variables = { - first?: number | null; + first?: number | null | undefined; input: CommentCreateInput; - orderBy?: ReadonlyArray | null; + orderBy?: ReadonlyArray | null | undefined; }; export type CommentCreateMutation$data = { readonly commentCreate: { @@ -54,11 +54,11 @@ export type CommentCreateMutation$data = { readonly __typename: "User"; readonly id: string; readonly " $fragmentSpreads": FragmentRefs<"FriendFragment">; - } | null; - } | null> | null; - } | null; - } | null; - } | null; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; + } | null | undefined; + } | null | undefined; }; export type CommentCreateMutation$rawResponse = { readonly commentCreate: { @@ -68,17 +68,17 @@ export type CommentCreateMutation$rawResponse = { readonly node: { readonly __typename: "User"; readonly id: string; - readonly lastName: string | null; - readonly name: string | null; + readonly lastName: string | null | undefined; + readonly name: string | null | undefined; readonly profilePicture2: { - readonly test_enums: TestEnums | null; - } | null; - } | null; - } | null> | null; - } | null; + readonly test_enums: TestEnums | null | undefined; + } | null | undefined; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; readonly id: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type CommentCreateMutation = { rawResponse: CommentCreateMutation$rawResponse; @@ -89,11 +89,11 @@ export type CommentCreateMutation = { export type TestEnums = "mark" | "zuck" | "%future added value"; import { FragmentRefs } from "relay-runtime"; export type FriendFragment$data = { - readonly lastName: string 
| null; - readonly name: string | null; + readonly lastName: string | null | undefined; + readonly name: string | null | undefined; readonly profilePicture2: { - readonly test_enums: TestEnums | null; - } | null; + readonly test_enums: TestEnums | null | undefined; + } | null | undefined; readonly " $fragmentType": "FriendFragment"; }; export type FriendFragment$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-nested-fragments.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-nested-fragments.expected index d83334492bec3..32f0b76b3964c 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-nested-fragments.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-nested-fragments.expected @@ -34,19 +34,19 @@ fragment FeedbackFragment on Feedback { import { FragmentRefs } from "relay-runtime"; export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type CommentCreateMutation$variables = { - first?: number | null; + first?: number | null | undefined; input: CommentCreateInput; - orderBy?: ReadonlyArray | null; + orderBy?: ReadonlyArray | null | undefined; }; export type CommentCreateMutation$data = { readonly commentCreate: { @@ -54,13 +54,13 @@ export type CommentCreateMutation$data = { readonly friends: { readonly edges: ReadonlyArray<{ readonly node: { - readonly lastName: string | null; + readonly lastName: string | null | undefined; 
readonly " $fragmentSpreads": FragmentRefs<"FriendFragment">; - } | null; - } | null> | null; - } | null; - } | null; - } | null; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; + } | null | undefined; + } | null | undefined; }; export type CommentCreateMutation$rawResponse = { readonly commentCreate: { @@ -70,17 +70,17 @@ export type CommentCreateMutation$rawResponse = { readonly node: { readonly feedback: { readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly id: string; - readonly lastName: string | null; - readonly name: string | null; - } | null; - } | null> | null; - } | null; + readonly lastName: string | null | undefined; + readonly name: string | null | undefined; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; readonly id: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type CommentCreateMutation = { rawResponse: CommentCreateMutation$rawResponse; @@ -91,7 +91,7 @@ export type CommentCreateMutation = { import { FragmentRefs } from "relay-runtime"; export type FeedbackFragment$data = { readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; readonly " $fragmentType": "FeedbackFragment"; }; export type FeedbackFragment$key = { @@ -103,9 +103,9 @@ import { FragmentRefs } from "relay-runtime"; export type FriendFragment$data = { readonly feedback: { readonly " $fragmentSpreads": FragmentRefs<"FeedbackFragment">; - } | null; - readonly lastName: string | null; - readonly name: string | null; + } | null | undefined; + readonly lastName: string | null | undefined; + readonly name: string | null | undefined; readonly " $fragmentType": "FriendFragment"; }; export type FriendFragment$key = { diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected index 8ccfbf29abe4a..3a680c2459832 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected @@ -30,14 +30,14 @@ fragment InlineFragmentWithOverlappingFields on Actor { import { FragmentRefs } from "relay-runtime"; export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type TestMutation$variables = { input: CommentCreateInput; @@ -47,9 +47,9 @@ export type TestMutation$data = { readonly viewer: { readonly actor: { readonly " $fragmentSpreads": FragmentRefs<"InlineFragmentWithOverlappingFields">; - } | null; - } | null; - } | null; + } | null | undefined; + } | null | undefined; + } | null | undefined; }; export type TestMutation$rawResponse = { readonly commentCreate: { @@ -60,26 +60,26 @@ export type TestMutation$rawResponse = { readonly hometown: { readonly id: string; readonly message: { - readonly text: string | null; - } | null; - } | null; + readonly text: string | null | undefined; + } | null | undefined; + } | null | undefined; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; } | { readonly __typename: "User"; 
readonly __isActor: "User"; readonly hometown: { readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly id: string; } | { readonly __typename: string; readonly __isActor: string; readonly id: string; - } | null; - } | null; - } | null; + } | null | undefined; + } | null | undefined; + } | null | undefined; }; export type TestMutation = { rawResponse: TestMutation$rawResponse; @@ -92,11 +92,11 @@ export type InlineFragmentWithOverlappingFields$data = { readonly hometown?: { readonly id: string; readonly message?: { - readonly text: string | null; - } | null; - readonly name: string | null; - } | null; - readonly name?: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly name: string | null | undefined; + } | null | undefined; + readonly name?: string | null | undefined; readonly " $fragmentType": "InlineFragmentWithOverlappingFields"; }; export type InlineFragmentWithOverlappingFields$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation.expected index 3f1d65decea4d..700ae55562e14 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/mutation.expected @@ -17,30 +17,30 @@ mutation CommentCreateMutation( ==================================== OUTPUT =================================== export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: 
CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type CommentCreateMutation$variables = { - first?: number | null; + first?: number | null | undefined; input: CommentCreateInput; - orderBy?: ReadonlyArray | null; + orderBy?: ReadonlyArray | null | undefined; }; export type CommentCreateMutation$data = { readonly commentCreate: { readonly comment: { readonly friends: { - readonly count: number | null; - } | null; + readonly count: number | null | undefined; + } | null | undefined; readonly id: string; - readonly name: string | null; - } | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; + } | null | undefined; }; export type CommentCreateMutation = { response: CommentCreateMutation$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-handles.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-handles.expected index 76c3074aee3b0..8e975f88587cc 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-handles.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-handles.expected @@ -23,21 +23,21 @@ export type LinkedHandleField$variables = { export type LinkedHandleField$data = { readonly node: { readonly friends?: { - readonly count: number | null; - } | null; - } | null; + readonly count: number | null | undefined; + } | null | undefined; + } | null | undefined; }; export type LinkedHandleField$rawResponse = { readonly node: { readonly __typename: "User"; readonly friends: { - readonly count: number | null; - } | null; + readonly count: number | null | undefined; + } | null | undefined; readonly id: string; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type LinkedHandleField = { rawResponse: LinkedHandleField$rawResponse; @@ -50,18 +50,18 @@ export type ScalarHandleField$variables = 
{ }; export type ScalarHandleField$data = { readonly node: { - readonly name?: string | null; - } | null; + readonly name?: string | null | undefined; + } | null | undefined; }; export type ScalarHandleField$rawResponse = { readonly node: { readonly __typename: "User"; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type ScalarHandleField = { rawResponse: ScalarHandleField$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-match-fields.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-match-fields.expected index 6aa4c9b074393..3bb12107e067e 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-match-fields.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-match-fields.expected @@ -29,25 +29,25 @@ fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { } ==================================== OUTPUT =================================== import { FragmentRefs, Local3DPayload } from "relay-runtime"; -export type Test$variables = {}; +export type Test$variables = Record; export type Test$data = { readonly node: { readonly " $fragmentSpreads": FragmentRefs<"NameRendererFragment">; - } | null; + } | null | undefined; }; export type PlainUserNameRenderer_name = { readonly data: { - readonly id: string | null; - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly id: string | null | undefined; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: string | null | undefined; }; export type MarkdownUserNameRenderer_name = { readonly data: { - readonly id: string | null; - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly id: string | null | 
undefined; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; }; export type Test$rawResponse = { readonly node: { @@ -59,19 +59,19 @@ export type Test$rawResponse = { readonly __typename: "PlainUserNameRenderer"; }> | { readonly __typename: "MarkdownUserNameRenderer"; - readonly __module_component_NameRendererFragment: any | null; - readonly __module_operation_NameRendererFragment: any | null; + readonly __module_component_NameRendererFragment: any | null | undefined; + readonly __module_operation_NameRendererFragment: any | null | undefined; } | { readonly __typename: "PlainUserNameRenderer"; - readonly __module_component_NameRendererFragment: any | null; - readonly __module_operation_NameRendererFragment: any | null; + readonly __module_component_NameRendererFragment: any | null | undefined; + readonly __module_operation_NameRendererFragment: any | null | undefined; } | { readonly __typename: string; - } | null; + } | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type Test = { rawResponse: Test$rawResponse; @@ -82,9 +82,9 @@ export type Test = { import { FragmentRefs } from "relay-runtime"; export type MarkdownUserNameRenderer_name$data = { readonly data: { - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; readonly " $fragmentType": "MarkdownUserNameRenderer_name"; }; export type MarkdownUserNameRenderer_name$key = { @@ -96,10 +96,10 @@ import { FragmentRefs } from "relay-runtime"; export type NameRendererFragment$data = { readonly id: string; readonly nameRenderer: { - readonly __fragmentPropName?: string | null; - readonly __module_component?: string | null; + readonly __fragmentPropName?: string | null | undefined; + readonly __module_component?: string | null | undefined; 
readonly " $fragmentSpreads": FragmentRefs<"MarkdownUserNameRenderer_name" | "PlainUserNameRenderer_name">; - } | null; + } | null | undefined; readonly " $fragmentType": "NameRendererFragment"; }; export type NameRendererFragment$key = { @@ -110,9 +110,9 @@ export type NameRendererFragment$key = { import { FragmentRefs } from "relay-runtime"; export type PlainUserNameRenderer_name$data = { readonly data: { - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: string | null | undefined; readonly " $fragmentType": "PlainUserNameRenderer_name"; }; export type PlainUserNameRenderer_name$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-module-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-module-field.expected index fa08e0e67b1fc..16fbb971d33aa 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-module-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-module-field.expected @@ -18,30 +18,30 @@ fragment Test_userRenderer on PlainUserRenderer { } ==================================== OUTPUT =================================== import { FragmentRefs, Local3DPayload } from "relay-runtime"; -export type Test$variables = {}; +export type Test$variables = Record; export type Test$data = { readonly node: { readonly " $fragmentSpreads": FragmentRefs<"Test_user">; - } | null; + } | null | undefined; }; export type Test_userRenderer = { readonly user: { readonly id: string; - readonly username: string | null; - } | null; + readonly username: string | null | undefined; + } | null | undefined; }; export type Test$rawResponse = { readonly node: { readonly __typename: "User"; readonly id: string; - readonly plainUserRenderer: Local3DPayload<"Test_user", {}> | { - readonly 
__module_component_Test_user: any | null; - readonly __module_operation_Test_user: any | null; - } | null; + readonly plainUserRenderer: Local3DPayload<"Test_user", Record> | { + readonly __module_component_Test_user: any | null | undefined; + readonly __module_operation_Test_user: any | null | undefined; + } | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type Test = { rawResponse: Test$rawResponse; @@ -52,10 +52,10 @@ export type Test = { import { FragmentRefs } from "relay-runtime"; export type Test_user$data = { readonly plainUserRenderer: { - readonly __fragmentPropName: string | null; - readonly __module_component: string | null; + readonly __fragmentPropName: string | null | undefined; + readonly __module_component: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"Test_userRenderer">; - } | null; + } | null | undefined; readonly " $fragmentType": "Test_user"; }; export type Test_user$key = { @@ -66,8 +66,8 @@ export type Test_user$key = { import { FragmentRefs } from "relay-runtime"; export type Test_userRenderer$data = { readonly user: { - readonly username: string | null; - } | null; + readonly username: string | null | undefined; + } | null | undefined; readonly " $fragmentType": "Test_userRenderer"; }; export type Test_userRenderer$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-multiple-match-fields.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-multiple-match-fields.expected index b94553961b9a7..7b8c387cf7ae1 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-multiple-match-fields.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-multiple-match-fields.expected @@ -49,32 +49,32 @@ fragment MarkdownUserNameRenderer_name on MarkdownUserNameRenderer { } ==================================== OUTPUT 
=================================== import { FragmentRefs, Local3DPayload } from "relay-runtime"; -export type Test$variables = {}; +export type Test$variables = Record; export type Test$data = { readonly node: { - readonly username?: string | null; + readonly username?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"NameRendererFragment">; - } | null; + } | null | undefined; readonly viewer: { readonly actor: { - readonly name?: string | null; + readonly name?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"AnotherNameRendererFragment">; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type PlainUserNameRenderer_name = { readonly data: { - readonly id: string | null; - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly id: string | null | undefined; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: string | null | undefined; }; export type MarkdownUserNameRenderer_name = { readonly data: { - readonly id: string | null; - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly id: string | null | undefined; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; }; export type Test$rawResponse = { readonly node: { @@ -86,45 +86,45 @@ export type Test$rawResponse = { readonly __typename: "PlainUserNameRenderer"; }> | { readonly __typename: "MarkdownUserNameRenderer"; - readonly __module_component_NameRendererFragment: any | null; - readonly __module_operation_NameRendererFragment: any | null; + readonly __module_component_NameRendererFragment: any | null | undefined; + readonly __module_operation_NameRendererFragment: any | null | undefined; } | { readonly __typename: "PlainUserNameRenderer"; - readonly __module_component_NameRendererFragment: any | null; - readonly __module_operation_NameRendererFragment: any | 
null; + readonly __module_component_NameRendererFragment: any | null | undefined; + readonly __module_operation_NameRendererFragment: any | null | undefined; } | { readonly __typename: string; - } | null; - readonly username: string | null; + } | null | undefined; + readonly username: string | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; readonly viewer: { readonly actor: { readonly __typename: "User"; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; readonly nameRenderer: Local3DPayload<"AnotherNameRendererFragment", { readonly __typename: "MarkdownUserNameRenderer"; }> | Local3DPayload<"AnotherNameRendererFragment", { readonly __typename: "PlainUserNameRenderer"; }> | { readonly __typename: "MarkdownUserNameRenderer"; - readonly __module_component_AnotherNameRendererFragment: any | null; - readonly __module_operation_AnotherNameRendererFragment: any | null; + readonly __module_component_AnotherNameRendererFragment: any | null | undefined; + readonly __module_operation_AnotherNameRendererFragment: any | null | undefined; } | { readonly __typename: "PlainUserNameRenderer"; - readonly __module_component_AnotherNameRendererFragment: any | null; - readonly __module_operation_AnotherNameRendererFragment: any | null; + readonly __module_component_AnotherNameRendererFragment: any | null | undefined; + readonly __module_operation_AnotherNameRendererFragment: any | null | undefined; } | { readonly __typename: string; - } | null; + } | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type Test = { rawResponse: Test$rawResponse; @@ -134,12 +134,12 @@ export type Test = { ------------------------------------------------------------------------------- import { FragmentRefs } from "relay-runtime"; export type AnotherNameRendererFragment$data = { - readonly name: 
string | null; + readonly name: string | null | undefined; readonly nameRenderer: { - readonly __fragmentPropName?: string | null; - readonly __module_component?: string | null; + readonly __fragmentPropName?: string | null | undefined; + readonly __module_component?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"MarkdownUserNameRenderer_name" | "PlainUserNameRenderer_name">; - } | null; + } | null | undefined; readonly " $fragmentType": "AnotherNameRendererFragment"; }; export type AnotherNameRendererFragment$key = { @@ -150,9 +150,9 @@ export type AnotherNameRendererFragment$key = { import { FragmentRefs } from "relay-runtime"; export type MarkdownUserNameRenderer_name$data = { readonly data: { - readonly markup: string | null; - } | null; - readonly markdown: string | null; + readonly markup: string | null | undefined; + } | null | undefined; + readonly markdown: string | null | undefined; readonly " $fragmentType": "MarkdownUserNameRenderer_name"; }; export type MarkdownUserNameRenderer_name$key = { @@ -164,10 +164,10 @@ import { FragmentRefs } from "relay-runtime"; export type NameRendererFragment$data = { readonly id: string; readonly nameRenderer: { - readonly __fragmentPropName?: string | null; - readonly __module_component?: string | null; + readonly __fragmentPropName?: string | null | undefined; + readonly __module_component?: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"MarkdownUserNameRenderer_name" | "PlainUserNameRenderer_name">; - } | null; + } | null | undefined; readonly " $fragmentType": "NameRendererFragment"; }; export type NameRendererFragment$key = { @@ -178,9 +178,9 @@ export type NameRendererFragment$key = { import { FragmentRefs } from "relay-runtime"; export type PlainUserNameRenderer_name$data = { readonly data: { - readonly text: string | null; - } | null; - readonly plaintext: string | null; + readonly text: string | null | undefined; + } | null | undefined; + readonly plaintext: 
string | null | undefined; readonly " $fragmentType": "PlainUserNameRenderer_name"; }; export type PlainUserNameRenderer_name$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-conditional.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-conditional.expected index 258883e076701..777565655be7b 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-conditional.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-conditional.expected @@ -24,22 +24,22 @@ export type ExampleQuery$variables = { export type ExampleQuery$data = { readonly node: { readonly " $fragmentSpreads": FragmentRefs<"FriendFragment">; - } | null; + } | null | undefined; }; export type ExampleQuery$rawResponse = { readonly node: { readonly __typename: "User"; readonly feedback: { readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly id: string; - readonly lastName: string | null; - readonly name: string | null; + readonly lastName: string | null | undefined; + readonly name: string | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type ExampleQuery = { rawResponse: ExampleQuery$rawResponse; @@ -51,10 +51,10 @@ import { FragmentRefs } from "relay-runtime"; export type FriendFragment$data = { readonly feedback?: { readonly id: string; - readonly name: string | null; - } | null; - readonly lastName?: string | null; - readonly name?: string | null; + readonly name: string | null | undefined; + } | null | undefined; + readonly lastName?: string | null | undefined; + readonly name?: string | null | undefined; readonly " $fragmentType": "FriendFragment"; }; export type FriendFragment$key = { diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected index d1d6ab5c14d2e..9d882e12149b3 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected @@ -29,18 +29,18 @@ export type ExampleQuery$variables = { export type ExampleQuery$data = { readonly node: { readonly friends?: { - readonly count: number | null; - } | null; - readonly username: string | null; + readonly count: number | null | undefined; + } | null | undefined; + readonly username: string | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"FriendFragment">; - } | null; + } | null | undefined; }; export type ExampleQuery$rawResponse = { readonly node: { readonly __typename: string; readonly id: string; - readonly username: string | null; - } | null; + readonly username: string | null | undefined; + } | null | undefined; }; export type ExampleQuery = { rawResponse: ExampleQuery$rawResponse; @@ -52,10 +52,10 @@ import { FragmentRefs } from "relay-runtime"; export type FriendFragment$data = { readonly feedback?: { readonly id: string; - readonly name: string | null; - } | null; - readonly lastName?: string | null; - readonly name?: string | null; + readonly name: string | null | undefined; + } | null | undefined; + readonly lastName?: string | null | undefined; + readonly name?: string | null | undefined; readonly " $fragmentType": "FriendFragment"; }; export type FriendFragment$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream-connection.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream-connection.expected 
index 0bac05ea84f3d..3bcc711ec9bff 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream-connection.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream-connection.expected @@ -17,48 +17,48 @@ query TestDefer @raw_response_type { } } ==================================== OUTPUT =================================== -export type TestDefer$variables = {}; +export type TestDefer$variables = Record; export type TestDefer$data = { readonly node: { readonly friends?: { readonly edges: ReadonlyArray<{ readonly node: { readonly actor: { - readonly name: string | null; - } | null; - } | null; - } | null> | null; - } | null; - readonly name?: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; + readonly name?: string | null | undefined; + } | null | undefined; }; export type TestDefer$rawResponse = { readonly node: { readonly __typename: "User"; readonly friends: { readonly edges: ReadonlyArray<{ - readonly cursor: string | null; + readonly cursor: string | null | undefined; readonly node: { readonly __typename: "User"; readonly actor: { readonly __typename: string; readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly id: string; - } | null; - } | null> | null; + } | null | undefined; + } | null | undefined> | null | undefined; readonly pageInfo: { - readonly endCursor: string | null; - readonly hasNextPage: boolean | null; - } | null; - } | null; + readonly endCursor: string | null | undefined; + readonly hasNextPage: boolean | null | undefined; + } | null | undefined; + } | null | undefined; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | 
undefined; }; export type TestDefer = { rawResponse: TestDefer$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream.expected index 6fcf8a530242b..a5e8c9d8fab04 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/query-with-stream.expected @@ -18,41 +18,41 @@ query TestStream @raw_response_type { } } ==================================== OUTPUT =================================== -export type TestStream$variables = {}; +export type TestStream$variables = Record; export type TestStream$data = { readonly node: { readonly friends?: { readonly edges: ReadonlyArray<{ readonly node: { readonly id: string; - } | null; - } | null> | null; - } | null; - readonly name?: string | null; - } | null; + } | null | undefined; + } | null | undefined> | null | undefined; + } | null | undefined; + readonly name?: string | null | undefined; + } | null | undefined; }; export type TestStream$rawResponse = { readonly node: { readonly __typename: "User"; readonly friends: { readonly edges: ReadonlyArray<{ - readonly cursor: string | null; + readonly cursor: string | null | undefined; readonly node: { readonly __typename: "User"; readonly id: string; - } | null; - } | null> | null; + } | null | undefined; + } | null | undefined> | null | undefined; readonly pageInfo: { - readonly endCursor: string | null; - readonly hasNextPage: boolean | null; - } | null; - } | null; + readonly endCursor: string | null | undefined; + readonly hasNextPage: boolean | null | undefined; + } | null | undefined; + } | null | undefined; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; } | { readonly __typename: string; readonly id: string; - } | null; + } | null | undefined; }; export type 
TestStream = { rawResponse: TestStream$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable-fragment.expected index 560edd1993dc9..ca84a89985df6 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable-fragment.expected @@ -14,7 +14,7 @@ export type RefetchableFragmentQuery$variables = { export type RefetchableFragmentQuery$data = { readonly node: { readonly " $fragmentSpreads": FragmentRefs<"RefetchableFragment">; - } | null; + } | null | undefined; }; export type RefetchableFragmentQuery = { response: RefetchableFragmentQuery$data; @@ -24,8 +24,8 @@ export type RefetchableFragmentQuery = { import { FragmentRefs } from "relay-runtime"; export type RefetchableFragment$data = { readonly fragAndField: { - readonly uri: string | null; - } | null; + readonly uri: string | null | undefined; + } | null | undefined; readonly id: string; readonly " $fragmentType": "RefetchableFragment"; }; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable.expected index 7d3074e25d8f4..0d447f017be26 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/refetchable.expected @@ -14,7 +14,7 @@ export type FlowRefetchableFragmentQuery$variables = { export type FlowRefetchableFragmentQuery$data = { readonly node: { readonly " $fragmentSpreads": FragmentRefs<"FlowRefetchableFragment">; - } | null; + } | null | undefined; }; export type FlowRefetchableFragmentQuery = { response: FlowRefetchableFragmentQuery$data; @@ -24,7 +24,7 @@ export type FlowRefetchableFragmentQuery = { import 
{ FragmentRefs } from "relay-runtime"; export type FlowRefetchableFragment$data = { readonly id: string; - readonly name?: string | null; + readonly name?: string | null | undefined; readonly " $fragmentType": "FlowRefetchableFragment"; }; export type FlowRefetchableFragment$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.expected new file mode 100644 index 0000000000000..cf12f1daae405 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +query Foo { + viewer { + ...Assignable_viewer + } +} + +fragment Assignable_viewer on Viewer @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type Foo$variables = Record; +export type Foo$data = { + readonly viewer: { + readonly __typename: "Viewer"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_viewer">; + } | null | undefined; +}; +export type Foo = { + response: Foo$data; + variables: Foo$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.graphql new file mode 100644 index 0000000000000..60ec23d06e5cd --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.graphql @@ -0,0 +1,9 @@ +query Foo 
{ + viewer { + ...Assignable_viewer + } +} + +fragment Assignable_viewer on Viewer @assignable { + __typename +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-client-id-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-client-id-field.expected index c400b6169c6bd..e0c9b1efb3124 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-client-id-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-client-id-field.expected @@ -36,7 +36,7 @@ export type RelayClientIDFieldQuery$data = { readonly __typename: "User"; readonly __id: string; readonly id: string; - } | null; + } | null | undefined; readonly node: { readonly __typename: string; readonly __id: string; @@ -46,11 +46,11 @@ export type RelayClientIDFieldQuery$data = { readonly text?: { readonly __typename: "Text"; readonly __id: string; - readonly text: string | null; - } | null; - } | null; + readonly text: string | null | undefined; + } | null | undefined; + } | null | undefined; readonly id: string; - } | null; + } | null | undefined; }; export type RelayClientIDFieldQuery = { response: RelayClientIDFieldQuery$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected index 18d7d97b6e360..0e483e7a93213 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected @@ -25,6 +25,7 @@ interface ClientInterface { type ClientType implements ClientInterface { name: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "ClientTypeResolver" 
fragment_name: "ClientType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientType") } type ClientTypeWithNestedInterface { @@ -36,26 +37,26 @@ extend type User { pop_star_game: ClientTypeWithNestedInterface @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) } ==================================== OUTPUT =================================== -export type User__pop_star_game$normalization$variables = {}; +export type User__pop_star_game$normalization$variables = Record; export type User__pop_star_game$normalization$data = { readonly client_interface: { readonly __typename: "ClientType"; - readonly name: string | null; + readonly name: string | null | undefined; } | { // This will never be '%other', but we need some // value in case none of the concrete values match. readonly __typename: "%other"; - } | null; + } | null | undefined; }; export type User__pop_star_game$normalization = { response: User__pop_star_game$normalization$data; variables: User__pop_star_game$normalization$variables; }; ------------------------------------------------------------------------------- -export type User__pop_star_name$normalization$variables = {}; +export type User__pop_star_name$normalization$variables = Record; export type User__pop_star_name$normalization$data = { readonly __typename: "ClientType"; - readonly name: string | null; + readonly name: string | null | undefined; } | { // This will never be '%other', but we need some // value in case none of the concrete values match. 
@@ -67,10 +68,11 @@ export type User__pop_star_name$normalization = { }; ------------------------------------------------------------------------------- import type { FragmentRefs } from "relay-runtime"; +import userPopStarNameResolverType from "PopStarNameResolver"; export type Foo_user$data = { readonly poppy: { - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly " $fragmentType": "Foo_user"; }; export type Foo_user$key = { @@ -81,11 +83,11 @@ export type Foo_user$key = { import type { FragmentRefs } from "relay-runtime"; export type PopStarNameResolverFragment_name$data = { readonly address: { - readonly street: string | null; - } | null; - readonly name: string | null; + readonly street: string | null | undefined; + } | null | undefined; + readonly name: string | null | undefined; readonly parents: ReadonlyArray<{ - readonly lastName: string | null; + readonly lastName: string | null | undefined; }>; readonly " $fragmentType": "PopStarNameResolverFragment_name"; }; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.graphql index 15faa3afe0c30..7d40421cee19b 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.graphql +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.graphql @@ -24,6 +24,7 @@ interface ClientInterface { type ClientType implements ClientInterface { name: String + __relay_model_instance: RelayResolverValue @relay_resolver(import_path: "ClientTypeResolver" fragment_name: "ClientType__id", generated_fragment: true, inject_fragment_data: "id", import_name: "ClientType") } type ClientTypeWithNestedInterface { diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected index 273def7834516..8ced180aaa4a6 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected @@ -27,9 +27,9 @@ extend type User { pop_star_name: ClientUser @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) } ==================================== OUTPUT =================================== -export type User__pop_star_name$normalization$variables = {}; +export type User__pop_star_name$normalization$variables = Record; export type User__pop_star_name$normalization$data = { - readonly name: string | null; + readonly name: string | null | undefined; }; export type User__pop_star_name$normalization = { response: User__pop_star_name$normalization$data; @@ -37,10 +37,11 @@ export type User__pop_star_name$normalization = { }; ------------------------------------------------------------------------------- import type { FragmentRefs } from "relay-runtime"; +import userPopStarNameResolverType from "PopStarNameResolver"; export type Foo_user$data = { readonly poppy: { - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; readonly " $fragmentType": "Foo_user"; }; export type Foo_user$key = { @@ -51,11 +52,11 @@ export type Foo_user$key = { import type { FragmentRefs } from "relay-runtime"; export type PopStarNameResolverFragment_name$data = { readonly address: { - readonly street: string | null; - } | null; - readonly name: string | null; + readonly street: string | null | undefined; + } | null | undefined; + 
readonly name: string | null | undefined; readonly parents: ReadonlyArray<{ - readonly lastName: string | null; + readonly lastName: string | null | undefined; }>; readonly " $fragmentType": "PopStarNameResolverFragment_name"; }; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected new file mode 100644 index 0000000000000..b50a69212fe9a --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected @@ -0,0 +1,47 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_name @required(action: THROW) +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_name: RelayResolverValue @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +import userPopStarNameResolverType from "PopStarNameResolver"; +export type Foo_user$data = { + readonly poppy: NonNullable>; + readonly " $fragmentType": "Foo_user"; +}; +export type Foo_user$key = { + readonly " $data"?: Foo_user$data; + readonly " $fragmentSpreads": FragmentRefs<"Foo_user">; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type PopStarNameResolverFragment_name$data = { + readonly address: { + readonly street: string | null | undefined; + } | null | undefined; + readonly name: string | null | undefined; + readonly parents: 
ReadonlyArray<{ + readonly lastName: string | null | undefined; + }>; + readonly " $fragmentType": "PopStarNameResolverFragment_name"; +}; +export type PopStarNameResolverFragment_name$key = { + readonly " $data"?: PopStarNameResolverFragment_name$data; + readonly " $fragmentSpreads": FragmentRefs<"PopStarNameResolverFragment_name">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.graphql new file mode 100644 index 0000000000000..89bdd35c24b41 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.graphql @@ -0,0 +1,19 @@ +fragment Foo_user on User { + poppy: pop_star_name @required(action: THROW) +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_name: RelayResolverValue @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) +} \ No newline at end of file diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected new file mode 100644 index 0000000000000..68d4f2d58dff7 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected @@ -0,0 +1,47 @@ +==================================== INPUT ==================================== +fragment Foo_user on User { + poppy: pop_star_name +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + 
parents { + lastName + } +} + +# %extensions% + +extend type User { + pop_star_name: RelayResolverValue @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +import userPopStarNameResolverType from "PopStarNameResolver"; +export type Foo_user$data = { + readonly poppy: ReturnType | null | undefined; + readonly " $fragmentType": "Foo_user"; +}; +export type Foo_user$key = { + readonly " $data"?: Foo_user$data; + readonly " $fragmentSpreads": FragmentRefs<"Foo_user">; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type PopStarNameResolverFragment_name$data = { + readonly address: { + readonly street: string | null | undefined; + } | null | undefined; + readonly name: string | null | undefined; + readonly parents: ReadonlyArray<{ + readonly lastName: string | null | undefined; + }>; + readonly " $fragmentType": "PopStarNameResolverFragment_name"; +}; +export type PopStarNameResolverFragment_name$key = { + readonly " $data"?: PopStarNameResolverFragment_name$data; + readonly " $fragmentSpreads": FragmentRefs<"PopStarNameResolverFragment_name">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.graphql new file mode 100644 index 0000000000000..87e2935762048 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.graphql @@ -0,0 +1,19 @@ +fragment Foo_user on User { + poppy: pop_star_name +} + +fragment PopStarNameResolverFragment_name on User { + name + address { + street + } + parents { + 
lastName + } +} + +# %extensions% + +extend type User { + pop_star_name: RelayResolverValue @relay_resolver(fragment_name: "PopStarNameResolverFragment_name", import_path: "PopStarNameResolver", has_output_type: true) +} \ No newline at end of file diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-weak-client-type.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-weak-client-type.expected index 204d3ada0116d..2033bd9938c5a 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-weak-client-type.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/relay-weak-client-type.expected @@ -29,8 +29,8 @@ export type RelayWeakClientTypeQuery$variables = { }; export type RelayWeakClientTypeQuery$data = { readonly my_custom_type: { - readonly __instance: CustomClientType | null; - } | null; + readonly __instance: CustomClientType | null | undefined; + } | null | undefined; }; export type RelayWeakClientTypeQuery = { response: RelayWeakClientTypeQuery$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected index 1f35e3a18c6f2..14a4efbdbb77f 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected @@ -18,7 +18,7 @@ export type Foo$data = { // value in case none of the concrete values match. 
readonly __typename: "%other"; readonly " $fragmentType": "Foo"; -} | null; +} | null | undefined; export type Foo$key = { readonly " $data"?: Foo$data; readonly " $fragmentSpreads": FragmentRefs<"Foo">; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-fragment.expected index f9b05c336f7e9..aa421bf6eaa8d 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-fragment.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-fragment.expected @@ -6,10 +6,10 @@ fragment NonNullFragment on User { ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; export type NonNullFragment$data = { - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly lastName: string; readonly " $fragmentType": "NonNullFragment"; -} | null; +} | null | undefined; export type NonNullFragment$key = { readonly " $data"?: NonNullFragment$data; readonly " $fragmentSpreads": FragmentRefs<"NonNullFragment">; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected index b425616c395ac..902172c2ed5af 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected @@ -9,11 +9,11 @@ fragment NonNullFragment on User { ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; export type NonNullFragment$data = { - readonly 
firstName: string | null; + readonly firstName: string | null | undefined; readonly screennames: ReadonlyArray<{ - readonly name: string | null; + readonly name: string | null | undefined; readonly service: string; - } | null> | null; + } | null | undefined> | null | undefined; readonly " $fragmentType": "NonNullFragment"; }; export type NonNullFragment$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-query.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-query.expected index c7ad970e126c4..7e87d44a2bb26 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-query.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-to-query.expected @@ -6,13 +6,13 @@ query FooQuery { } } ==================================== OUTPUT =================================== -export type FooQuery$variables = {}; +export type FooQuery$variables = Record; export type FooQuery$data = { readonly me: { - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly lastName: string; }; -} | null; +} | null | undefined; export type FooQuery = { response: FooQuery$data; variables: FooQuery$variables; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected index 718675d78c796..c4a0802231f16 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected @@ -9,14 +9,14 @@ mutation CommentCreateMutation($input: CommentCreateInput!) 
{ ==================================== OUTPUT =================================== export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type CommentCreateMutation$variables = { input: CommentCreateInput; @@ -27,7 +27,7 @@ export type CommentCreateMutation$data = { readonly id: string; }; }; -} | null; +} | null | undefined; export type CommentCreateMutation = { response: CommentCreateMutation$data; variables: CommentCreateMutation$variables; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected index fb9094db3bec7..2e3c34fa386b3 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected @@ -31,11 +31,11 @@ fragment Foo on Node { import { FragmentRefs } from "relay-runtime"; export type Bar$data = { readonly body?: { - readonly text: string | null; - } | null; + readonly text: string | null | undefined; + } | null | undefined; readonly name?: string; readonly " $fragmentType": "Bar"; -} | null; +} | null | undefined; export type Bar$key = { readonly " $data"?: Bar$data; readonly " $fragmentSpreads": FragmentRefs<"Bar">; @@ -45,8 +45,8 @@ import { FragmentRefs } from "relay-runtime"; export type Foo$data = { 
readonly __typename: "Comment"; readonly body: { - readonly text: string | null; - } | null; + readonly text: string | null | undefined; + } | null | undefined; readonly " $fragmentType": "Foo"; } | { readonly __typename: "User"; @@ -57,7 +57,7 @@ export type Foo$data = { // value in case none of the concrete values match. readonly __typename: "%other"; readonly " $fragmentType": "Foo"; -} | null; +} | null | undefined; export type Foo$key = { readonly " $data"?: Foo$data; readonly " $fragmentSpreads": FragmentRefs<"Foo">; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-raw-response-type.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-raw-response-type.expected index f5d5a426c1e3c..bfc7f43eb5fcc 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-raw-response-type.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-raw-response-type.expected @@ -6,18 +6,18 @@ query MyQuery @raw_response_type { } } ==================================== OUTPUT =================================== -export type MyQuery$variables = {}; +export type MyQuery$variables = Record; export type MyQuery$data = { readonly me: { readonly id: string; readonly name: string; }; -} | null; +} | null | undefined; export type MyQuery$rawResponse = { readonly me: { readonly id: string; - readonly name: string | null; - } | null; + readonly name: string | null | undefined; + } | null | undefined; }; export type MyQuery = { rawResponse: MyQuery$rawResponse; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected index a3f8eacc5264e..bb5af05cee544 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected 
+++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected @@ -6,7 +6,7 @@ fragment NonNullFragment on User { ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; export type NonNullFragment$data = { - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly lastName: string; readonly " $fragmentType": "NonNullFragment"; }; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected index e0dddc98dfa25..1e3c0589dea99 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected @@ -6,11 +6,11 @@ query FooQuery { } } ==================================== OUTPUT =================================== -export type FooQuery$variables = {}; +export type FooQuery$variables = Record; export type FooQuery$data = { readonly me: { - readonly firstName: string | null; - readonly lastName: string | null; + readonly firstName: string | null | undefined; + readonly lastName: string | null | undefined; }; }; export type FooQuery = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throws-nested.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throws-nested.expected index 6c602d0fcfdfe..e38bb23d3fd26 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throws-nested.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-throws-nested.expected @@ -6,12 +6,12 @@ query FooQuery { } } ==================================== 
OUTPUT =================================== -export type FooQuery$variables = {}; +export type FooQuery$variables = Record; export type FooQuery$data = { readonly me: { - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly lastName: string; - } | null; + } | null | undefined; }; export type FooQuery = { response: FooQuery$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected index bd04176477109..54a12ddf89693 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected @@ -7,13 +7,13 @@ query RelayReaderNamedFragmentsTest2Query { } } ==================================== OUTPUT =================================== -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly node: { readonly named_fragment: { readonly name: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment.expected index 9000827bfbea9..b41b0bfd41d23 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment.expected +++ 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required-within-aliased-inline-fragment.expected @@ -8,14 +8,14 @@ query RelayReaderNamedFragmentsTest2Query { } } ==================================== OUTPUT =================================== -export type RelayReaderNamedFragmentsTest2Query$variables = {}; +export type RelayReaderNamedFragmentsTest2Query$variables = Record; export type RelayReaderNamedFragmentsTest2Query$data = { readonly me: { readonly id: string; readonly named_fragment: { readonly name: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type RelayReaderNamedFragmentsTest2Query = { response: RelayReaderNamedFragmentsTest2Query$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required.expected index febb24d387312..37a1332b4cb72 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/required.expected @@ -6,12 +6,12 @@ query FooQuery { } } ==================================== OUTPUT =================================== -export type FooQuery$variables = {}; +export type FooQuery$variables = Record; export type FooQuery$data = { readonly me: { - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly lastName: string; - } | null; + } | null | undefined; }; export type FooQuery = { response: FooQuery$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/roots.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/roots.expected index 4facf7d74421e..0a57eca55bf6d 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/roots.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/roots.expected @@ -31,7 +31,7 @@ export type 
ExampleQuery$variables = { export type ExampleQuery$data = { readonly node: { readonly id: string; - } | null; + } | null | undefined; }; export type ExampleQuery = { response: ExampleQuery$data; @@ -40,14 +40,14 @@ export type ExampleQuery = { ------------------------------------------------------------------------------- export type CommentCreateInput = { client_mutation_id: string; - feedback?: CommentfeedbackFeedback | null; - feedbackId?: string | null; + feedback?: CommentfeedbackFeedback | null | undefined; + feedbackId?: string | null | undefined; }; export type CommentfeedbackFeedback = { - comment?: FeedbackcommentComment | null; + comment?: FeedbackcommentComment | null | undefined; }; export type FeedbackcommentComment = { - feedback?: CommentfeedbackFeedback | null; + feedback?: CommentfeedbackFeedback | null | undefined; }; export type TestMutation$variables = { input: CommentCreateInput; @@ -56,8 +56,8 @@ export type TestMutation$data = { readonly commentCreate: { readonly comment: { readonly id: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type TestMutation = { response: TestMutation$data; @@ -65,17 +65,17 @@ export type TestMutation = { }; ------------------------------------------------------------------------------- export type FeedbackLikeInput = { - feedbackId?: string | null; + feedbackId?: string | null | undefined; }; export type TestSubscription$variables = { - input?: FeedbackLikeInput | null; + input?: FeedbackLikeInput | null | undefined; }; export type TestSubscription$data = { readonly feedbackLikeSubscribe: { readonly feedback: { readonly id: string; - } | null; - } | null; + } | null | undefined; + } | null | undefined; }; export type TestSubscription = { response: TestSubscription$data; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/scalar-field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/scalar-field.expected index 
637736a3fa527..3bf3e56e83d42 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/scalar-field.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/scalar-field.expected @@ -17,16 +17,16 @@ export type PersonalityTraits = "CHEERFUL" | "DERISIVE" | "HELPFUL" | "SNARKY" | import { FragmentRefs } from "relay-runtime"; export type ScalarField$data = { readonly aliasedLinkedField: { - readonly aliasedField: number | null; - } | null; + readonly aliasedField: number | null | undefined; + } | null | undefined; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; readonly screennames: ReadonlyArray<{ - readonly name: string | null; - readonly service: string | null; - } | null> | null; - readonly traits: ReadonlyArray | null; - readonly websites: ReadonlyArray | null; + readonly name: string | null | undefined; + readonly service: string | null | undefined; + } | null | undefined> | null | undefined; + readonly traits: ReadonlyArray | null | undefined; + readonly websites: ReadonlyArray | null | undefined; readonly " $fragmentType": "ScalarField"; }; export type ScalarField$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.expected new file mode 100644 index 0000000000000..97e4ea98c0721 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.expected @@ -0,0 +1,50 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +query MyQuery @raw_response_type @throwOnFieldError { + opera { + composer { + name + } + cast { + singer { + name + } + character + } + } +} +==================================== OUTPUT =================================== +export type 
MyQuery$variables = Record; +export type MyQuery$data = { + readonly opera: { + readonly cast: ReadonlyArray<{ + readonly character: string; + readonly singer: { + readonly name: string | null | undefined; + }; + }>; + readonly composer: { + readonly name: string | null | undefined; + }; + } | null | undefined; +}; +export type MyQuery$rawResponse = { + readonly opera?: { + readonly cast: ReadonlyArray<{ + readonly character: string | null | undefined; + readonly singer: { + readonly id: string; + readonly name: string | null | undefined; + } | null | undefined; + } | null | undefined> | null | undefined; + readonly composer: { + readonly id: string; + readonly name: string | null | undefined; + } | null | undefined; + } | null | undefined; +}; +export type MyQuery = { + rawResponse: MyQuery$rawResponse; + response: MyQuery$data; + variables: MyQuery$variables; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.graphql new file mode 100644 index 0000000000000..9467ca18b3f1b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_in_raw_response.graphql @@ -0,0 +1,14 @@ +# relay:experimental_emit_semantic_nullability_types +query MyQuery @raw_response_type @throwOnFieldError { + opera { + composer { + name + } + cast { + singer { + name + } + character + } + } +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.expected new file mode 100644 index 0000000000000..fa9bf35387cd5 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.expected @@ -0,0 +1,21 @@ +==================================== INPUT 
==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on Screen @throwOnFieldError { + pixels +} + +%extensions% + +type Screen { + pixels: [[Int]] @semanticNonNull(levels: [2]) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly pixels: ReadonlyArray | null | undefined> | null | undefined; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.graphql new file mode 100644 index 0000000000000..b47dcf2474175 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_items_in_matrix.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on Screen @throwOnFieldError { + pixels +} + +%extensions% + +type Screen { + pixels: [[Int]] @semanticNonNull(levels: [2]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.expected new file mode 100644 index 0000000000000..81f1333ed8e5b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.expected @@ -0,0 +1,53 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend @waterfall { + name + } +} + +%extensions% + +type ClientUser 
{ + best_friend: User @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type ClientEdgeQuery_MyFragment_best_friend$variables = { + id: string; +}; +export type ClientEdgeQuery_MyFragment_best_friend$data = { + readonly node: { + readonly " $fragmentSpreads": FragmentRefs<"RefetchableClientEdgeQuery_MyFragment_best_friend">; + } | null | undefined; +}; +export type ClientEdgeQuery_MyFragment_best_friend = { + response: ClientEdgeQuery_MyFragment_best_friend$data; + variables: ClientEdgeQuery_MyFragment_best_friend$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs, DataID } from "relay-runtime"; +import clientUserBestFriendResolverType from "bar"; +export type MyFragment$data = { + readonly best_friend: { + readonly name: string | null | undefined; + }; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type RefetchableClientEdgeQuery_MyFragment_best_friend$data = { + readonly id: string; + readonly name: string | null | undefined; + readonly " $fragmentType": "RefetchableClientEdgeQuery_MyFragment_best_friend"; +}; +export type RefetchableClientEdgeQuery_MyFragment_best_friend$key = { + readonly " $data"?: RefetchableClientEdgeQuery_MyFragment_best_friend$data; + readonly " $fragmentSpreads": FragmentRefs<"RefetchableClientEdgeQuery_MyFragment_best_friend">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.graphql 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.graphql new file mode 100644 index 0000000000000..cf8fb71f1bd2e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_resolver.graphql @@ -0,0 +1,14 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend @waterfall { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.expected new file mode 100644 index 0000000000000..90b043378a205 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.expected @@ -0,0 +1,42 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + blob { + data + } +} + +%extensions% + +type ClientUser { + blob: Blob @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + has_output_type: true + ) +} + +type Blob { + data: String +} +==================================== OUTPUT =================================== +export type ClientUser__blob$normalization$variables = Record; +export type ClientUser__blob$normalization$data = { + readonly data: string | null | undefined; +}; +export type ClientUser__blob$normalization = { + response: ClientUser__blob$normalization$data; + variables: ClientUser__blob$normalization$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +import 
clientUserBlobResolverType from "bar"; +export type MyFragment$data = { + readonly blob: { + readonly data: string | null | undefined; + }; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.graphql new file mode 100644 index 0000000000000..195659e6e9ef8 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.graphql @@ -0,0 +1,19 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + blob { + data + } +} + +%extensions% + +type ClientUser { + blob: Blob @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + has_output_type: true + ) +} + +type Blob { + data: String +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.expected new file mode 100644 index 0000000000000..25da697d6fa9b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.expected @@ -0,0 +1,25 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly best_friend: { 
+ readonly name: string | null | undefined; + }; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.graphql new file mode 100644 index 0000000000000..ef205348eb5ba --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_linked_field.graphql @@ -0,0 +1,12 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + best_friend { + name + } +} + +%extensions% + +type ClientUser { + best_friend: User @semanticNonNull +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.expected new file mode 100644 index 0000000000000..4a5d91b82a363 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [0, 1]) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly favorite_numbers: ReadonlyArray; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": 
FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.graphql new file mode 100644 index 0000000000000..9a0128042baa9 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_and_list_item.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [0, 1]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.expected new file mode 100644 index 0000000000000..0481717c4d78e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [1]) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly favorite_numbers: ReadonlyArray | null | undefined; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.graphql 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.graphql new file mode 100644 index 0000000000000..e9c0a45b573ad --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_list_item.graphql @@ -0,0 +1,10 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + favorite_numbers +} + +%extensions% + +type ClientUser { + favorite_numbers: [Int] @semanticNonNull(levels: [1]) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.expected new file mode 100644 index 0000000000000..597a6276b77fb --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly name: string; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.graphql new file mode 100644 index 0000000000000..4ff85acbc9368 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar.graphql @@ -0,0 +1,10 @@ +# 
relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.expected new file mode 100644 index 0000000000000..5e41d0946e1e4 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.expected @@ -0,0 +1,21 @@ +==================================== INPUT ==================================== +# Note: No comment here enabling `experimental_emit_semantic_nullability_types` +fragment MyFragment on ClientUser { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type MyFragment$data = { + readonly name: string | null | undefined; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.graphql new file mode 100644 index 0000000000000..c0d5cee2e7273 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.graphql @@ -0,0 +1,10 @@ +# Note: No comment here enabling `experimental_emit_semantic_nullability_types` +fragment MyFragment on ClientUser { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull +} diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.expected new file mode 100644 index 0000000000000..bd066858df35f --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.expected @@ -0,0 +1,24 @@ +==================================== INPUT ==================================== +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +import clientUserNameResolverType from "bar"; +export type MyFragment$data = { + readonly name: NonNullable>; + readonly " $fragmentType": "MyFragment"; +}; +export type MyFragment$key = { + readonly " $data"?: MyFragment$data; + readonly " $fragmentSpreads": FragmentRefs<"MyFragment">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.graphql new file mode 100644 index 0000000000000..ad5aacd8f6e8c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/semantic_non_null_scalar_resolver.graphql @@ -0,0 +1,12 @@ +# relay:experimental_emit_semantic_nullability_types +fragment MyFragment on ClientUser @throwOnFieldError { + name +} + +%extensions% + +type ClientUser { + name: String @semanticNonNull @relay_resolver( + import_path: "./foo/bar.js" + ) +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple-use-import-type-syntax.expected 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple-use-import-type-syntax.expected index 76fc0802087db..57de9dcaeb0a7 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple-use-import-type-syntax.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple-use-import-type-syntax.expected @@ -12,12 +12,12 @@ fragment LinkedField on User { ==================================== OUTPUT =================================== import type { FragmentRefs } from "relay-runtime"; export type LinkedField$data = { - readonly name: string | null; + readonly name: string | null | undefined; readonly profilePicture: { - readonly height: number | null; - readonly uri: string | null; - readonly width: number | null; - } | null; + readonly height: number | null | undefined; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; + } | null | undefined; readonly " $fragmentType": "LinkedField"; }; export type LinkedField$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple.expected index 80bf7da3fc268..7a5818a49a825 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/simple.expected @@ -10,12 +10,12 @@ fragment LinkedField on User { ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; export type LinkedField$data = { - readonly name: string | null; + readonly name: string | null | undefined; readonly profilePicture: { - readonly height: number | null; - readonly uri: string | null; - readonly width: number | null; - } | null; + readonly height: number | null | undefined; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; + } | null | 
undefined; readonly " $fragmentType": "LinkedField"; }; export type LinkedField$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected index 023c13577e336..de173d8596f6e 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected @@ -21,19 +21,19 @@ export type TypenameInsideWithOverlappingFields$data = { readonly actor: { readonly __typename: "Page"; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; } | { readonly __typename: "User"; readonly id: string; - readonly name: string | null; + readonly name: string | null | undefined; readonly profile_picture: { - readonly uri: string | null; - } | null; + readonly uri: string | null | undefined; + } | null | undefined; } | { // This will never be '%other', but we need some // value in case none of the concrete values match. 
readonly __typename: "%other"; - } | null; + } | null | undefined; readonly " $fragmentType": "TypenameInsideWithOverlappingFields"; }; export type TypenameInsideWithOverlappingFields$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-on-union.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-on-union.expected index 21ea5e1291631..27a8ce655eeaa 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-on-union.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/typename-on-union.expected @@ -83,11 +83,11 @@ fragment TypenameAliases on Actor { import { FragmentRefs } from "relay-runtime"; export type TypenameAlias$data = { readonly _typeAlias: "User"; - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly " $fragmentType": "TypenameAlias"; } | { readonly _typeAlias: "Page"; - readonly username: string | null; + readonly username: string | null | undefined; readonly " $fragmentType": "TypenameAlias"; } | { // This will never be '%other', but we need some @@ -104,12 +104,12 @@ import { FragmentRefs } from "relay-runtime"; export type TypenameAliases$data = { readonly _typeAlias1: "User"; readonly _typeAlias2: "User"; - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly " $fragmentType": "TypenameAliases"; } | { readonly _typeAlias1: "Page"; readonly _typeAlias2: "Page"; - readonly username: string | null; + readonly username: string | null | undefined; readonly " $fragmentType": "TypenameAliases"; } | { // This will never be '%other', but we need some @@ -128,11 +128,11 @@ export type TypenameAliases$key = { import { FragmentRefs } from "relay-runtime"; export type TypenameInside$data = { readonly __typename: "Page"; - readonly username: string | null; + readonly username: string | null | undefined; readonly " $fragmentType": "TypenameInside"; } | { 
readonly __typename: "User"; - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly " $fragmentType": "TypenameInside"; } | { // This will never be '%other', but we need some @@ -148,11 +148,11 @@ export type TypenameInside$key = { import { FragmentRefs } from "relay-runtime"; export type TypenameOutside$data = { readonly __typename: "Page"; - readonly username: string | null; + readonly username: string | null | undefined; readonly " $fragmentType": "TypenameOutside"; } | { readonly __typename: "User"; - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly " $fragmentType": "TypenameOutside"; } | { // This will never be '%other', but we need some @@ -169,12 +169,12 @@ import { FragmentRefs } from "relay-runtime"; export type TypenameOutsideWithAbstractType$data = { readonly __typename: string; readonly address?: { - readonly city: string | null; - readonly country: string | null; - readonly street?: string | null; - } | null; - readonly firstName?: string | null; - readonly username?: string | null; + readonly city: string | null | undefined; + readonly country: string | null | undefined; + readonly street?: string | null | undefined; + } | null | undefined; + readonly firstName?: string | null | undefined; + readonly username?: string | null | undefined; readonly " $fragmentType": "TypenameOutsideWithAbstractType"; }; export type TypenameOutsideWithAbstractType$key = { @@ -185,9 +185,9 @@ export type TypenameOutsideWithAbstractType$key = { import { FragmentRefs } from "relay-runtime"; export type TypenameWithCommonSelections$data = { readonly __typename: string; - readonly firstName?: string | null; - readonly name: string | null; - readonly username?: string | null; + readonly firstName?: string | null | undefined; + readonly name: string | null | undefined; + readonly username?: string | null | undefined; readonly " $fragmentType": "TypenameWithCommonSelections"; }; export type 
TypenameWithCommonSelections$key = { @@ -198,7 +198,7 @@ export type TypenameWithCommonSelections$key = { import { FragmentRefs } from "relay-runtime"; export type TypenameWithoutSpreads$data = { readonly __typename: "User"; - readonly firstName: string | null; + readonly firstName: string | null | undefined; readonly " $fragmentType": "TypenameWithoutSpreads"; }; export type TypenameWithoutSpreads$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/unmasked-fragment-spreads.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/unmasked-fragment-spreads.expected index 795c86daf8409..ebc9b9eb28d16 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/unmasked-fragment-spreads.expected +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/unmasked-fragment-spreads.expected @@ -28,8 +28,8 @@ fragment AnotherRecursiveFragment on Image { ==================================== OUTPUT =================================== import { FragmentRefs } from "relay-runtime"; export type AnotherRecursiveFragment$data = { - readonly height: number | null; - readonly uri: string | null; + readonly height: number | null | undefined; + readonly uri: string | null | undefined; readonly " $fragmentType": "AnotherRecursiveFragment"; }; export type AnotherRecursiveFragment$key = { @@ -39,8 +39,8 @@ export type AnotherRecursiveFragment$key = { ------------------------------------------------------------------------------- import { FragmentRefs } from "relay-runtime"; export type PhotoFragment$data = { - readonly uri: string | null; - readonly width: number | null; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; readonly " $fragmentType": "PhotoFragment"; }; export type PhotoFragment$key = { @@ -50,8 +50,8 @@ export type PhotoFragment$key = { ------------------------------------------------------------------------------- import { FragmentRefs } from 
"relay-runtime"; export type RecursiveFragment$data = { - readonly uri: string | null; - readonly width: number | null; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; }; export type RecursiveFragment$key = { readonly " $data"?: RecursiveFragment$data; @@ -61,11 +61,11 @@ export type RecursiveFragment$key = { import { FragmentRefs } from "relay-runtime"; export type UserProfile$data = { readonly profilePicture: { - readonly height: number | null; - readonly uri: string | null; - readonly width: number | null; + readonly height: number | null | undefined; + readonly uri: string | null | undefined; + readonly width: number | null | undefined; readonly " $fragmentSpreads": FragmentRefs<"PhotoFragment">; - } | null; + } | null | undefined; readonly " $fragmentType": "UserProfile"; }; export type UserProfile$key = { diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.expected new file mode 100644 index 0000000000000..0e9e58742bb95 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.expected @@ -0,0 +1,48 @@ +==================================== INPUT ==================================== +query updatableFragmentSpreadAndRegularSpreadQuery { + me { + ...updatableFragmentSpreadAndRegularSpread_updatable_user + ...updatableFragmentSpreadAndRegularSpread_user + } +} + +fragment updatableFragmentSpreadAndRegularSpread_updatable_user on User @updatable { + firstName +} + +fragment updatableFragmentSpreadAndRegularSpread_user on User { + firstName +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpreadQuery$variables = Record; +export type 
updatableFragmentSpreadAndRegularSpreadQuery$data = { + readonly me: { + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpreadAndRegularSpread_updatable_user">; + readonly " $fragmentSpreads": FragmentRefs<"updatableFragmentSpreadAndRegularSpread_user">; + } | null | undefined; +}; +export type updatableFragmentSpreadAndRegularSpreadQuery = { + response: updatableFragmentSpreadAndRegularSpreadQuery$data; + variables: updatableFragmentSpreadAndRegularSpreadQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpread_updatable_user$data = { + firstName: string | null | undefined; + readonly " $fragmentType": "updatableFragmentSpreadAndRegularSpread_updatable_user"; +}; +export type updatableFragmentSpreadAndRegularSpread_updatable_user$key = { + readonly " $data"?: updatableFragmentSpreadAndRegularSpread_updatable_user$data; + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpreadAndRegularSpread_updatable_user">; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpread_user$data = { + readonly firstName: string | null | undefined; + readonly " $fragmentType": "updatableFragmentSpreadAndRegularSpread_user"; +}; +export type updatableFragmentSpreadAndRegularSpread_user$key = { + readonly " $data"?: updatableFragmentSpreadAndRegularSpread_user$data; + readonly " $fragmentSpreads": FragmentRefs<"updatableFragmentSpreadAndRegularSpread_user">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.graphql new file mode 100644 index 0000000000000..4871b51ff122c --- /dev/null +++ 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.graphql @@ -0,0 +1,14 @@ +query updatableFragmentSpreadAndRegularSpreadQuery { + me { + ...updatableFragmentSpreadAndRegularSpread_updatable_user + ...updatableFragmentSpreadAndRegularSpread_user + } +} + +fragment updatableFragmentSpreadAndRegularSpread_updatable_user on User @updatable { + firstName +} + +fragment updatableFragmentSpreadAndRegularSpread_user on User { + firstName +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.expected new file mode 100644 index 0000000000000..62277341f32d3 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.expected @@ -0,0 +1,47 @@ +==================================== INPUT ==================================== +query updatableFragmentSpreadAndRegularSpreadQuery { + me { + ...updatableFragmentSpreadAndRegularSpread_updatable_user + ...updatableFragmentSpreadAndRegularSpread_2_updatable_user + } +} + +fragment updatableFragmentSpreadAndRegularSpread_updatable_user on User @updatable { + firstName +} + +fragment updatableFragmentSpreadAndRegularSpread_2_updatable_user on User @updatable { + firstName +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpreadQuery$variables = Record; +export type updatableFragmentSpreadAndRegularSpreadQuery$data = { + readonly me: { + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpreadAndRegularSpread_2_updatable_user" | "updatableFragmentSpreadAndRegularSpread_updatable_user">; + } | null | undefined; +}; +export type updatableFragmentSpreadAndRegularSpreadQuery = { + response: 
updatableFragmentSpreadAndRegularSpreadQuery$data; + variables: updatableFragmentSpreadAndRegularSpreadQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpread_2_updatable_user$data = { + firstName: string | null | undefined; + readonly " $fragmentType": "updatableFragmentSpreadAndRegularSpread_2_updatable_user"; +}; +export type updatableFragmentSpreadAndRegularSpread_2_updatable_user$key = { + readonly " $data"?: updatableFragmentSpreadAndRegularSpread_2_updatable_user$data; + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpreadAndRegularSpread_2_updatable_user">; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadAndRegularSpread_updatable_user$data = { + firstName: string | null | undefined; + readonly " $fragmentType": "updatableFragmentSpreadAndRegularSpread_updatable_user"; +}; +export type updatableFragmentSpreadAndRegularSpread_updatable_user$key = { + readonly " $data"?: updatableFragmentSpreadAndRegularSpread_updatable_user$data; + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpreadAndRegularSpread_updatable_user">; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.graphql new file mode 100644 index 0000000000000..c03581510b31e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread-multiple.graphql @@ -0,0 +1,14 @@ +query updatableFragmentSpreadAndRegularSpreadQuery { + me { + ...updatableFragmentSpreadAndRegularSpread_updatable_user + ...updatableFragmentSpreadAndRegularSpread_2_updatable_user + } +} + +fragment 
updatableFragmentSpreadAndRegularSpread_updatable_user on User @updatable { + firstName +} + +fragment updatableFragmentSpreadAndRegularSpread_2_updatable_user on User @updatable { + firstName +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.expected new file mode 100644 index 0000000000000..ea3514b56b1ae --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.expected @@ -0,0 +1,32 @@ +==================================== INPUT ==================================== +query updatableFragmentSpreadQuery { + me { + ...updatableFragmentSpread_updatable_user + } +} + +fragment updatableFragmentSpread_updatable_user on User @updatable { + firstName +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpreadQuery$variables = Record; +export type updatableFragmentSpreadQuery$data = { + readonly me: { + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpread_updatable_user">; + } | null | undefined; +}; +export type updatableFragmentSpreadQuery = { + response: updatableFragmentSpreadQuery$data; + variables: updatableFragmentSpreadQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +export type updatableFragmentSpread_updatable_user$data = { + firstName: string | null | undefined; + readonly " $fragmentType": "updatableFragmentSpread_updatable_user"; +}; +export type updatableFragmentSpread_updatable_user$key = { + readonly " $data"?: updatableFragmentSpread_updatable_user$data; + readonly $updatableFragmentSpreads: FragmentRefs<"updatableFragmentSpread_updatable_user">; +}; diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.graphql new file mode 100644 index 0000000000000..1ae2e6b57968e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-fragment-spread.graphql @@ -0,0 +1,9 @@ +query updatableFragmentSpreadQuery { + me { + ...updatableFragmentSpread_updatable_user + } +} + +fragment updatableFragmentSpread_updatable_user on User @updatable { + firstName +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.expected new file mode 100644 index 0000000000000..bd436449a8720 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + node(id: 4) { + ... on User { + __typename + parents { + ...Assignable_user + } + } + } +} + +fragment Assignable_user on User @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get node(): { + readonly __typename: "User"; + get parents(): ReadonlyArray>; + set parents(value: ReadonlyArray<{ + readonly __typename: "User"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_user">; + }>); + } | { + // This will never be '%other', but we need some + // value in case none of the concrete values match. 
+ readonly __typename: "%other"; + } | null | undefined; + set node(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.graphql new file mode 100644 index 0000000000000..8f05dfc2e9a16 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.graphql @@ -0,0 +1,14 @@ +query UpdatableQuery @updatable { + node(id: 4) { + ... on User { + __typename + parents { + ...Assignable_user + } + } + } +} + +fragment Assignable_user on User @assignable { + __typename +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.expected new file mode 100644 index 0000000000000..f72fe93751e1c --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + viewer { + ...Assignable_viewer + } +} + +fragment Assignable_viewer on Viewer @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get viewer(): Record | null | undefined; + set viewer(value: { + readonly __typename: "Viewer"; + readonly 
__id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_viewer">; + } | null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.graphql new file mode 100644 index 0000000000000..f44ae84ada939 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragment.graphql @@ -0,0 +1,9 @@ +query UpdatableQuery @updatable { + viewer { + ...Assignable_viewer + } +} + +fragment Assignable_viewer on Viewer @assignable { + __typename +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected new file mode 100644 index 0000000000000..93d66cbd586cb --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected @@ -0,0 +1,57 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + me { + actor { + ... on User { + __typename + ...Assignable_user + } + ... 
on Page { + __typename + ...Assignable_page + } + } + } +} + +fragment Assignable_user on User @assignable { + __typename +} + +fragment Assignable_page on Page @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get me(): { + get actor(): { + readonly __typename: "Page"; + } | { + readonly __typename: "User"; + } | { + // This will never be '%other', but we need some + // value in case none of the concrete values match. + readonly __typename: "%other"; + } | null | undefined; + set actor(value: { + readonly __typename: "Page"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_page">; + } | { + readonly __typename: "User"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_user">; + } | null | undefined); + } | null | undefined; + set me(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.graphql new file mode 100644 index 0000000000000..1bec07743881e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.graphql @@ -0,0 +1,23 @@ +query UpdatableQuery @updatable { + me { + actor { + ... 
on User { + __typename + ...Assignable_user + } + ... on Page { + __typename + ...Assignable_page + } + } + } +} + +fragment Assignable_user on User @assignable { + __typename +} + +fragment Assignable_page on Page @assignable { + __typename +} + diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.expected new file mode 100644 index 0000000000000..49a34fd7b8021 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.expected @@ -0,0 +1,48 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + me { + actor { + ...Assignable_page + ...Assignable_node + lastName + name + } + } +} + +fragment Assignable_node on Node @assignable { + __typename +} + +fragment Assignable_page on Page @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get me(): { + get actor(): { + lastName: string | null | undefined; + name: string | null | undefined; + } | null | undefined; + set actor(value: { + readonly __typename: "Page"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_page">; + } | { + readonly __id: string; + readonly __isAssignable_node: string; + readonly " $fragmentSpreads": FragmentRefs<"Assignable_node">; + } | null | undefined); + } | null | undefined; + set me(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from 
"relay-runtime"; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.graphql new file mode 100644 index 0000000000000..932ebc81d75d4 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.graphql @@ -0,0 +1,18 @@ +query UpdatableQuery @updatable { + me { + actor { + ...Assignable_page + ...Assignable_node + lastName + name + } + } +} + +fragment Assignable_node on Node @assignable { + __typename +} + +fragment Assignable_page on Page @assignable { + __typename +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.expected new file mode 100644 index 0000000000000..8502972638dd2 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.expected @@ -0,0 +1,18 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + nodes(ids: [4]) { + id + } +} +==================================== OUTPUT =================================== +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get nodes(): ReadonlyArray<{ + readonly id: string; + } | null | undefined> | null | undefined; + set nodes(value: []); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.graphql new file mode 100644 index 0000000000000..863d80dca9758 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.graphql @@ -0,0 +1,6 @@ +query UpdatableQuery @updatable { + nodes(ids: [4]) { + id + } +} + diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.expected new file mode 100644 index 0000000000000..e93157db417ee --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.expected @@ -0,0 +1,27 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + nodes(ids: [4]) { + ...Updatable_user + } +} + +fragment Updatable_user on User @assignable { + __typename +} +==================================== OUTPUT =================================== +import { FragmentRefs } from "relay-runtime"; +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get nodes(): ReadonlyArray | null | undefined> | null | undefined; + set nodes(value: ReadonlyArray<{ + readonly __typename: "User"; + readonly __id: string; + readonly " $fragmentSpreads": FragmentRefs<"Updatable_user">; + }>); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; +------------------------------------------------------------------------------- +import { FragmentRefs } from "relay-runtime"; diff --git 
a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.graphql new file mode 100644 index 0000000000000..e7b7071b98e24 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.graphql @@ -0,0 +1,9 @@ +query UpdatableQuery @updatable { + nodes(ids: [4]) { + ...Updatable_user + } +} + +fragment Updatable_user on User @assignable { + __typename +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.expected new file mode 100644 index 0000000000000..a291226e1731e --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.expected @@ -0,0 +1,28 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + me { + id + aliased_id: id + __typename + aliased_typename: __typename + __id + aliased_double_under_id: __id + } +} +==================================== OUTPUT =================================== +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get me(): { + readonly __typename: "User"; + readonly __id: string; + readonly aliased_double_under_id: string; + readonly aliased_id: string; + readonly aliased_typename: "User"; + readonly id: string; + } | null | undefined; + set me(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.graphql 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.graphql new file mode 100644 index 0000000000000..f884e727ee97b --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-special-fields.graphql @@ -0,0 +1,10 @@ +query UpdatableQuery @updatable { + me { + id + aliased_id: id + __typename + aliased_typename: __typename + __id + aliased_double_under_id: __id + } +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.expected new file mode 100644 index 0000000000000..43f18bf84dde7 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.expected @@ -0,0 +1,40 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + maybeNodeInterface { + ... on NonNodeNoID { + __typename + name + } + ... on Story { + __typename + actorCount + address { + city + } + } + } +} +==================================== OUTPUT =================================== +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get maybeNodeInterface(): { + readonly __typename: "NonNodeNoID"; + name: string | null | undefined; + } | { + readonly __typename: "Story"; + actorCount: number | null | undefined; + get address(): { + city: string | null | undefined; + } | null | undefined; + set address(value: null | undefined); + } | { + // This will never be '%other', but we need some + // value in case none of the concrete values match. 
+ readonly __typename: "%other"; + } | null | undefined; + set maybeNodeInterface(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.graphql b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.graphql new file mode 100644 index 0000000000000..6a79d3d91764d --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation-type-refinement.graphql @@ -0,0 +1,15 @@ +query UpdatableQuery @updatable { + maybeNodeInterface { + ... on NonNodeNoID { + __typename + name + } + ... on Story { + __typename + actorCount + address { + city + } + } + } +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.expected b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.expected new file mode 100644 index 0000000000000..10b0e737ec5f2 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.expected @@ -0,0 +1,20 @@ +==================================== INPUT ==================================== +query UpdatableQuery @updatable { + me { + actorCount + name + } +} +==================================== OUTPUT =================================== +export type UpdatableQuery$variables = Record; +export type UpdatableQuery$data = { + get me(): { + actorCount: number | null | undefined; + name: string | null | undefined; + } | null | undefined; + set me(value: null | undefined); +}; +export type UpdatableQuery = { + response: UpdatableQuery$data; + variables: UpdatableQuery$variables; +}; diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.graphql 
b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.graphql new file mode 100644 index 0000000000000..93db1a9874954 --- /dev/null +++ b/compiler/crates/relay-typegen/tests/generate_typescript/fixtures/updatable-operation.graphql @@ -0,0 +1,6 @@ +query UpdatableQuery @updatable { + me { + actorCount + name + } +} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript/mod.rs b/compiler/crates/relay-typegen/tests/generate_typescript/mod.rs deleted file mode 100644 index eebed60a31d19..0000000000000 --- a/compiler/crates/relay-typegen/tests/generate_typescript/mod.rs +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::sync::Arc; - -use common::ConsoleLogger; -use common::FeatureFlag; -use common::FeatureFlags; -use common::ScalarName; -use common::SourceLocationKey; -use fixture_tests::Fixture; -use fnv::FnvBuildHasher; -use fnv::FnvHashMap; -use graphql_ir::build; -use graphql_ir::Program; -use graphql_syntax::parse_executable; -use indexmap::IndexMap; -use intern::string_key::Intern; -use relay_codegen::JsModuleFormat; -use relay_config::CustomScalarType; -use relay_config::CustomScalarTypeImport; -use relay_config::ProjectConfig; -use relay_test_schema::get_test_schema; -use relay_test_schema::get_test_schema_with_extensions; -use relay_transforms::apply_transforms; -use relay_typegen::FragmentLocations; -use relay_typegen::TypegenConfig; -use relay_typegen::TypegenLanguage; - -type FnvIndexMap = IndexMap; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts = fixture.content.split("%extensions%").collect::>(); - let (source, schema) = match parts.as_slice() { - [source, extensions] => (source, get_test_schema_with_extensions(extensions)), - [source] => (source, get_test_schema()), - _ => panic!(), - }; - - let 
source_location = SourceLocationKey::standalone(fixture.file_name); - - let mut sources = FnvHashMap::default(); - sources.insert(source_location, source); - let ast = parse_executable(source, source_location).unwrap_or_else(|e| { - panic!("Encountered error building AST: {:?}", e); - }); - let ir = build(&schema, &ast.definitions).unwrap_or_else(|e| { - panic!("Encountered error building IR {:?}", e); - }); - let program = Program::from_definitions(Arc::clone(&schema), ir); - let mut custom_scalar_types = FnvIndexMap::default(); - custom_scalar_types.insert( - ScalarName("JSON".intern()), - CustomScalarType::Path(CustomScalarTypeImport { - name: "JSON".intern(), - path: "TypeDefsFile".into(), - }), - ); - let project_config = ProjectConfig { - name: "test".intern(), - js_module_format: JsModuleFormat::Haste, - typegen_config: TypegenConfig { - language: TypegenLanguage::TypeScript, - custom_scalar_types, - use_import_type_syntax: fixture - .content - .contains("# typegen_config.use_import_type_syntax = true"), - ..Default::default() - }, - feature_flags: Arc::new(FeatureFlags { - enable_fragment_aliases: FeatureFlag::Enabled, - enable_relay_resolver_transform: true, - ..Default::default() - }), - ..Default::default() - }; - let programs = apply_transforms( - &project_config, - Arc::new(program), - Default::default(), - Arc::new(ConsoleLogger), - None, - None, - ) - .unwrap(); - - let fragment_locations = FragmentLocations::new(programs.typegen.fragments()); - let mut operations: Vec<_> = programs.typegen.operations().collect(); - operations.sort_by_key(|op| op.name.item.0); - let operation_strings = operations.into_iter().map(|typegen_operation| { - let normalization_operation = programs - .normalization - .operation(typegen_operation.name.item) - .unwrap(); - relay_typegen::generate_operation_type_exports_section( - typegen_operation, - normalization_operation, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut fragments: Vec<_> = 
programs.typegen.fragments().collect(); - fragments.sort_by_key(|frag| frag.name.item); - let fragment_strings = fragments.into_iter().map(|frag| { - relay_typegen::generate_fragment_type_exports_section( - frag, - &schema, - &project_config, - &fragment_locations, - ) - }); - - let mut result: Vec = operation_strings.collect(); - result.extend(fragment_strings); - Ok(result - .join("-------------------------------------------------------------------------------\n")) -} diff --git a/compiler/crates/relay-typegen/tests/generate_typescript_test.rs b/compiler/crates/relay-typegen/tests/generate_typescript_test.rs index 62d4d84af37be..73509e4faaf24 100644 --- a/compiler/crates/relay-typegen/tests/generate_typescript_test.rs +++ b/compiler/crates/relay-typegen/tests/generate_typescript_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<> */ mod generate_typescript; @@ -12,408 +12,590 @@ mod generate_typescript; use generate_typescript::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn aliased_fragment_raw_response_type() { +#[tokio::test] +async fn aliased_fragment_raw_response_type() { let input = include_str!("generate_typescript/fixtures/aliased-fragment-raw-response-type.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-fragment-raw-response-type.expected"); - test_fixture(transform_fixture, "aliased-fragment-raw-response-type.graphql", "generate_typescript/fixtures/aliased-fragment-raw-response-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-raw-response-type.graphql", "generate_typescript/fixtures/aliased-fragment-raw-response-type.expected", input, expected).await; } -#[test] -fn aliased_fragment_spread() { +#[tokio::test] +async fn aliased_fragment_spread() { let input = 
include_str!("generate_typescript/fixtures/aliased-fragment-spread.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-fragment-spread.expected"); - test_fixture(transform_fixture, "aliased-fragment-spread.graphql", "generate_typescript/fixtures/aliased-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-spread.graphql", "generate_typescript/fixtures/aliased-fragment-spread.expected", input, expected).await; } -#[test] -fn aliased_fragment_spread_in_abstract_selection() { +#[tokio::test] +async fn aliased_fragment_spread_in_abstract_selection() { let input = include_str!("generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected"); - test_fixture(transform_fixture, "aliased-fragment-spread-in-abstract-selection.graphql", "generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-fragment-spread-in-abstract-selection.graphql", "generate_typescript/fixtures/aliased-fragment-spread-in-abstract-selection.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread() { +#[tokio::test] +async fn aliased_inline_fragment_spread() { let input = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread.expected", input, expected).await; } -#[test] -fn 
aliased_inline_fragment_spread_without_type_condition_fragment_root() { +#[tokio::test] +async fn aliased_inline_fragment_spread_without_type_condition_fragment_root() { let input = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-fragment-root.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-fragment-root.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread_without_type_condition_linked_field() { +#[tokio::test] +async fn aliased_inline_fragment_spread_without_type_condition_linked_field() { let input = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-linked-field.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-linked-field.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-linked-field.expected", input, expected).await; } -#[test] -fn aliased_inline_fragment_spread_without_type_condition_query_root() { +#[tokio::test] +async fn 
aliased_inline_fragment_spread_without_type_condition_query_root() { let input = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.graphql"); let expected = include_str!("generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected"); - test_fixture(transform_fixture, "aliased-inline-fragment-spread-without-type-condition-query-root.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected", input, expected); + test_fixture(transform_fixture, file!(), "aliased-inline-fragment-spread-without-type-condition-query-root.graphql", "generate_typescript/fixtures/aliased-inline-fragment-spread-without-type-condition-query-root.expected", input, expected).await; } -#[test] -fn conditional() { +#[tokio::test] +async fn conditional() { let input = include_str!("generate_typescript/fixtures/conditional.graphql"); let expected = include_str!("generate_typescript/fixtures/conditional.expected"); - test_fixture(transform_fixture, "conditional.graphql", "generate_typescript/fixtures/conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "conditional.graphql", "generate_typescript/fixtures/conditional.expected", input, expected).await; } -#[test] -fn custom_scalar_type_import() { +#[tokio::test] +async fn custom_scalar_type_import() { let input = include_str!("generate_typescript/fixtures/custom-scalar-type-import.graphql"); let expected = include_str!("generate_typescript/fixtures/custom-scalar-type-import.expected"); - test_fixture(transform_fixture, "custom-scalar-type-import.graphql", "generate_typescript/fixtures/custom-scalar-type-import.expected", input, expected); + test_fixture(transform_fixture, file!(), "custom-scalar-type-import.graphql", "generate_typescript/fixtures/custom-scalar-type-import.expected", input, expected).await; } -#[test] -fn fragment_spread() { +#[tokio::test] +async fn 
default_input() { + let input = include_str!("generate_typescript/fixtures/default-input.graphql"); + let expected = include_str!("generate_typescript/fixtures/default-input.expected"); + test_fixture(transform_fixture, file!(), "default-input.graphql", "generate_typescript/fixtures/default-input.expected", input, expected).await; +} + +#[tokio::test] +async fn fragment_spread() { let input = include_str!("generate_typescript/fixtures/fragment-spread.graphql"); let expected = include_str!("generate_typescript/fixtures/fragment-spread.expected"); - test_fixture(transform_fixture, "fragment-spread.graphql", "generate_typescript/fixtures/fragment-spread.expected", input, expected); + test_fixture(transform_fixture, file!(), "fragment-spread.graphql", "generate_typescript/fixtures/fragment-spread.expected", input, expected).await; } -#[test] -fn inline_fragment() { +#[tokio::test] +async fn inline_fragment() { let input = include_str!("generate_typescript/fixtures/inline-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/inline-fragment.expected"); - test_fixture(transform_fixture, "inline-fragment.graphql", "generate_typescript/fixtures/inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "inline-fragment.graphql", "generate_typescript/fixtures/inline-fragment.expected", input, expected).await; } -#[test] -fn linked_field() { +#[tokio::test] +async fn linked_field() { let input = include_str!("generate_typescript/fixtures/linked-field.graphql"); let expected = include_str!("generate_typescript/fixtures/linked-field.expected"); - test_fixture(transform_fixture, "linked-field.graphql", "generate_typescript/fixtures/linked-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "linked-field.graphql", "generate_typescript/fixtures/linked-field.expected", input, expected).await; } -#[test] -fn match_field() { +#[tokio::test] +async fn match_field() { let input = 
include_str!("generate_typescript/fixtures/match-field.graphql"); let expected = include_str!("generate_typescript/fixtures/match-field.expected"); - test_fixture(transform_fixture, "match-field.graphql", "generate_typescript/fixtures/match-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field.graphql", "generate_typescript/fixtures/match-field.expected", input, expected).await; } -#[test] -fn match_field_in_query() { +#[tokio::test] +async fn match_field_in_query() { let input = include_str!("generate_typescript/fixtures/match-field-in-query.graphql"); let expected = include_str!("generate_typescript/fixtures/match-field-in-query.expected"); - test_fixture(transform_fixture, "match-field-in-query.graphql", "generate_typescript/fixtures/match-field-in-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "match-field-in-query.graphql", "generate_typescript/fixtures/match-field-in-query.expected", input, expected).await; } -#[test] -fn mutation() { +#[tokio::test] +async fn mutation() { let input = include_str!("generate_typescript/fixtures/mutation.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation.expected"); - test_fixture(transform_fixture, "mutation.graphql", "generate_typescript/fixtures/mutation.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation.graphql", "generate_typescript/fixtures/mutation.expected", input, expected).await; } -#[test] -fn mutation_input_has_array() { +#[tokio::test] +async fn mutation_input_has_array() { let input = include_str!("generate_typescript/fixtures/mutation-input-has-array.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation-input-has-array.expected"); - test_fixture(transform_fixture, "mutation-input-has-array.graphql", "generate_typescript/fixtures/mutation-input-has-array.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-input-has-array.graphql", 
"generate_typescript/fixtures/mutation-input-has-array.expected", input, expected).await; } -#[test] -fn mutation_with_client_extension() { +#[tokio::test] +async fn mutation_with_client_extension() { let input = include_str!("generate_typescript/fixtures/mutation-with-client-extension.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation-with-client-extension.expected"); - test_fixture(transform_fixture, "mutation-with-client-extension.graphql", "generate_typescript/fixtures/mutation-with-client-extension.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-client-extension.graphql", "generate_typescript/fixtures/mutation-with-client-extension.expected", input, expected).await; } -#[test] -fn mutation_with_enums_on_fragment() { +#[tokio::test] +async fn mutation_with_enums_on_fragment() { let input = include_str!("generate_typescript/fixtures/mutation-with-enums-on-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation-with-enums-on-fragment.expected"); - test_fixture(transform_fixture, "mutation-with-enums-on-fragment.graphql", "generate_typescript/fixtures/mutation-with-enums-on-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-enums-on-fragment.graphql", "generate_typescript/fixtures/mutation-with-enums-on-fragment.expected", input, expected).await; } -#[test] -fn mutation_with_nested_fragments() { +#[tokio::test] +async fn mutation_with_nested_fragments() { let input = include_str!("generate_typescript/fixtures/mutation-with-nested-fragments.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation-with-nested-fragments.expected"); - test_fixture(transform_fixture, "mutation-with-nested-fragments.graphql", "generate_typescript/fixtures/mutation-with-nested-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-nested-fragments.graphql", 
"generate_typescript/fixtures/mutation-with-nested-fragments.expected", input, expected).await; } -#[test] -fn mutation_with_response_on_inline_fragments() { +#[tokio::test] +async fn mutation_with_response_on_inline_fragments() { let input = include_str!("generate_typescript/fixtures/mutation-with-response-on-inline-fragments.graphql"); let expected = include_str!("generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected"); - test_fixture(transform_fixture, "mutation-with-response-on-inline-fragments.graphql", "generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "mutation-with-response-on-inline-fragments.graphql", "generate_typescript/fixtures/mutation-with-response-on-inline-fragments.expected", input, expected).await; } -#[test] -fn plural_fragment() { +#[tokio::test] +async fn plural_fragment() { let input = include_str!("generate_typescript/fixtures/plural-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/plural-fragment.expected"); - test_fixture(transform_fixture, "plural-fragment.graphql", "generate_typescript/fixtures/plural-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "plural-fragment.graphql", "generate_typescript/fixtures/plural-fragment.expected", input, expected).await; } -#[test] -fn query_with_handles() { +#[tokio::test] +async fn query_with_handles() { let input = include_str!("generate_typescript/fixtures/query-with-handles.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-handles.expected"); - test_fixture(transform_fixture, "query-with-handles.graphql", "generate_typescript/fixtures/query-with-handles.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-handles.graphql", "generate_typescript/fixtures/query-with-handles.expected", input, expected).await; } -#[test] -fn query_with_match_fields() { 
+#[tokio::test] +async fn query_with_match_fields() { let input = include_str!("generate_typescript/fixtures/query-with-match-fields.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-match-fields.expected"); - test_fixture(transform_fixture, "query-with-match-fields.graphql", "generate_typescript/fixtures/query-with-match-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-match-fields.graphql", "generate_typescript/fixtures/query-with-match-fields.expected", input, expected).await; } -#[test] -fn query_with_module_field() { +#[tokio::test] +async fn query_with_module_field() { let input = include_str!("generate_typescript/fixtures/query-with-module-field.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-module-field.expected"); - test_fixture(transform_fixture, "query-with-module-field.graphql", "generate_typescript/fixtures/query-with-module-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-module-field.graphql", "generate_typescript/fixtures/query-with-module-field.expected", input, expected).await; } -#[test] -fn query_with_multiple_match_fields() { +#[tokio::test] +async fn query_with_multiple_match_fields() { let input = include_str!("generate_typescript/fixtures/query-with-multiple-match-fields.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-multiple-match-fields.expected"); - test_fixture(transform_fixture, "query-with-multiple-match-fields.graphql", "generate_typescript/fixtures/query-with-multiple-match-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-multiple-match-fields.graphql", "generate_typescript/fixtures/query-with-multiple-match-fields.expected", input, expected).await; } -#[test] -fn query_with_raw_response_on_conditional() { +#[tokio::test] +async fn query_with_raw_response_on_conditional() { let input = 
include_str!("generate_typescript/fixtures/query-with-raw-response-on-conditional.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-raw-response-on-conditional.expected"); - test_fixture(transform_fixture, "query-with-raw-response-on-conditional.graphql", "generate_typescript/fixtures/query-with-raw-response-on-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-raw-response-on-conditional.graphql", "generate_typescript/fixtures/query-with-raw-response-on-conditional.expected", input, expected).await; } -#[test] -fn query_with_raw_response_on_literal_conditional() { +#[tokio::test] +async fn query_with_raw_response_on_literal_conditional() { let input = include_str!("generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected"); - test_fixture(transform_fixture, "query-with-raw-response-on-literal-conditional.graphql", "generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-raw-response-on-literal-conditional.graphql", "generate_typescript/fixtures/query-with-raw-response-on-literal-conditional.expected", input, expected).await; } -#[test] -fn query_with_stream() { +#[tokio::test] +async fn query_with_stream() { let input = include_str!("generate_typescript/fixtures/query-with-stream.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-stream.expected"); - test_fixture(transform_fixture, "query-with-stream.graphql", "generate_typescript/fixtures/query-with-stream.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-stream.graphql", "generate_typescript/fixtures/query-with-stream.expected", input, expected).await; } -#[test] -fn query_with_stream_connection() { +#[tokio::test] +async fn 
query_with_stream_connection() { let input = include_str!("generate_typescript/fixtures/query-with-stream-connection.graphql"); let expected = include_str!("generate_typescript/fixtures/query-with-stream-connection.expected"); - test_fixture(transform_fixture, "query-with-stream-connection.graphql", "generate_typescript/fixtures/query-with-stream-connection.expected", input, expected); + test_fixture(transform_fixture, file!(), "query-with-stream-connection.graphql", "generate_typescript/fixtures/query-with-stream-connection.expected", input, expected).await; } -#[test] -fn recursive_fragments() { +#[tokio::test] +async fn recursive_fragments() { let input = include_str!("generate_typescript/fixtures/recursive-fragments.graphql"); let expected = include_str!("generate_typescript/fixtures/recursive-fragments.expected"); - test_fixture(transform_fixture, "recursive-fragments.graphql", "generate_typescript/fixtures/recursive-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "recursive-fragments.graphql", "generate_typescript/fixtures/recursive-fragments.expected", input, expected).await; } -#[test] -fn refetchable() { +#[tokio::test] +async fn refetchable() { let input = include_str!("generate_typescript/fixtures/refetchable.graphql"); let expected = include_str!("generate_typescript/fixtures/refetchable.expected"); - test_fixture(transform_fixture, "refetchable.graphql", "generate_typescript/fixtures/refetchable.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable.graphql", "generate_typescript/fixtures/refetchable.expected", input, expected).await; } -#[test] -fn refetchable_fragment() { +#[tokio::test] +async fn refetchable_fragment() { let input = include_str!("generate_typescript/fixtures/refetchable-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/refetchable-fragment.expected"); - test_fixture(transform_fixture, "refetchable-fragment.graphql", 
"generate_typescript/fixtures/refetchable-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "refetchable-fragment.graphql", "generate_typescript/fixtures/refetchable-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn regular_query_with_assignable_fragment_spread() { + let input = include_str!("generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.graphql"); + let expected = include_str!("generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.expected"); + test_fixture(transform_fixture, file!(), "regular-query-with-assignable-fragment-spread.graphql", "generate_typescript/fixtures/regular-query-with-assignable-fragment-spread.expected", input, expected).await; } -#[test] -fn relay_client_id_field() { +#[tokio::test] +async fn relay_client_id_field() { let input = include_str!("generate_typescript/fixtures/relay-client-id-field.graphql"); let expected = include_str!("generate_typescript/fixtures/relay-client-id-field.expected"); - test_fixture(transform_fixture, "relay-client-id-field.graphql", "generate_typescript/fixtures/relay-client-id-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-client-id-field.graphql", "generate_typescript/fixtures/relay-client-id-field.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_client_interface() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_interface() { let input = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.graphql"); let expected = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-client-interface.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"relay-resolver-with-output-type-client-interface.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-client-interface.expected", input, expected).await; } -#[test] -fn relay_resolver_with_output_type_client_object() { +#[tokio::test] +async fn relay_resolver_with_output_type_client_object() { let input = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-client-object.graphql"); let expected = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected"); - test_fixture(transform_fixture, "relay-resolver-with-output-type-client-object.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-client-object.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-client-object.expected", input, expected).await; } -#[test] -fn relay_weak_client_type() { +#[tokio::test] +async fn relay_resolver_with_output_type_relay_resolver_value() { + let input = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.graphql"); + let expected = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected"); + test_fixture(transform_fixture, file!(), "relay-resolver-with-output-type-relay-resolver-value.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_resolver_with_output_type_relay_resolver_value_required() { + let input = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.graphql"); + let expected = include_str!("generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected"); + test_fixture(transform_fixture, file!(), 
"relay-resolver-with-output-type-relay-resolver-value-required.graphql", "generate_typescript/fixtures/relay-resolver-with-output-type-relay-resolver-value-required.expected", input, expected).await; +} + +#[tokio::test] +async fn relay_weak_client_type() { let input = include_str!("generate_typescript/fixtures/relay-weak-client-type.graphql"); let expected = include_str!("generate_typescript/fixtures/relay-weak-client-type.expected"); - test_fixture(transform_fixture, "relay-weak-client-type.graphql", "generate_typescript/fixtures/relay-weak-client-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "relay-weak-client-type.graphql", "generate_typescript/fixtures/relay-weak-client-type.expected", input, expected).await; } -#[test] -fn required() { +#[tokio::test] +async fn required() { let input = include_str!("generate_typescript/fixtures/required.graphql"); let expected = include_str!("generate_typescript/fixtures/required.expected"); - test_fixture(transform_fixture, "required.graphql", "generate_typescript/fixtures/required.expected", input, expected); + test_fixture(transform_fixture, file!(), "required.graphql", "generate_typescript/fixtures/required.expected", input, expected).await; } -#[test] -fn required_bubbles_through_inline_fragments_to_fragment() { +#[tokio::test] +async fn required_bubbles_through_inline_fragments_to_fragment() { let input = include_str!("generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected"); - test_fixture(transform_fixture, "required-bubbles-through-inline-fragments-to-fragment.graphql", "generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-through-inline-fragments-to-fragment.graphql", 
"generate_typescript/fixtures/required-bubbles-through-inline-fragments-to-fragment.expected", input, expected).await; } -#[test] -fn required_bubbles_to_fragment() { +#[tokio::test] +async fn required_bubbles_to_fragment() { let input = include_str!("generate_typescript/fixtures/required-bubbles-to-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/required-bubbles-to-fragment.expected"); - test_fixture(transform_fixture, "required-bubbles-to-fragment.graphql", "generate_typescript/fixtures/required-bubbles-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-fragment.graphql", "generate_typescript/fixtures/required-bubbles-to-fragment.expected", input, expected).await; } -#[test] -fn required_bubbles_to_item_in_plural_field() { +#[tokio::test] +async fn required_bubbles_to_item_in_plural_field() { let input = include_str!("generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.graphql"); let expected = include_str!("generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected"); - test_fixture(transform_fixture, "required-bubbles-to-item-in-plural-field.graphql", "generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-to-item-in-plural-field.graphql", "generate_typescript/fixtures/required-bubbles-to-item-in-plural-field.expected", input, expected).await; } -#[test] -fn required_bubbles_to_query() { +#[tokio::test] +async fn required_bubbles_to_query() { let input = include_str!("generate_typescript/fixtures/required-bubbles-to-query.graphql"); let expected = include_str!("generate_typescript/fixtures/required-bubbles-to-query.expected"); - test_fixture(transform_fixture, "required-bubbles-to-query.graphql", "generate_typescript/fixtures/required-bubbles-to-query.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"required-bubbles-to-query.graphql", "generate_typescript/fixtures/required-bubbles-to-query.expected", input, expected).await; } -#[test] -fn required_bubbles_up_to_mutation_response() { +#[tokio::test] +async fn required_bubbles_up_to_mutation_response() { let input = include_str!("generate_typescript/fixtures/required-bubbles-up-to-mutation-response.graphql"); let expected = include_str!("generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected"); - test_fixture(transform_fixture, "required-bubbles-up-to-mutation-response.graphql", "generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-bubbles-up-to-mutation-response.graphql", "generate_typescript/fixtures/required-bubbles-up-to-mutation-response.expected", input, expected).await; } -#[test] -fn required_isolates_concrete_inline_fragments() { +#[tokio::test] +async fn required_isolates_concrete_inline_fragments() { let input = include_str!("generate_typescript/fixtures/required-isolates-concrete-inline-fragments.graphql"); let expected = include_str!("generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected"); - test_fixture(transform_fixture, "required-isolates-concrete-inline-fragments.graphql", "generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-isolates-concrete-inline-fragments.graphql", "generate_typescript/fixtures/required-isolates-concrete-inline-fragments.expected", input, expected).await; } -#[test] -fn required_raw_response_type() { +#[tokio::test] +async fn required_raw_response_type() { let input = include_str!("generate_typescript/fixtures/required-raw-response-type.graphql"); let expected = include_str!("generate_typescript/fixtures/required-raw-response-type.expected"); - test_fixture(transform_fixture, "required-raw-response-type.graphql", 
"generate_typescript/fixtures/required-raw-response-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-raw-response-type.graphql", "generate_typescript/fixtures/required-raw-response-type.expected", input, expected).await; } -#[test] -fn required_throw_doesnt_bubbles_to_fragment() { +#[tokio::test] +async fn required_throw_doesnt_bubbles_to_fragment() { let input = include_str!("generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected"); - test_fixture(transform_fixture, "required-throw-doesnt-bubbles-to-fragment.graphql", "generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throw-doesnt-bubbles-to-fragment.graphql", "generate_typescript/fixtures/required-throw-doesnt-bubbles-to-fragment.expected", input, expected).await; } -#[test] -fn required_throw_doesnt_bubbles_to_query() { +#[tokio::test] +async fn required_throw_doesnt_bubbles_to_query() { let input = include_str!("generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.graphql"); let expected = include_str!("generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected"); - test_fixture(transform_fixture, "required-throw-doesnt-bubbles-to-query.graphql", "generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throw-doesnt-bubbles-to-query.graphql", "generate_typescript/fixtures/required-throw-doesnt-bubbles-to-query.expected", input, expected).await; } -#[test] -fn required_throws_nested() { +#[tokio::test] +async fn required_throws_nested() { let input = include_str!("generate_typescript/fixtures/required-throws-nested.graphql"); let expected = include_str!("generate_typescript/fixtures/required-throws-nested.expected"); 
- test_fixture(transform_fixture, "required-throws-nested.graphql", "generate_typescript/fixtures/required-throws-nested.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-throws-nested.graphql", "generate_typescript/fixtures/required-throws-nested.expected", input, expected).await; } -#[test] -fn required_within_aliased_inline_fragment() { +#[tokio::test] +async fn required_within_aliased_inline_fragment() { let input = include_str!("generate_typescript/fixtures/required-within-aliased-inline-fragment.graphql"); let expected = include_str!("generate_typescript/fixtures/required-within-aliased-inline-fragment.expected"); - test_fixture(transform_fixture, "required-within-aliased-inline-fragment.graphql", "generate_typescript/fixtures/required-within-aliased-inline-fragment.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-within-aliased-inline-fragment.graphql", "generate_typescript/fixtures/required-within-aliased-inline-fragment.expected", input, expected).await; } -#[test] -fn required_within_aliased_inline_fragment_on_abstract() { +#[tokio::test] +async fn required_within_aliased_inline_fragment_on_abstract() { let input = include_str!("generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.graphql"); let expected = include_str!("generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected"); - test_fixture(transform_fixture, "required-within-aliased-inline-fragment-on-abstract.graphql", "generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected", input, expected); + test_fixture(transform_fixture, file!(), "required-within-aliased-inline-fragment-on-abstract.graphql", "generate_typescript/fixtures/required-within-aliased-inline-fragment-on-abstract.expected", input, expected).await; } -#[test] -fn roots() { +#[tokio::test] +async fn roots() { let input = include_str!("generate_typescript/fixtures/roots.graphql"); 
let expected = include_str!("generate_typescript/fixtures/roots.expected"); - test_fixture(transform_fixture, "roots.graphql", "generate_typescript/fixtures/roots.expected", input, expected); + test_fixture(transform_fixture, file!(), "roots.graphql", "generate_typescript/fixtures/roots.expected", input, expected).await; } -#[test] -fn scalar_field() { +#[tokio::test] +async fn scalar_field() { let input = include_str!("generate_typescript/fixtures/scalar-field.graphql"); let expected = include_str!("generate_typescript/fixtures/scalar-field.expected"); - test_fixture(transform_fixture, "scalar-field.graphql", "generate_typescript/fixtures/scalar-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "scalar-field.graphql", "generate_typescript/fixtures/scalar-field.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_in_raw_response() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_in_raw_response.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_in_raw_response.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_in_raw_response.graphql", "generate_typescript/fixtures/semantic_non_null_in_raw_response.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_items_in_matrix() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_items_in_matrix.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_items_in_matrix.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_items_in_matrix.graphql", "generate_typescript/fixtures/semantic_non_null_items_in_matrix.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_liked_field_resolver() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_liked_field_resolver.graphql"); + let expected = 
include_str!("generate_typescript/fixtures/semantic_non_null_liked_field_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_liked_field_resolver.graphql", "generate_typescript/fixtures/semantic_non_null_liked_field_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_liked_field_weak_resolver() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_liked_field_weak_resolver.graphql", "generate_typescript/fixtures/semantic_non_null_liked_field_weak_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_linked_field() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_linked_field.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_linked_field.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_linked_field.graphql", "generate_typescript/fixtures/semantic_non_null_linked_field.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_list_and_list_item() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_list_and_list_item.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_list_and_list_item.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_list_and_list_item.graphql", "generate_typescript/fixtures/semantic_non_null_list_and_list_item.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_list_item() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_list_item.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_list_item.expected"); + 
test_fixture(transform_fixture, file!(), "semantic_non_null_list_item.graphql", "generate_typescript/fixtures/semantic_non_null_list_item.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_scalar() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_scalar.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_scalar.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar.graphql", "generate_typescript/fixtures/semantic_non_null_scalar.expected", input, expected).await; +} + +#[tokio::test] +async fn semantic_non_null_scalar_feature_disabled() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar_feature_disabled.graphql", "generate_typescript/fixtures/semantic_non_null_scalar_feature_disabled.expected", input, expected).await; } -#[test] -fn simple() { +#[tokio::test] +async fn semantic_non_null_scalar_resolver() { + let input = include_str!("generate_typescript/fixtures/semantic_non_null_scalar_resolver.graphql"); + let expected = include_str!("generate_typescript/fixtures/semantic_non_null_scalar_resolver.expected"); + test_fixture(transform_fixture, file!(), "semantic_non_null_scalar_resolver.graphql", "generate_typescript/fixtures/semantic_non_null_scalar_resolver.expected", input, expected).await; +} + +#[tokio::test] +async fn simple() { let input = include_str!("generate_typescript/fixtures/simple.graphql"); let expected = include_str!("generate_typescript/fixtures/simple.expected"); - test_fixture(transform_fixture, "simple.graphql", "generate_typescript/fixtures/simple.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple.graphql", "generate_typescript/fixtures/simple.expected", 
input, expected).await; } -#[test] -fn simple_use_import_type_syntax() { +#[tokio::test] +async fn simple_use_import_type_syntax() { let input = include_str!("generate_typescript/fixtures/simple-use-import-type-syntax.graphql"); let expected = include_str!("generate_typescript/fixtures/simple-use-import-type-syntax.expected"); - test_fixture(transform_fixture, "simple-use-import-type-syntax.graphql", "generate_typescript/fixtures/simple-use-import-type-syntax.expected", input, expected); + test_fixture(transform_fixture, file!(), "simple-use-import-type-syntax.graphql", "generate_typescript/fixtures/simple-use-import-type-syntax.expected", input, expected).await; } -#[test] -fn typename_in_union_with_other_fields() { +#[tokio::test] +async fn typename_in_union_with_other_fields() { let input = include_str!("generate_typescript/fixtures/typename-in-union-with-other-fields.graphql"); let expected = include_str!("generate_typescript/fixtures/typename-in-union-with-other-fields.expected"); - test_fixture(transform_fixture, "typename-in-union-with-other-fields.graphql", "generate_typescript/fixtures/typename-in-union-with-other-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-in-union-with-other-fields.graphql", "generate_typescript/fixtures/typename-in-union-with-other-fields.expected", input, expected).await; } -#[test] -fn typename_inside_with_overlapping_fields() { +#[tokio::test] +async fn typename_inside_with_overlapping_fields() { let input = include_str!("generate_typescript/fixtures/typename-inside-with-overlapping-fields.graphql"); let expected = include_str!("generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected"); - test_fixture(transform_fixture, "typename-inside-with-overlapping-fields.graphql", "generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-inside-with-overlapping-fields.graphql", 
"generate_typescript/fixtures/typename-inside-with-overlapping-fields.expected", input, expected).await; } -#[test] -fn typename_on_union() { +#[tokio::test] +async fn typename_on_union() { let input = include_str!("generate_typescript/fixtures/typename-on-union.graphql"); let expected = include_str!("generate_typescript/fixtures/typename-on-union.expected"); - test_fixture(transform_fixture, "typename-on-union.graphql", "generate_typescript/fixtures/typename-on-union.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-union.graphql", "generate_typescript/fixtures/typename-on-union.expected", input, expected).await; } -#[test] -fn typename_on_union_with_non_matching_aliases() { +#[tokio::test] +async fn typename_on_union_with_non_matching_aliases() { let input = include_str!("generate_typescript/fixtures/typename-on-union-with-non-matching-aliases.graphql"); let expected = include_str!("generate_typescript/fixtures/typename-on-union-with-non-matching-aliases.expected"); - test_fixture(transform_fixture, "typename-on-union-with-non-matching-aliases.graphql", "generate_typescript/fixtures/typename-on-union-with-non-matching-aliases.expected", input, expected); + test_fixture(transform_fixture, file!(), "typename-on-union-with-non-matching-aliases.graphql", "generate_typescript/fixtures/typename-on-union-with-non-matching-aliases.expected", input, expected).await; } -#[test] -fn unmasked_fragment_spreads() { +#[tokio::test] +async fn unmasked_fragment_spreads() { let input = include_str!("generate_typescript/fixtures/unmasked-fragment-spreads.graphql"); let expected = include_str!("generate_typescript/fixtures/unmasked-fragment-spreads.expected"); - test_fixture(transform_fixture, "unmasked-fragment-spreads.graphql", "generate_typescript/fixtures/unmasked-fragment-spreads.expected", input, expected); + test_fixture(transform_fixture, file!(), "unmasked-fragment-spreads.graphql", 
"generate_typescript/fixtures/unmasked-fragment-spreads.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_fragment_spread() { + let input = include_str!("generate_typescript/fixtures/updatable-fragment-spread.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-fragment-spread.expected"); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread.graphql", "generate_typescript/fixtures/updatable-fragment-spread.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_fragment_spread_and_regular_spread() { + let input = include_str!("generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.expected"); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-and-regular-spread.graphql", "generate_typescript/fixtures/updatable-fragment-spread-and-regular-spread.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_fragment_spread_multiple() { + let input = include_str!("generate_typescript/fixtures/updatable-fragment-spread-multiple.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-fragment-spread-multiple.expected"); + test_fixture(transform_fixture, file!(), "updatable-fragment-spread-multiple.graphql", "generate_typescript/fixtures/updatable-fragment-spread-multiple.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation() { + let input = include_str!("generate_typescript/fixtures/updatable-operation.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation.graphql", "generate_typescript/fixtures/updatable-operation.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_assignable_fragment() { + let input = 
include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragment.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragment.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragment.graphql", "generate_typescript/fixtures/updatable-operation-assignable-fragment.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_assignable_fragment_plural() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragment-plural.graphql", "generate_typescript/fixtures/updatable-operation-assignable-fragment-plural.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_assignable_fragments_within_narrowing() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-assignable-fragments-within-narrowing.graphql", "generate_typescript/fixtures/updatable-operation-assignable-fragments-within-narrowing.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_multiple_assignable_fragments() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-multiple-assignable-fragments.graphql", 
"generate_typescript/fixtures/updatable-operation-multiple-assignable-fragments.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_plural_field_no_spreads() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-plural-field-no-spreads.graphql", "generate_typescript/fixtures/updatable-operation-plural-field-no-spreads.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_plural_field_with_spreads() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-plural-field-with-spreads.graphql", "generate_typescript/fixtures/updatable-operation-plural-field-with-spreads.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_special_fields() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-special-fields.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-special-fields.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-special-fields.graphql", "generate_typescript/fixtures/updatable-operation-special-fields.expected", input, expected).await; +} + +#[tokio::test] +async fn updatable_operation_type_refinement() { + let input = include_str!("generate_typescript/fixtures/updatable-operation-type-refinement.graphql"); + let expected = include_str!("generate_typescript/fixtures/updatable-operation-type-refinement.expected"); + test_fixture(transform_fixture, file!(), "updatable-operation-type-refinement.graphql", 
"generate_typescript/fixtures/updatable-operation-type-refinement.expected", input, expected).await; } diff --git a/compiler/crates/resolution-path/Cargo.toml b/compiler/crates/resolution-path/Cargo.toml index da1ed0cced580..aabff0726eeca 100644 --- a/compiler/crates/resolution-path/Cargo.toml +++ b/compiler/crates/resolution-path/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/resolution-path:resolution-path + [package] name = "resolution-path" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] diff --git a/compiler/crates/resolution-path/src/argument_root.rs b/compiler/crates/resolution-path/src/argument_root.rs index 021bec1490bb2..2683ad8135e3e 100644 --- a/compiler/crates/resolution-path/src/argument_root.rs +++ b/compiler/crates/resolution-path/src/argument_root.rs @@ -13,6 +13,9 @@ impl<'a> ArgumentParent<'a> { Self::Directive(directive_path) => ArgumentRoot::Directive(directive_path), Self::ScalarField(scalar_field_path) => ArgumentRoot::ScalarField(scalar_field_path), Self::LinkedField(linked_field_path) => ArgumentRoot::LinkedField(linked_field_path), + Self::FragmentSpread(fragment_spread_path) => { + ArgumentRoot::FragmentSpread(fragment_spread_path) + } Self::ConstantObject(ConstantObjectPath { inner: _, parent }) => parent .parent .find_enclosing_argument_path() @@ -33,6 +36,7 @@ pub enum ArgumentRoot<'a> { LinkedField(&'a LinkedFieldPath<'a>), ScalarField(&'a ScalarFieldPath<'a>), Directive(&'a DirectivePath<'a>), + FragmentSpread(&'a FragmentSpreadPath<'a>), } #[cfg(test)] diff --git a/compiler/crates/resolution-path/src/constant_value_root.rs b/compiler/crates/resolution-path/src/constant_value_root.rs index a420bf7b4cea3..8a6f33fff7e5b 100644 --- a/compiler/crates/resolution-path/src/constant_value_root.rs +++ b/compiler/crates/resolution-path/src/constant_value_root.rs @@ -12,7 +12,11 @@ impl<'a> ConstantValueParent<'a> { match 
self { ConstantValueParent::DefaultValue(DefaultValuePath { inner: _, - parent: variable_definition_path, + parent: DefaultValueParent::InputValueDefinition(input_value_definition_path), + }) => ConstantValueRoot::InputValueDefinition(input_value_definition_path), + ConstantValueParent::DefaultValue(DefaultValuePath { + inner: _, + parent: DefaultValueParent::VariableDefinition(variable_definition_path), }) => ConstantValueRoot::VariableDefinition(variable_definition_path), ConstantValueParent::Value(ValuePath { inner: _, @@ -25,14 +29,17 @@ impl<'a> ConstantValueParent<'a> { ConstantValueParent::ConstantObj(constant_obj) => { constant_obj.parent.parent.find_constant_value_root() } - ConstantValueParent::ConstantArgValue(ConstantArgPath { - inner: _, - parent: - ConstantObjPath { + ConstantValueParent::ConstantArgumentValue(constant_argument_path) => { + match &constant_argument_path.parent { + ConstantArgumentParent::ConstantDirective(_) => { + ConstantValueRoot::ConstantArgument(constant_argument_path) + } + ConstantArgumentParent::ConstantObj(ConstantObjPath { inner: _, parent: constant_value_path, - }, - }) => constant_value_path.parent.find_constant_value_root(), + }) => constant_value_path.parent.find_constant_value_root(), + } + } } } } @@ -65,6 +72,8 @@ impl<'a> ValueParent<'a> { #[derive(Debug)] pub enum ConstantValueRoot<'a> { VariableDefinition(&'a VariableDefinitionPath<'a>), + InputValueDefinition(&'a InputValueDefinitionPath<'a>), + ConstantArgument(&'a ConstantArgumentPath<'a>), Argument(&'a ArgumentPath<'a>), } @@ -72,6 +81,7 @@ pub enum ConstantValueRoot<'a> { mod test { use super::*; use crate::test::test_resolution; + use crate::test::test_schema_resolution; #[test] fn constant_value_root_variable_definition_simple_boolean() { @@ -176,4 +186,104 @@ mod test { } }) } + + #[test] + fn constant_value_input_value_definition_simple_boolean() { + let source = r#" + input Foo { + bar: Boolean = true + } + "#; + test_schema_resolution(source, "true", 
|resolved| { + if let ResolutionPath::ConstantBoolean(ConstantBooleanPath { + inner: _, + parent: ConstantValuePath { inner: _, parent }, + }) = resolved + { + assert_matches!( + parent.find_constant_value_root(), + ConstantValueRoot::InputValueDefinition(_) + ) + } else { + panic!( + "Should resolve to ConstantBoolean, instead got {:?}", + resolved + ); + } + }) + } + + #[test] + fn constant_value_input_value_definition_nested_boolean() { + let source = r#" + input Foo { + bar: AComplexType = {greeting: "Hello", inputs: [true]} + } + "#; + test_schema_resolution(source, "true", |resolved| { + if let ResolutionPath::ConstantBoolean(ConstantBooleanPath { + inner: _, + parent: ConstantValuePath { inner: _, parent }, + }) = resolved + { + assert_matches!( + parent.find_constant_value_root(), + ConstantValueRoot::InputValueDefinition(_) + ) + } else { + panic!( + "Should resolve to ConstantBoolean, instead got {:?}", + resolved + ); + } + }) + } + + #[test] + fn constant_value_constant_argument_simple_boolean() { + let source = r#" + scalar Foo @bar(baz: true) + "#; + test_schema_resolution(source, "true", |resolved| { + if let ResolutionPath::ConstantBoolean(ConstantBooleanPath { + inner: _, + parent: ConstantValuePath { inner: _, parent }, + }) = resolved + { + assert_matches!( + parent.find_constant_value_root(), + ConstantValueRoot::ConstantArgument(_) + ) + } else { + panic!( + "Should resolve to ConstantBoolean, instead got {:?}", + resolved + ); + } + }) + } + + #[test] + fn constant_value_constant_argument_nested_boolean() { + let source = r#" + scalar Foo @bar(baz: {greeting: "Hello", inputs: [true]}) + "#; + test_schema_resolution(source, "true", |resolved| { + if let ResolutionPath::ConstantBoolean(ConstantBooleanPath { + inner: _, + parent: ConstantValuePath { inner: _, parent }, + }) = resolved + { + assert_matches!( + parent.find_constant_value_root(), + ConstantValueRoot::ConstantArgument(_) + ) + } else { + panic!( + "Should resolve to ConstantBoolean, 
instead got {:?}", + resolved + ); + } + }) + } } diff --git a/compiler/crates/resolution-path/src/lib.rs b/compiler/crates/resolution-path/src/lib.rs index a16f6c7835d2f..05c3b9743368c 100644 --- a/compiler/crates/resolution-path/src/lib.rs +++ b/compiler/crates/resolution-path/src/lib.rs @@ -10,31 +10,52 @@ use common::Span; use graphql_syntax::Argument; use graphql_syntax::BooleanNode; use graphql_syntax::ConstantArgument; +use graphql_syntax::ConstantDirective; use graphql_syntax::ConstantValue; use graphql_syntax::DefaultValue; use graphql_syntax::Directive; +use graphql_syntax::DirectiveDefinition; use graphql_syntax::EnumNode; +use graphql_syntax::EnumTypeDefinition; +use graphql_syntax::EnumTypeExtension; +use graphql_syntax::EnumValueDefinition; use graphql_syntax::ExecutableDefinition; use graphql_syntax::ExecutableDocument; +use graphql_syntax::FieldDefinition; use graphql_syntax::FloatNode; use graphql_syntax::FragmentDefinition; use graphql_syntax::FragmentSpread; use graphql_syntax::Identifier; use graphql_syntax::InlineFragment; +use graphql_syntax::InputObjectTypeDefinition; +use graphql_syntax::InputObjectTypeExtension; +use graphql_syntax::InputValueDefinition; use graphql_syntax::IntNode; +use graphql_syntax::InterfaceTypeDefinition; +use graphql_syntax::InterfaceTypeExtension; use graphql_syntax::LinkedField; use graphql_syntax::List; use graphql_syntax::ListTypeAnnotation; use graphql_syntax::NamedTypeAnnotation; use graphql_syntax::NonNullTypeAnnotation; +use graphql_syntax::ObjectTypeDefinition; +use graphql_syntax::ObjectTypeExtension; use graphql_syntax::OperationDefinition; use graphql_syntax::OperationKind; +use graphql_syntax::OperationTypeDefinition; use graphql_syntax::ScalarField; +use graphql_syntax::ScalarTypeDefinition; +use graphql_syntax::ScalarTypeExtension; +use graphql_syntax::SchemaDefinition; +use graphql_syntax::SchemaDocument; +use graphql_syntax::SchemaExtension; use graphql_syntax::Selection; use 
graphql_syntax::StringNode; use graphql_syntax::Token; use graphql_syntax::TypeAnnotation; use graphql_syntax::TypeCondition; +use graphql_syntax::UnionTypeDefinition; +use graphql_syntax::UnionTypeExtension; use graphql_syntax::Value; use graphql_syntax::VariableDefinition; use graphql_syntax::VariableIdentifier; @@ -43,6 +64,7 @@ mod constant_value_root; pub use constant_value_root::ConstantValueRoot; mod argument_root; pub use argument_root::*; +use schema::TypeSystemDefinition; mod selection_parent_type; mod variable_definition_path; @@ -124,7 +146,7 @@ pub enum ResolutionPath<'a> { ConstantEnum(ConstantEnumPath<'a>), ConstantList(ConstantListPath<'a>), ConstantObj(ConstantObjPath<'a>), - ConstantArg(ConstantArgPath<'a>), + ConstantArgument(ConstantArgumentPath<'a>), ValueList(ValueListPath<'a>), VariableIdentifier(VariableIdentifierPath<'a>), ConstantObject(ConstantObjectPath<'a>), @@ -133,6 +155,28 @@ pub enum ResolutionPath<'a> { ListTypeAnnotation(ListTypeAnnotationPath<'a>), NonNullTypeAnnotation(NonNullTypeAnnotationPath<'a>), DefaultValue(DefaultValuePath<'a>), + + SchemaDocument(&'a SchemaDocument), + SchemaDefinition(SchemaDefinitionPath<'a>), + SchemaExtension(SchemaExtensionPath<'a>), + OperationTypeDefinition(OperationTypeDefinitionPath<'a>), + DirectiveDefinition(DirectiveDefinitionPath<'a>), + InputValueDefinition(InputValueDefinitionPath<'a>), + UnionTypeDefinition(UnionTypeDefinitionPath<'a>), + UnionTypeExtension(UnionTypeExtensionPath<'a>), + InterfaceTypeDefinition(InterfaceTypeDefinitionPath<'a>), + InterfaceTypeExtension(InterfaceTypeExtensionPath<'a>), + ObjectTypeDefinition(ObjectTypeDefinitionPath<'a>), + ObjectTypeExtension(ObjectTypeExtensionPath<'a>), + InputObjectTypeDefinition(InputObjectTypeDefinitionPath<'a>), + InputObjectTypeExtension(InputObjectTypeExtensionPath<'a>), + EnumTypeDefinition(EnumTypeDefinitionPath<'a>), + EnumTypeExtension(EnumTypeExtensionPath<'a>), + EnumValueDefinition(EnumValueDefinitionPath<'a>), + 
ScalarTypeDefinition(ScalarTypeDefinitionPath<'a>), + ScalarTypeExtension(ScalarTypeExtensionPath<'a>), + FieldDefinition(FieldDefinitionPath<'a>), + ConstantDirective(ConstantDirectivePath<'a>), } #[derive(Debug)] pub struct Path { @@ -282,7 +326,10 @@ impl<'a> ResolvePosition<'a> for VariableDefinition { if let Some(default) = &self.default_value { if default.contains(position) { - return default.resolve(self.path(parent), position); + return default.resolve( + DefaultValueParent::VariableDefinition(self.path(parent)), + position, + ); } } @@ -303,7 +350,11 @@ impl<'a> ResolvePosition<'a> for VariableDefinition { } pub type DefaultValuePath<'a> = Path<&'a DefaultValue, DefaultValueParent<'a>>; -pub type DefaultValueParent<'a> = VariableDefinitionPath<'a>; +#[derive(Debug)] +pub enum DefaultValueParent<'a> { + VariableDefinition(VariableDefinitionPath<'a>), + InputValueDefinition(InputValueDefinitionPath<'a>), +} impl<'a> ResolvePosition<'a> for DefaultValue { type Parent = DefaultValueParent<'a>; @@ -332,6 +383,8 @@ pub enum TypeAnnotationParent<'a> { VariableDefinition(VariableDefinitionPath<'a>), ListTypeAnnotation(ListTypeAnnotationPath<'a>), NonNullTypeAnnotation(NonNullTypeAnnotationPath<'a>), + FieldDefinition(FieldDefinitionPath<'a>), + InputValueDefinition(InputValueDefinitionPath<'a>), } impl<'a> ResolvePosition<'a> for TypeAnnotation { type Parent = TypeAnnotationParent<'a>; @@ -531,7 +584,28 @@ pub enum IdentParent<'a> { ArgumentName(ArgumentPath<'a>), ArgumentValue(ArgumentPath<'a>), NamedTypeAnnotation(NamedTypeAnnotationPath<'a>), - ConstantArgKey(ConstantArgPath<'a>), + ConstantArgumentKey(ConstantArgumentPath<'a>), + + DirectiveDefinitionName(DirectiveDefinitionPath<'a>), + UnionTypeDefinitionName(UnionTypeDefinitionPath<'a>), + UnionTypeExtensionName(UnionTypeExtensionPath<'a>), + UnionTypeMemberType(UnionTypeMemberParent<'a>), + InterfaceTypeDefinitionName(InterfaceTypeDefinitionPath<'a>), + 
InterfaceTypeExtensionName(InterfaceTypeExtensionPath<'a>), + ObjectTypeDefinitionName(ObjectTypeDefinitionPath<'a>), + ObjectTypeExtensionName(ObjectTypeExtensionPath<'a>), + ImplementedInterfaceName(ImplementedInterfaceParent<'a>), + InputObjectTypeDefinitionName(InputObjectTypeDefinitionPath<'a>), + InputObjectTypeExtensionName(InputObjectTypeExtensionPath<'a>), + EnumTypeDefinitionName(EnumTypeDefinitionPath<'a>), + EnumTypeExtensionName(EnumTypeExtensionPath<'a>), + EnumValueDefinitionName(EnumValueDefinitionPath<'a>), + ScalarTypeDefinitionName(ScalarTypeDefinitionPath<'a>), + ScalarTypeExtensionName(ScalarTypeExtensionPath<'a>), + FieldDefinitionName(FieldDefinitionPath<'a>), + InputValueDefinitionName(InputValueDefinitionPath<'a>), + OperationTypeDefinitionType(OperationTypeDefinitionPath<'a>), + ConstantDirectiveName(ConstantDirectivePath<'a>), } impl<'a> ResolvePosition<'a> for Identifier { @@ -587,6 +661,14 @@ impl<'a> ResolvePosition<'a> for FragmentSpread { .name .resolve(IdentParent::FragmentSpreadName(self.path(parent)), position); } + if let Some(arguments) = &self.arguments { + for argument in &arguments.items { + if argument.contains(position) { + return argument + .resolve(ArgumentParent::FragmentSpread(self.path(parent)), position); + } + } + } for directive in self.directives.iter() { if directive.contains(position) { return directive @@ -797,6 +879,7 @@ pub enum ArgumentParent<'a> { ScalarField(ScalarFieldPath<'a>), ConstantObject(ConstantObjectPath<'a>), Directive(DirectivePath<'a>), + FragmentSpread(FragmentSpreadPath<'a>), } impl<'a> ResolvePosition<'a> for Argument { @@ -861,7 +944,7 @@ pub enum ConstantValueParent<'a> { DefaultValue(DefaultValuePath<'a>), ConstantList(ConstantListPath<'a>), ConstantObj(Box>), - ConstantArgValue(ConstantArgPath<'a>), + ConstantArgumentValue(ConstantArgumentPath<'a>), } impl<'a> ResolvePosition<'a> for ConstantValue { type Parent = ConstantValueParent<'a>; @@ -936,7 +1019,10 @@ impl<'a> ResolvePosition<'a> 
for List { fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { for arg in &self.items { if arg.contains(position) { - return arg.resolve(self.path(parent), position); + return arg.resolve( + ConstantArgumentParent::ConstantObj(self.path(parent)), + position, + ); } } ResolutionPath::ConstantObj(self.path(parent)) @@ -946,23 +1032,29 @@ impl<'a> ResolvePosition<'a> for List { } } -pub type ConstantArgPath<'a> = Path<&'a ConstantArgument, ConstantObjPath<'a>>; +pub type ConstantArgumentPath<'a> = Path<&'a ConstantArgument, ConstantArgumentParent<'a>>; +#[derive(Debug)] +pub enum ConstantArgumentParent<'a> { + ConstantObj(ConstantObjPath<'a>), + ConstantDirective(ConstantDirectivePath<'a>), +} impl<'a> ResolvePosition<'a> for ConstantArgument { - type Parent = ConstantObjPath<'a>; + type Parent = ConstantArgumentParent<'a>; fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { if self.name.contains(position) { - return self - .name - .resolve(IdentParent::ConstantArgKey(self.path(parent)), position); + return self.name.resolve( + IdentParent::ConstantArgumentKey(self.path(parent)), + position, + ); } if self.value.contains(position) { return self.value.resolve( - ConstantValueParent::ConstantArgValue(self.path(parent)), + ConstantValueParent::ConstantArgumentValue(self.path(parent)), position, ); } - ResolutionPath::ConstantArg(self.path(parent)) + ResolutionPath::ConstantArgument(self.path(parent)) } fn contains(&'a self, position: Span) -> bool { self.span.contains(position) @@ -1025,5 +1117,985 @@ impl<'a> ResolvePosition<'a> for List { } } +pub type SchemaDocumentPath<'a> = Path<&'a SchemaDocument, ()>; + +impl<'a> ResolvePosition<'a> for SchemaDocument { + type Parent = (); + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + for definition in &self.definitions { + if definition.contains(position) { + return definition.resolve(self.path(parent), position); + } + } + + 
// We didn't find the position in the definitions + ResolutionPath::SchemaDocument(self) + } + + fn contains(&'a self, position: Span) -> bool { + self.location.span().contains(position) + } +} + +pub type TypeSystemDefinitionPath<'a> = + Path<&'a TypeSystemDefinition, TypeSystemDefinitionParent<'a>>; +pub type TypeSystemDefinitionParent<'a> = SchemaDocumentPath<'a>; + +impl<'a> ResolvePosition<'a> for TypeSystemDefinition { + type Parent = TypeSystemDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + match self { + TypeSystemDefinition::DirectiveDefinition(directive) => { + directive.resolve(self.path(parent), position) + } + TypeSystemDefinition::UnionTypeDefinition(union) => { + union.resolve(self.path(parent), position) + } + TypeSystemDefinition::UnionTypeExtension(union_ext) => { + union_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::InterfaceTypeDefinition(interface) => { + interface.resolve(self.path(parent), position) + } + TypeSystemDefinition::InterfaceTypeExtension(interface_ext) => { + interface_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::ObjectTypeDefinition(object) => { + object.resolve(self.path(parent), position) + } + TypeSystemDefinition::ObjectTypeExtension(object_ext) => { + object_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::InputObjectTypeDefinition(input_object) => { + input_object.resolve(self.path(parent), position) + } + TypeSystemDefinition::InputObjectTypeExtension(input_object_ext) => { + input_object_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::EnumTypeDefinition(enum_type) => { + enum_type.resolve(self.path(parent), position) + } + TypeSystemDefinition::EnumTypeExtension(enum_type_ext) => { + enum_type_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::SchemaDefinition(schema) => { + schema.resolve(self.path(parent), position) + } + 
TypeSystemDefinition::SchemaExtension(schema_ext) => { + schema_ext.resolve(self.path(parent), position) + } + TypeSystemDefinition::ScalarTypeDefinition(scalar) => { + scalar.resolve(self.path(parent), position) + } + TypeSystemDefinition::ScalarTypeExtension(scalar_ext) => { + scalar_ext.resolve(self.path(parent), position) + } + } + } + + fn contains(&'a self, position: Span) -> bool { + match self { + TypeSystemDefinition::DirectiveDefinition(directive) => directive.contains(position), + TypeSystemDefinition::UnionTypeDefinition(union) => union.contains(position), + TypeSystemDefinition::UnionTypeExtension(union_ext) => union_ext.contains(position), + TypeSystemDefinition::InterfaceTypeDefinition(interface) => { + interface.contains(position) + } + TypeSystemDefinition::InterfaceTypeExtension(interface_ext) => { + interface_ext.contains(position) + } + TypeSystemDefinition::ObjectTypeDefinition(object) => object.contains(position), + TypeSystemDefinition::ObjectTypeExtension(object_ext) => object_ext.contains(position), + TypeSystemDefinition::InputObjectTypeDefinition(input_object) => { + input_object.contains(position) + } + TypeSystemDefinition::InputObjectTypeExtension(input_object_ext) => { + input_object_ext.contains(position) + } + TypeSystemDefinition::EnumTypeDefinition(enum_type) => enum_type.contains(position), + TypeSystemDefinition::EnumTypeExtension(enum_type_ext) => { + enum_type_ext.contains(position) + } + TypeSystemDefinition::SchemaDefinition(schema) => schema.contains(position), + TypeSystemDefinition::SchemaExtension(schema_ext) => schema_ext.contains(position), + TypeSystemDefinition::ScalarTypeDefinition(scalar) => scalar.contains(position), + TypeSystemDefinition::ScalarTypeExtension(scalar_ext) => scalar_ext.contains(position), + } + } +} + +pub type DirectiveDefinitionPath<'a> = Path<&'a DirectiveDefinition, DirectiveDefinitionParent<'a>>; +pub type DirectiveDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> 
ResolvePosition<'a> for DirectiveDefinition { + type Parent = DirectiveDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::DirectiveDefinitionName(self.path(parent)), + position, + ); + } + + if let Some(arguments) = &self.arguments { + for argument in &arguments.items { + if argument.contains(position) { + return argument.resolve( + InputValueDefinitionParent::DirectiveDefinition(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::DirectiveDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +#[derive(Debug)] +pub enum UnionTypeMemberParent<'a> { + UnionTypeDefinition(UnionTypeDefinitionPath<'a>), + UnionTypeExtension(UnionTypeExtensionPath<'a>), +} + +pub type UnionTypeDefinitionPath<'a> = Path<&'a UnionTypeDefinition, UnionTypeDefinitionParent<'a>>; +pub type UnionTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for UnionTypeDefinition { + type Parent = UnionTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::UnionTypeDefinitionName(self.path(parent)), + position, + ); + } + + for member in &self.members { + if member.contains(position) { + return member.resolve( + IdentParent::UnionTypeMemberType(UnionTypeMemberParent::UnionTypeDefinition( + self.path(parent), + )), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::UnionTypeDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::UnionTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type 
UnionTypeExtensionPath<'a> = Path<&'a UnionTypeExtension, UnionTypeExtensionParent<'a>>; +pub type UnionTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for UnionTypeExtension { + type Parent = UnionTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::UnionTypeExtensionName(self.path(parent)), + position, + ); + } + + for member in &self.members { + if member.contains(position) { + return member.resolve( + IdentParent::UnionTypeMemberType(UnionTypeMemberParent::UnionTypeExtension( + self.path(parent), + )), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::UnionTypeExtension(self.path(parent)), + position, + ); + } + } + + ResolutionPath::UnionTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +#[derive(Debug)] +pub enum ImplementedInterfaceParent<'a> { + ObjectTypeDefinition(ObjectTypeDefinitionPath<'a>), + ObjectTypeExtension(ObjectTypeExtensionPath<'a>), + InterfaceTypeDefinition(InterfaceTypeDefinitionPath<'a>), + InterfaceTypeExtension(InterfaceTypeExtensionPath<'a>), +} + +pub type InterfaceTypeDefinitionPath<'a> = + Path<&'a InterfaceTypeDefinition, InterfaceTypeDefinitionParent<'a>>; +pub type InterfaceTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for InterfaceTypeDefinition { + type Parent = InterfaceTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::InterfaceTypeDefinitionName(self.path(parent)), + position, + ); + } + + for interface in &self.interfaces { + if interface.contains(position) { + return 
interface.resolve( + IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::InterfaceTypeDefinition(self.path(parent)), + ), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::InterfaceTypeDefinition(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + FieldDefinitionParent::InterfaceTypeDefinition(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::InterfaceTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type InterfaceTypeExtensionPath<'a> = + Path<&'a InterfaceTypeExtension, InterfaceTypeExtensionParent<'a>>; +pub type InterfaceTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for InterfaceTypeExtension { + type Parent = InterfaceTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::InterfaceTypeExtensionName(self.path(parent)), + position, + ); + } + + for interface in &self.interfaces { + if interface.contains(position) { + return interface.resolve( + IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::InterfaceTypeExtension(self.path(parent)), + ), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::InterfaceTypeExtension(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + FieldDefinitionParent::InterfaceTypeExtension(self.path(parent)), + position, + ); + } + } + } + + 
ResolutionPath::InterfaceTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type ObjectTypeDefinitionPath<'a> = + Path<&'a ObjectTypeDefinition, ObjectTypeDefinitionParent<'a>>; +pub type ObjectTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for ObjectTypeDefinition { + type Parent = ObjectTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::ObjectTypeDefinitionName(self.path(parent)), + position, + ); + } + + for interface in &self.interfaces { + if interface.contains(position) { + return interface.resolve( + IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::ObjectTypeDefinition(self.path(parent)), + ), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::ObjectTypeDefinition(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + FieldDefinitionParent::ObjectTypeDefinition(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::ObjectTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +type ObjectTypeExtensionPath<'a> = Path<&'a ObjectTypeExtension, ObjectTypeExtensionParent<'a>>; +pub type ObjectTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for ObjectTypeExtension { + type Parent = ObjectTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::ObjectTypeExtensionName(self.path(parent)), + 
position, + ); + } + + for interface in &self.interfaces { + if interface.contains(position) { + return interface.resolve( + IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::ObjectTypeExtension(self.path(parent)), + ), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::ObjectTypeExtension(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + FieldDefinitionParent::ObjectTypeExtension(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::ObjectTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type FieldDefinitionPath<'a> = Path<&'a FieldDefinition, FieldDefinitionParent<'a>>; +#[derive(Debug)] +pub enum FieldDefinitionParent<'a> { + ObjectTypeDefinition(ObjectTypeDefinitionPath<'a>), + ObjectTypeExtension(ObjectTypeExtensionPath<'a>), + InterfaceTypeDefinition(InterfaceTypeDefinitionPath<'a>), + InterfaceTypeExtension(InterfaceTypeExtensionPath<'a>), +} + +impl<'a> ResolvePosition<'a> for FieldDefinition { + type Parent = FieldDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::FieldDefinitionName(self.path(parent)), + position, + ); + } + + if let Some(arguments) = &self.arguments { + for argument in &arguments.items { + if argument.contains(position) { + return argument.resolve( + InputValueDefinitionParent::FieldDefinition(self.path(parent)), + position, + ); + } + } + } + + if self.type_.contains(position) { + return self.type_.resolve( + TypeAnnotationParent::FieldDefinition(self.path(parent)), + position, + ); + } + + for directive in self.directives.iter() { + if 
directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::FieldDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::FieldDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type InputObjectTypeDefinitionPath<'a> = + Path<&'a InputObjectTypeDefinition, InputObjectTypeDefinitionParent<'a>>; +pub type InputObjectTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for InputObjectTypeDefinition { + type Parent = InputObjectTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::InputObjectTypeDefinitionName(self.path(parent)), + position, + ); + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::InputObjectTypeDefinition(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + InputValueDefinitionParent::InputObjectTypeDefinition(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::InputObjectTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type InputObjectTypeExtensionPath<'a> = + Path<&'a InputObjectTypeExtension, InputObjectTypeExtensionParent<'a>>; +pub type InputObjectTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for InputObjectTypeExtension { + type Parent = InputObjectTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::InputObjectTypeExtensionName(self.path(parent)), + position, 
+ ); + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::InputObjectTypeExtension(self.path(parent)), + position, + ); + } + } + + if let Some(field_list) = &self.fields { + for field in &field_list.items { + if field.contains(position) { + return field.resolve( + InputValueDefinitionParent::InputObjectTypeExtension(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::InputObjectTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type InputValueDefinitionPath<'a> = + Path<&'a InputValueDefinition, InputValueDefinitionParent<'a>>; +#[derive(Debug)] +pub enum InputValueDefinitionParent<'a> { + DirectiveDefinition(DirectiveDefinitionPath<'a>), + InputObjectTypeDefinition(InputObjectTypeDefinitionPath<'a>), + InputObjectTypeExtension(InputObjectTypeExtensionPath<'a>), + FieldDefinition(FieldDefinitionPath<'a>), +} + +impl<'a> ResolvePosition<'a> for InputValueDefinition { + type Parent = InputValueDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::InputValueDefinitionName(self.path(parent)), + position, + ); + } + + if self.type_.contains(position) { + return self.type_.resolve( + TypeAnnotationParent::InputValueDefinition(self.path(parent)), + position, + ); + } + + if let Some(default_value) = &self.default_value { + if default_value.contains(position) { + return default_value.resolve( + DefaultValueParent::InputValueDefinition(self.path(parent)), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::InputValueDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::InputValueDefinition(self.path(parent)) + } + + fn 
contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type EnumTypeDefinitionPath<'a> = Path<&'a EnumTypeDefinition, EnumTypeDefinitionParent<'a>>; +pub type EnumTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for EnumTypeDefinition { + type Parent = EnumTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::EnumTypeDefinitionName(self.path(parent)), + position, + ); + } + + if let Some(values) = &self.values { + for value in &values.items { + if value.contains(position) { + return value.resolve( + EnumValueDefinitionParent::EnumTypeDefinition(self.path(parent)), + position, + ); + } + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::EnumTypeDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::EnumTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type EnumTypeExtensionPath<'a> = Path<&'a EnumTypeExtension, EnumTypeExtensionParent<'a>>; +pub type EnumTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for EnumTypeExtension { + type Parent = EnumTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::EnumTypeExtensionName(self.path(parent)), + position, + ); + } + + if let Some(values) = &self.values { + for value in &values.items { + if value.contains(position) { + return value.resolve( + EnumValueDefinitionParent::EnumTypeExtension(self.path(parent)), + position, + ); + } + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + 
ConstantDirectiveParent::EnumTypeExtension(self.path(parent)), + position, + ); + } + } + + ResolutionPath::EnumTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type EnumValueDefinitionPath<'a> = Path<&'a EnumValueDefinition, EnumValueDefinitionParent<'a>>; +#[derive(Debug)] +pub enum EnumValueDefinitionParent<'a> { + EnumTypeDefinition(EnumTypeDefinitionPath<'a>), + EnumTypeExtension(EnumTypeExtensionPath<'a>), +} + +impl<'a> ResolvePosition<'a> for EnumValueDefinition { + type Parent = EnumValueDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::EnumValueDefinitionName(self.path(parent)), + position, + ); + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::EnumValueDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::EnumValueDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type SchemaDefinitionPath<'a> = Path<&'a SchemaDefinition, SchemaDefinitionParent<'a>>; +pub type SchemaDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for SchemaDefinition { + type Parent = SchemaDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + for operation_type in &self.operation_types.items { + if operation_type.contains(position) { + return operation_type.resolve( + OperationTypeDefinitionParent::SchemaDefinition(self.path(parent)), + position, + ); + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::SchemaDefinition(self.path(parent)), + position, + ); + } + } + + 
ResolutionPath::SchemaDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type SchemaExtensionPath<'a> = Path<&'a SchemaExtension, SchemaExtensionParent<'a>>; +pub type SchemaExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for SchemaExtension { + type Parent = SchemaExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if let Some(operation_types) = &self.operation_types { + for operation_type in &operation_types.items { + if operation_type.contains(position) { + return operation_type.resolve( + OperationTypeDefinitionParent::SchemaExtension(self.path(parent)), + position, + ); + } + } + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::SchemaExtension(self.path(parent)), + position, + ); + } + } + + ResolutionPath::SchemaExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type OperationTypeDefinitionPath<'a> = + Path<&'a OperationTypeDefinition, OperationTypeDefinitionParent<'a>>; +#[derive(Debug)] +pub enum OperationTypeDefinitionParent<'a> { + SchemaDefinition(SchemaDefinitionPath<'a>), + SchemaExtension(SchemaExtensionPath<'a>), +} + +impl<'a> ResolvePosition<'a> for OperationTypeDefinition { + type Parent = OperationTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.type_.contains(position) { + return self.type_.resolve( + IdentParent::OperationTypeDefinitionType(self.path(parent)), + position, + ); + } + + ResolutionPath::OperationTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type ScalarTypeDefinitionPath<'a> = + Path<&'a ScalarTypeDefinition, 
ScalarTypeDefinitionParent<'a>>; +pub type ScalarTypeDefinitionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for ScalarTypeDefinition { + type Parent = ScalarTypeDefinitionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::ScalarTypeDefinitionName(self.path(parent)), + position, + ); + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::ScalarTypeDefinition(self.path(parent)), + position, + ); + } + } + + ResolutionPath::ScalarTypeDefinition(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type ScalarTypeExtensionPath<'a> = Path<&'a ScalarTypeExtension, ScalarTypeExtensionParent<'a>>; +pub type ScalarTypeExtensionParent<'a> = TypeSystemDefinitionPath<'a>; + +impl<'a> ResolvePosition<'a> for ScalarTypeExtension { + type Parent = ScalarTypeExtensionParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + if self.name.contains(position) { + return self.name.resolve( + IdentParent::ScalarTypeExtensionName(self.path(parent)), + position, + ); + } + + for directive in self.directives.iter() { + if directive.contains(position) { + return directive.resolve( + ConstantDirectiveParent::ScalarTypeExtension(self.path(parent)), + position, + ); + } + } + + ResolutionPath::ScalarTypeExtension(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + +pub type ConstantDirectivePath<'a> = Path<&'a ConstantDirective, ConstantDirectiveParent<'a>>; +#[derive(Debug)] +pub enum ConstantDirectiveParent<'a> { + UnionTypeDefinition(UnionTypeDefinitionPath<'a>), + UnionTypeExtension(UnionTypeExtensionPath<'a>), + InterfaceTypeDefinition(InterfaceTypeDefinitionPath<'a>), + 
InterfaceTypeExtension(InterfaceTypeExtensionPath<'a>), + ObjectTypeDefinition(ObjectTypeDefinitionPath<'a>), + ObjectTypeExtension(ObjectTypeExtensionPath<'a>), + InputObjectTypeDefinition(InputObjectTypeDefinitionPath<'a>), + InputObjectTypeExtension(InputObjectTypeExtensionPath<'a>), + EnumTypeDefinition(EnumTypeDefinitionPath<'a>), + EnumTypeExtension(EnumTypeExtensionPath<'a>), + EnumValueDefinition(EnumValueDefinitionPath<'a>), + ScalarTypeDefinition(ScalarTypeDefinitionPath<'a>), + ScalarTypeExtension(ScalarTypeExtensionPath<'a>), + SchemaDefinition(SchemaDefinitionPath<'a>), + SchemaExtension(SchemaExtensionPath<'a>), + FieldDefinition(FieldDefinitionPath<'a>), + InputValueDefinition(InputValueDefinitionPath<'a>), +} + +impl<'a> ResolvePosition<'a> for ConstantDirective { + type Parent = ConstantDirectiveParent<'a>; + + fn resolve(&'a self, parent: Self::Parent, position: Span) -> ResolutionPath<'a> { + // Note: We don't currently handle the `@` explicitly. + if self.name.contains(position) { + return self.name.resolve( + IdentParent::ConstantDirectiveName(self.path(parent)), + position, + ); + } + + if let Some(arguments) = &self.arguments { + for argument in &arguments.items { + if argument.contains(position) { + return argument.resolve( + ConstantArgumentParent::ConstantDirective(self.path(parent)), + position, + ); + } + } + } + + ResolutionPath::ConstantDirective(self.path(parent)) + } + + fn contains(&'a self, position: Span) -> bool { + self.span.contains(position) + } +} + #[cfg(test)] mod test; diff --git a/compiler/crates/resolution-path/src/test.rs b/compiler/crates/resolution-path/src/test.rs index a15be9a37bd77..5b45247ae41d1 100644 --- a/compiler/crates/resolution-path/src/test.rs +++ b/compiler/crates/resolution-path/src/test.rs @@ -8,6 +8,7 @@ use common::SourceLocationKey; use common::Span; use graphql_syntax::parse_executable_with_features; +use graphql_syntax::parse_schema_document; use graphql_syntax::FragmentArgumentSyntaxKind; use 
graphql_syntax::ParserFeatures; @@ -37,8 +38,49 @@ pub(super) fn test_resolution(source: &str, sub_str: &str, cb: impl Fn(&Resoluti cb(&resolved); } +pub(super) fn test_schema_resolution( + source: &str, + sub_str: &str, + cb: impl Fn(&ResolutionPath<'_>), +) { + let document = + parse_schema_document(source, SourceLocationKey::standalone("/test/file")).unwrap(); + + let pos = source.find(sub_str).unwrap() as u32; + + // Select the `uri` field + let position_span = Span { + start: pos, + end: pos, + }; + + let resolved = document.resolve((), position_span); + + cb(&resolved); +} + +#[test] +fn operation_definition_operation() { + let source = r#" + query Foo { + me { + id + } + } + "#; + test_resolution(source, "query", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Operation(OperationPath { + inner: (_, OperationKind::Query), + parent: _, + }) + ); + }) +} + #[test] -fn operation_definition_type_condition() { +fn operation_definition_name() { let source = r#" query Foo { me { @@ -58,25 +100,71 @@ fn operation_definition_type_condition() { } #[test] -fn operation_definition_operation() { +fn operation_definition_variable_definition_name() { let source = r#" - query Foo { + query Foo($bar: ID!) { me { id } } "#; - test_resolution(source, "query", |resolved| { + test_resolution(source, "bar", |resolved| { assert_matches!( resolved, - ResolutionPath::Operation(OperationPath { - inner: (_, OperationKind::Query), - parent: _, + ResolutionPath::VariableIdentifier(VariableIdentifierPath { + inner: _, + parent: VariableIdentifierParent::VariableDefinition(VariableDefinitionPath { + inner: _, + parent: VariableDefinitionListPath { + inner: _, + parent: VariableDefinitionListParent::OperationDefinition(_), + }, + }), }) ); }) } +#[test] +fn operation_definition_variable_definition_type() { + let source = r#" + query Foo($bar: ID!) 
{ + me { + id + } + } + "#; + test_resolution(source, "ID!", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(NamedTypeAnnotationPath { + inner: _, + parent: TypeAnnotationPath { + inner: _, + parent: TypeAnnotationParent::NonNullTypeAnnotation(_), + } + }), + }) + ) + }) +} + +#[test] +fn operation_definition_variable_definition_default_value() { + let source = r#" + query Foo($localId: ID! = "1") { + me { + id + } + } + "#; + test_resolution(source, r#""1""#, |resolved| { + assert_matches!(resolved, ResolutionPath::ConstantString(_)); + }) +} + #[test] fn linked_field_name() { let source = r#" @@ -248,6 +336,27 @@ fn fragment_spread_name() { }) } +#[test] +fn fragment_spread_argument_name() { + let source = r#" + fragment Foo on Node { + ...someFragment(someArg: 5) + } + "#; + test_resolution(source, "someArg", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ArgumentName(ArgumentPath { + inner: _, + parent: ArgumentParent::FragmentSpread(_), + }), + }) + ); + }) +} + #[test] fn directive_name() { let source = r#" @@ -313,7 +422,7 @@ fn list_literal() { inner: _, parent: ConstantValuePath { inner: _, - parent: ConstantValueParent::ConstantArgValue(_), + parent: ConstantValueParent::ConstantArgumentValue(_), }, }) ); @@ -321,7 +430,7 @@ fn list_literal() { } #[test] -fn fragment_argument_name() { +fn fragment_argument_definition_name() { let source = r#" fragment Foo($localId: ID!) on User { id @@ -345,7 +454,7 @@ fn fragment_argument_name() { } #[test] -fn fragment_argument_type() { +fn fragment_argument_definition_type() { let source = r#" fragment Foo($localId: ID!) on User { id @@ -369,7 +478,7 @@ fn fragment_argument_type() { } #[test] -fn fragment_argument_default_value() { +fn fragment_argument_definition_default_value() { let source = r#" fragment Foo($localId: ID! 
= "1") on User { id @@ -379,14 +488,1201 @@ fn fragment_argument_default_value() { assert_matches!(resolved, ResolutionPath::ConstantString(_)); }) } + #[test] -fn fragment_argument_directive() { +fn fragment_argument_definition_directive() { let source = r#" - fragment Foo($localId: ID! = "1") on User { + fragment Foo($localId: ID! = "1" @bar) on User { id } "#; - test_resolution(source, r#""1""#, |resolved| { - assert_matches!(resolved, ResolutionPath::ConstantString(_)); + test_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::DirectiveName(_) + }) + ); + }) +} + +// ## Directives + +#[test] +fn directive_definition_name() { + let source = r#" + directive @foo on FIELD + "#; + test_schema_resolution(source, "foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::DirectiveDefinitionName(_), + }) + ); + }) +} + +#[test] +fn directive_definition_argument_name() { + let source = r#" + directive @foo(bar: Baz) on FIELD + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputValueDefinitionName(InputValueDefinitionPath { + inner: _, + parent: InputValueDefinitionParent::DirectiveDefinition(_), + }), + }) + ); + }) +} + +// ## Union Types + +#[test] +fn union_type_definition_name() { + let source = r#" + union Foo = Bar | Baz + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn union_type_definition_member_name() { + let source = r#" + union Foo = Bar | Baz + "#; + test_schema_resolution(source, "Bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeMemberType( + 
UnionTypeMemberParent::UnionTypeDefinition(_), + ) + }) + ); + }) +} + +#[test] +fn union_type_definition_directive() { + let source = r#" + union Foo @bar = Baz + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::UnionTypeDefinition(_), + }) + }) + ); + }) +} + +// ## Union Type Extensions + +#[test] +fn union_type_extension_name() { + let source = r#" + extend union Foo = Bar | Baz + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn union_type_extension_member_name() { + let source = r#" + extend union Foo = Bar | Baz + "#; + test_schema_resolution(source, "Bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::UnionTypeMemberType( + UnionTypeMemberParent::UnionTypeExtension(_), + ) + }) + ); + }) +} + +#[test] +fn union_type_extension_directive() { + let source = r#" + extend union Foo @bar = Baz + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::UnionTypeExtension(_), + }) + }) + ); + }) +} + +// ## Interface Types + +#[test] +fn interface_type_definition_name() { + let source = r#" + interface Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InterfaceTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn interface_type_definition_implements_interface() { + let source = r#" + interface Foo implements 
Bar { + baz: Qux + } + "#; + test_schema_resolution(source, "Bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::InterfaceTypeDefinition(_), + ) + }) + ); + }) +} + +#[test] +fn interface_type_definition_field() { + let source = r#" + interface Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::FieldDefinitionName(FieldDefinitionPath { + inner: _, + parent: FieldDefinitionParent::InterfaceTypeDefinition(_), + }), + }) + ); + }) +} + +#[test] +fn interface_type_definition_directive() { + let source = r#" + interface Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::InterfaceTypeDefinition(_), + }), + }) + ); + }) +} + +// ## Interface Type Extensions + +#[test] +fn interface_type_extension_name() { + let source = r#" + extend interface Foo { + bar: String + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InterfaceTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn interface_type_extension_implements_interface() { + let source = r#" + extend interface Foo implements Bar { + baz: String + } + "#; + test_schema_resolution(source, "Bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::InterfaceTypeExtension(_), + ) + }) + ); + }) +} + +#[test] +fn interface_type_extension_field() { + let source = r#" + extend interface Foo { + bar: String + } + "#; + 
test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::FieldDefinitionName(FieldDefinitionPath { + inner: _, + parent: FieldDefinitionParent::InterfaceTypeExtension(_), + }), + }) + ); + }) +} + +#[test] +fn interface_type_extension_directive() { + let source = r#" + extend interface Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::InterfaceTypeExtension(_), + }), + }) + ); + }) +} + +// ## Object Types + +#[test] +fn object_type_definition_name() { + let source = r#" + type Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ObjectTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn object_type_definition_implements_interface_name() { + let source = r#" + type Foo implements Node { + bar: Baz + } + "#; + test_schema_resolution(source, "Node", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::ObjectTypeDefinition(_) + ) + }) + ); + }) +} + +#[test] +fn object_type_definition_field_name() { + let source = r#" + type Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::FieldDefinitionName(FieldDefinitionPath { + inner: _, + parent: FieldDefinitionParent::ObjectTypeDefinition(_), + }), + }) + ); + }) +} + +#[test] +fn object_type_definition_directive() { + let source = r#" + type Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + 
assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::ObjectTypeDefinition(_), + }), + }) + ); + }) +} + +// ## Object Type Extensions + +#[test] +fn object_type_extension_name() { + let source = r#" + extend type Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ObjectTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn object_type_extension_implements_interface_name() { + let source = r#" + extend type Foo implements Node { + bar: Baz + } + "#; + test_schema_resolution(source, "Node", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ImplementedInterfaceName( + ImplementedInterfaceParent::ObjectTypeExtension(_) + ) + }) + ); + }) +} + +#[test] +fn object_type_extension_field_name() { + let source = r#" + extend type Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::FieldDefinitionName(FieldDefinitionPath { + inner: _, + parent: FieldDefinitionParent::ObjectTypeExtension(_), + }), + }) + ); + }) +} + +#[test] +fn object_type_extension_directive() { + let source = r#" + extend type Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::ObjectTypeExtension(_), + }), + }) + ); + }) +} + +// ## Input Object Types + +#[test] +fn input_object_type_definition_name() { + let source = r#" + input Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + 
assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputObjectTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn input_object_type_definition_field_name() { + let source = r#" + input Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputValueDefinitionName(InputValueDefinitionPath { + inner: _, + parent: InputValueDefinitionParent::InputObjectTypeDefinition(_), + }), + }) + ); + }) +} + +#[test] +fn input_object_type_definition_directive() { + let source = r#" + input Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::InputObjectTypeDefinition(_), + }), + }) + ); + }) +} + +// ## Input Object Type Extensions + +#[test] +fn input_object_type_extension_name() { + let source = r#" + extend input Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputObjectTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn input_object_type_extension_field_name() { + let source = r#" + extend input Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputValueDefinitionName(InputValueDefinitionPath { + inner: _, + parent: InputValueDefinitionParent::InputObjectTypeExtension(_), + }), + }) + ); + }) +} + +#[test] +fn input_object_type_extension_directive() { + let source = r#" + extend input Foo @bar { + baz: Qux + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + 
ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::InputObjectTypeExtension(_), + }), + }) + ); + }) +} + +// ## Enum Types + +#[test] +fn enum_definition_name() { + let source = r#" + enum Foo { + BAR + BAZ + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn enum_definition_value() { + let source = r#" + enum Foo { + BAZ + } + "#; + test_schema_resolution(source, "BAZ", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumValueDefinitionName(EnumValueDefinitionPath { + inner: _, + parent: EnumValueDefinitionParent::EnumTypeDefinition(_), + }), + }) + ); + }) +} + +#[test] +fn enum_definition_directive() { + let source = r#" + enum Foo @bar { + BAZ + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::EnumTypeDefinition(_), + }), + }) + ); + }) +} + +// ## Enum Type Extensions + +#[test] +fn enum_extension_name() { + let source = r#" + extend enum Foo { + BAR + BAZ + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn enum_extension_value() { + let source = r#" + extend enum Foo { + BAZ + } + "#; + test_schema_resolution(source, "BAZ", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::EnumValueDefinitionName(EnumValueDefinitionPath { + inner: _, + parent: 
EnumValueDefinitionParent::EnumTypeExtension(_), + }), + }) + ); + }) +} + +#[test] +fn enum_extension_directive() { + let source = r#" + extend enum Foo @bar { + BAZ + } + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::EnumTypeExtension(_), + }), + }) + ); + }) +} + +// ## Scalar Types + +#[test] +fn scalar_definition_name() { + let source = r#" + scalar Foo + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ScalarTypeDefinitionName(_), + }) + ); + }) +} + +#[test] +fn scalar_definition_directive() { + let source = r#" + scalar Foo @bar + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::ScalarTypeDefinition(_), + }), + }) + ); + }) +} + +// ## Scalar Type Extensions + +#[test] +fn scalar_extension_name() { + let source = r#" + extend scalar Foo + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ScalarTypeExtensionName(_), + }) + ); + }) +} + +#[test] +fn scalar_extension_directive() { + let source = r#" + extend scalar Foo @bar + "#; + test_schema_resolution(source, "bar", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::ScalarTypeExtension(_), + }), + }) + ); + }) +} + +// ## Schema Definition + +#[test] +fn schema_definition_operation_type_type_name() { + let source = r#" + schema { + 
query: Foo + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::OperationTypeDefinitionType(OperationTypeDefinitionPath { + inner: _, + parent: OperationTypeDefinitionParent::SchemaDefinition(_), + }), + }) + ); + }) +} + +#[test] +fn schema_definition_directive() { + let source = r#" + schema @foo { + query: Bar + } + "#; + test_schema_resolution(source, "foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::SchemaDefinition(_), + }), + }) + ); + }) +} + +// ## Schema Extension + +#[test] +fn schema_extension_operation_type_type_name() { + let source = r#" + extend schema { + query: Foo + } + "#; + test_schema_resolution(source, "Foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::OperationTypeDefinitionType(OperationTypeDefinitionPath { + inner: _, + parent: OperationTypeDefinitionParent::SchemaExtension(_), + }), + }) + ); + }) +} + +#[test] +fn schema_extension_directive() { + let source = r#" + extend schema @foo { + query: Bar + } + "#; + test_schema_resolution(source, "foo", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::SchemaExtension(_), + }), + }) + ); + }) +} + +// ## Field Definition + +#[test] +fn field_definition_named_type() { + let source = r#" + type Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(_), + }) + ); + }) +} + +#[test] +fn field_definition_non_null_type() { + let source = r#" + type Foo { + 
bar: Baz! + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(NamedTypeAnnotationPath { + inner: _, + parent: TypeAnnotationPath { + inner: _, + parent: TypeAnnotationParent::NonNullTypeAnnotation(_), + } + }), + }) + ); + }) +} + +#[test] +fn field_definition_list_type() { + let source = r#" + type Foo { + bar: [Baz] + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(NamedTypeAnnotationPath { + inner: _, + parent: TypeAnnotationPath { + inner: _, + parent: TypeAnnotationParent::ListTypeAnnotation(_), + } + }), + }) + ); + }) +} + +#[test] +fn field_definition_directive() { + let source = r#" + type Foo { + bar: Baz @qux + } + "#; + test_schema_resolution(source, "qux", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::FieldDefinition(_) + }), + }) + ); + }) +} + +#[test] +fn field_definition_argument() { + let source = r#" + type Foo { + bar(baz: Qux): Quux + } + "#; + test_schema_resolution(source, "baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::InputValueDefinitionName(InputValueDefinitionPath { + inner: _, + parent: InputValueDefinitionParent::FieldDefinition(_), + }) + }) + ); + }) +} + +// ## Input Value Definition + +#[test] +fn input_value_definition_named_type() { + let source = r#" + input Foo { + bar: Baz + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(_), + }) + ); + }) +} + +#[test] +fn input_value_definition_non_null_type() { + 
let source = r#" + input Foo { + bar: Baz! + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(NamedTypeAnnotationPath { + inner: _, + parent: TypeAnnotationPath { + inner: _, + parent: TypeAnnotationParent::NonNullTypeAnnotation(_), + } + }), + }) + ); + }) +} + +#[test] +fn input_value_definition_list_type() { + let source = r#" + input Foo { + bar: [Baz] + } + "#; + test_schema_resolution(source, "Baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::NamedTypeAnnotation(NamedTypeAnnotationPath { + inner: _, + parent: TypeAnnotationPath { + inner: _, + parent: TypeAnnotationParent::ListTypeAnnotation(_), + } + }), + }) + ); + }) +} + +#[test] +fn input_value_definition_default_value() { + let source = r#" + input Foo { + bar: Baz = 5 + } + "#; + test_schema_resolution(source, "5", |resolved| { + assert_matches!( + resolved, + ResolutionPath::ConstantInt(ConstantIntPath { + inner: _, + parent: ConstantValuePath { + inner: _, + parent: ConstantValueParent::DefaultValue(DefaultValuePath { + inner: _, + parent: DefaultValueParent::InputValueDefinition(_), + }) + } + }) + ); + }) +} + +#[test] +fn input_value_definition_directive() { + let source = r#" + input Foo { + bar: Baz @qux + } + "#; + test_schema_resolution(source, "qux", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::InputValueDefinition(_) + }), + }) + ); + }) +} + +// ## Enum Value Definition + +#[test] +fn enum_value_definition_directive() { + let source = r#" + enum Foo { + BAR @baz + } + "#; + test_schema_resolution(source, "baz", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: 
IdentParent::ConstantDirectiveName(ConstantDirectivePath { + inner: _, + parent: ConstantDirectiveParent::EnumValueDefinition(_), + }), + }) + ); + }) +} + +// ## Constant Directive + +#[test] +fn constant_directive_argument_name() { + let source = r#" + enum Foo { + BAR @baz(qux: 5) + } + "#; + test_schema_resolution(source, "qux", |resolved| { + assert_matches!( + resolved, + ResolutionPath::Ident(IdentPath { + inner: _, + parent: IdentParent::ConstantArgumentKey(ConstantArgumentPath { + inner: _, + parent: ConstantArgumentParent::ConstantDirective(_), + }), + }) + ); + }) +} + +#[test] +fn constant_directive_argument_value() { + let source = r#" + enum Foo { + BAR @baz(qux: 5) + } + "#; + test_schema_resolution(source, "5", |resolved| { + assert_matches!( + resolved, + ResolutionPath::ConstantInt(ConstantIntPath { + inner: _, + parent: ConstantValuePath { + inner: _, + parent: ConstantValueParent::ConstantArgumentValue(ConstantArgumentPath { + inner: _, + parent: ConstantArgumentParent::ConstantDirective(_), + }) + } + }) + ); }) } diff --git a/compiler/crates/resolution-path/src/variable_definition_path.rs b/compiler/crates/resolution-path/src/variable_definition_path.rs index 47826eaffa514..9fb4bbdf477f8 100644 --- a/compiler/crates/resolution-path/src/variable_definition_path.rs +++ b/compiler/crates/resolution-path/src/variable_definition_path.rs @@ -8,10 +8,10 @@ use super::*; impl<'a> TypeAnnotationParent<'a> { - pub fn find_variable_definition_path(&'a self) -> &'a VariableDefinitionPath<'a> { + pub fn find_variable_definition_path(&'a self) -> Option<&'a VariableDefinitionPath<'a>> { match self { TypeAnnotationParent::VariableDefinition(variable_definition_path) => { - variable_definition_path + Some(variable_definition_path) } TypeAnnotationParent::ListTypeAnnotation(ListTypeAnnotationPath { inner: _, @@ -25,6 +25,8 @@ impl<'a> TypeAnnotationParent<'a> { }) => non_null_type_annotation_parent .parent .find_variable_definition_path(), + 
TypeAnnotationParent::FieldDefinition(FieldDefinitionPath { .. }) => None, + TypeAnnotationParent::InputValueDefinition(InputValueDefinitionPath { .. }) => None, } } } @@ -57,8 +59,9 @@ mod tests { }), }) = resolved { - let variable_definition_path = - type_annotation_parent.find_variable_definition_path(); + let variable_definition_path = type_annotation_parent + .find_variable_definition_path() + .unwrap(); assert!(variable_definition_path.inner.name.name.lookup() == "isCool"); } else { panic!( @@ -90,8 +93,9 @@ mod tests { }), }) = resolved { - let variable_definition_path = - type_annotation_parent.find_variable_definition_path(); + let variable_definition_path = type_annotation_parent + .find_variable_definition_path() + .unwrap(); assert!(variable_definition_path.inner.name.name.lookup() == "isCool"); } else { panic!( @@ -123,8 +127,9 @@ mod tests { }), }) = resolved { - let variable_definition_path = - type_annotation_parent.find_variable_definition_path(); + let variable_definition_path = type_annotation_parent + .find_variable_definition_path() + .unwrap(); assert!(variable_definition_path.inner.name.name.lookup() == "isCool"); } else { panic!( @@ -156,8 +161,9 @@ mod tests { }), }) = resolved { - let variable_definition_path = - type_annotation_parent.find_variable_definition_path(); + let variable_definition_path = type_annotation_parent + .find_variable_definition_path() + .unwrap(); assert!(variable_definition_path.inner.name.name.lookup() == "isCool"); } else { panic!( diff --git a/compiler/crates/schema-diff/Cargo.toml b/compiler/crates/schema-diff/Cargo.toml index 47f49a5217c47..921d92ceb5eba 100644 --- a/compiler/crates/schema-diff/Cargo.toml +++ b/compiler/crates/schema-diff/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema-diff:[schema-diff,schema-diff_tests] + [package] name = "schema-diff" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = 
"MIT" [[test]] @@ -17,4 +19,5 @@ graphql-syntax = { path = "../graphql-syntax" } intern = { path = "../intern" } lazy_static = "1.4" relay-config = { path = "../relay-config" } +rustc-hash = "1.1.0" schema = { path = "../schema" } diff --git a/compiler/crates/schema-diff/src/check.rs b/compiler/crates/schema-diff/src/check.rs index 7448a19dde133..211aad6dbe988 100644 --- a/compiler/crates/schema-diff/src/check.rs +++ b/compiler/crates/schema-diff/src/check.rs @@ -5,11 +5,15 @@ * LICENSE file in the root directory of this source tree. */ +use std::collections::HashSet; +use std::fmt; + use common::InterfaceName; use intern::string_key::Intern; use intern::string_key::StringKey; use lazy_static::lazy_static; use relay_config::SchemaConfig; +use rustc_hash::FxHashSet; use schema::SDLSchema; use schema::Schema; @@ -19,34 +23,90 @@ use crate::definitions::SchemaChange; use crate::definitions::Type; use crate::definitions::TypeChange; -/// Return if the changes are safe to skip full rebuild. +// This enum is very similar to the schema Type enum but uses StringKey instead of id +#[derive(Eq, PartialEq, Hash)] +pub enum IncrementalBuildSchemaChange { + Enum(StringKey), + Object(StringKey), + Union(StringKey), + Interface(StringKey), +} + +impl fmt::Debug for IncrementalBuildSchemaChange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + IncrementalBuildSchemaChange::Enum(name) => write!(f, "enum({})", name), + IncrementalBuildSchemaChange::Object(name) => write!(f, "object({})", name), + IncrementalBuildSchemaChange::Union(name) => write!(f, "union({})", name), + IncrementalBuildSchemaChange::Interface(name) => write!(f, "interface({})", name), + } + } +} + +#[derive(PartialEq)] +pub enum SchemaChangeSafety { + Unsafe, + SafeWithIncrementalBuild(FxHashSet), + Safe, +} + +impl fmt::Debug for SchemaChangeSafety { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SchemaChangeSafety::Unsafe => write!(f, "Unsafe"), + 
SchemaChangeSafety::SafeWithIncrementalBuild(changes) => { + write!(f, "SafeWithIncrementalBuild({:?})", changes) + } + SchemaChangeSafety::Safe => write!(f, "Safe"), + } + } +} + +// Return if the changes are safe to skip full rebuild or need an incremental build. impl SchemaChange { - pub fn is_safe(self: &SchemaChange, schema: &SDLSchema, schema_config: &SchemaConfig) -> bool { + pub fn get_safety( + self: SchemaChange, + schema: &SDLSchema, + schema_config: &SchemaConfig, + ) -> SchemaChangeSafety { + let mut needs_incremental_build: FxHashSet = + HashSet::default(); match self { - SchemaChange::None => true, - SchemaChange::GenericChange => false, - SchemaChange::InvalidSchema => false, + SchemaChange::None => SchemaChangeSafety::Safe, + SchemaChange::GenericChange => SchemaChangeSafety::Unsafe, + SchemaChange::InvalidSchema => SchemaChangeSafety::Unsafe, SchemaChange::DefinitionChanges(changes) => { for change in changes { match change { DefinitionChange::ObjectChanged { + name, changed, added, removed, interfaces_added, interfaces_removed, - .. } => { + let id_name = schema_config.node_interface_id_field; if !interfaces_added.is_empty() || !interfaces_removed.is_empty() - || !is_field_changes_safe( - added, - removed, - changed, - schema_config.node_interface_id_field, - ) + || !is_field_changes_safe(&added, &removed, &changed, id_name) { - return false; + needs_incremental_build + .insert(IncrementalBuildSchemaChange::Object(name)); + + let interfaces_changed: Vec = interfaces_added + .into_iter() + .chain(interfaces_removed.into_iter()) + .collect(); + add_interfaces_for_incremental_build( + schema, + &mut needs_incremental_build, + name, + &interfaces_changed, + &added, + &removed, + id_name, + ); } } DefinitionChange::InterfaceChanged { @@ -56,17 +116,17 @@ impl SchemaChange { .. 
} => { if !is_field_changes_safe( - added, - removed, - changed, + &added, + &removed, + &changed, schema_config.node_interface_id_field, ) { - return false; + return SchemaChangeSafety::Unsafe; } } DefinitionChange::ObjectAdded(name) => { - if !is_object_add_safe(*name, schema, schema_config) { - return false; + if !is_object_add_safe(name, schema, schema_config) { + return SchemaChangeSafety::Unsafe; } } // safe changes @@ -76,19 +136,29 @@ impl SchemaChange { | DefinitionChange::UnionAdded(_) | DefinitionChange::InputObjectAdded(_) => {} + // safe with incremental build changes + DefinitionChange::EnumChanged { name } + | DefinitionChange::EnumRemoved(name) => { + needs_incremental_build + .insert(IncrementalBuildSchemaChange::Enum(name)); + } + // unsafe changes - DefinitionChange::EnumChanged { .. } - | DefinitionChange::EnumRemoved(_) - | DefinitionChange::UnionChanged { .. } + DefinitionChange::UnionChanged { .. } | DefinitionChange::UnionRemoved(_) | DefinitionChange::ScalarRemoved(_) | DefinitionChange::InputObjectChanged { .. 
} | DefinitionChange::InputObjectRemoved(_) | DefinitionChange::InterfaceRemoved(_) - | DefinitionChange::ObjectRemoved(_) => return false, + | DefinitionChange::ObjectRemoved(_) => return SchemaChangeSafety::Unsafe, } } - true + if needs_incremental_build.is_empty() { + SchemaChangeSafety::Safe + } else { + add_unions_for_incremental_build(schema, &mut needs_incremental_build); + SchemaChangeSafety::SafeWithIncrementalBuild(needs_incremental_build) + } } } } @@ -166,3 +236,52 @@ fn is_field_changes_safe( } true } + +fn add_interfaces_for_incremental_build( + schema: &SDLSchema, + needs_incremental_build: &mut FxHashSet, + obj_name: StringKey, + interfaces_changed: &[StringKey], + added: &[TypeChange], + removed: &[TypeChange], + id_name: StringKey, +) { + if let Some(schema::Type::Object(id)) = schema.get_type(obj_name) { + let object = schema.object(id); + + let is_id_added = added.iter().any(|field| field.name == id_name); + let is_id_removed = removed.iter().any(|field| field.name == id_name); + let has_id = object + .fields + .iter() + .any(|field_id| schema.field(*field_id).name.item == id_name); + if !is_id_added && !is_id_removed && !has_id { + // no id on this object so no changes to interfaces needed + return; + } + + let object_interfaces = object + .interfaces + .iter() + .map(|id| &schema.interface(*id).name.item.0); + let all_interfaces = interfaces_changed + .iter() + .chain(object_interfaces) + .map(|interface| IncrementalBuildSchemaChange::Interface(*interface)); + needs_incremental_build.extend(all_interfaces); + } +} + +fn add_unions_for_incremental_build( + schema: &SDLSchema, + needs_incremental_build: &mut FxHashSet, +) { + for u in schema.unions() { + if u.members.iter().any(|obj_id| { + let obj_name = schema.object(*obj_id).name.item.0; + needs_incremental_build.contains(&IncrementalBuildSchemaChange::Object(obj_name)) + }) { + needs_incremental_build.insert(IncrementalBuildSchemaChange::Union(u.name.item.0)); + } + } +} diff --git 
a/compiler/crates/schema-diff/src/definitions.rs b/compiler/crates/schema-diff/src/definitions.rs index f4cb099651e11..e96dd994de2a8 100644 --- a/compiler/crates/schema-diff/src/definitions.rs +++ b/compiler/crates/schema-diff/src/definitions.rs @@ -171,6 +171,17 @@ pub enum SchemaChange { DefinitionChanges(Vec), } +impl fmt::Display for SchemaChange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SchemaChange::None => write!(f, "None"), + SchemaChange::GenericChange => write!(f, "GenericChange"), + SchemaChange::InvalidSchema => write!(f, "InvalidSchema"), + SchemaChange::DefinitionChanges(changes) => write!(f, "{:?}", changes), + } + } +} + impl fmt::Debug for SchemaChange { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { diff --git a/compiler/crates/schema-diff/src/lib.rs b/compiler/crates/schema-diff/src/lib.rs index acec865dd0d70..f60e7ecfed8ff 100644 --- a/compiler/crates/schema-diff/src/lib.rs +++ b/compiler/crates/schema-diff/src/lib.rs @@ -9,8 +9,7 @@ #![deny(rust_2018_idioms)] #![deny(clippy::all)] -mod check; - +pub mod check; pub mod definitions; use common::SourceLocationKey; use fnv::FnvHashMap; @@ -527,7 +526,7 @@ fn diff(current: Vec, previous: Vec) } } - for (_, definition) in current_map.drain().into_iter() { + for (_, definition) in current_map.drain() { add_definition(&mut changes, definition); } diff --git a/compiler/crates/schema-diff/tests/diff_schema_tests.rs b/compiler/crates/schema-diff/tests/diff_schema_tests.rs index 33195301adc14..4817bfcd3122e 100644 --- a/compiler/crates/schema-diff/tests/diff_schema_tests.rs +++ b/compiler/crates/schema-diff/tests/diff_schema_tests.rs @@ -6,7 +6,10 @@ */ use intern::string_key::Intern; +use rustc_hash::FxHashSet; use schema::build_schema; +use schema_diff::check::IncrementalBuildSchemaChange; +use schema_diff::check::SchemaChangeSafety; use schema_diff::definitions::*; use schema_diff::*; @@ -16,10 +19,10 @@ fn diff(current: &str, previous: 
&str) -> SchemaChange { change } -fn is_safe(current: &str, previous: &str) -> bool { +fn get_safety(current: &str, previous: &str) -> SchemaChangeSafety { let schema = build_schema(current).unwrap(); let change = detect_changes(&[current], &[previous]); - change.is_safe(&schema, &Default::default()) + change.get_safety(&schema, &Default::default()) } #[test] @@ -727,8 +730,9 @@ fn test_change_type_enum_union() { #[test] fn test_add_object_without_id() { - assert!(is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } @@ -736,18 +740,21 @@ fn test_add_object_without_id() { key: String } #", - r" + r" type A { key: String } #" - )) + ), + SchemaChangeSafety::Safe + ) } #[test] fn test_change_object_interface() { - assert!(!is_safe( - r" + assert_eq!( + get_safety( + r" type A implements B { key: String } @@ -755,7 +762,7 @@ fn test_change_object_interface() { id: ID! } #", - r" + r" type A { key: String } @@ -763,13 +770,18 @@ fn test_change_object_interface() { id: ID! } #" - )) + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()) + ])) + ) } #[test] fn test_add_object_with_id_node_interface() { - assert!(is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } @@ -781,7 +793,7 @@ fn test_add_object_with_id_node_interface() { id: ID! } #", - r" + r" type A { key: String } @@ -789,56 +801,72 @@ fn test_add_object_with_id_node_interface() { id: ID! 
} #" - )) + ), + SchemaChangeSafety::Safe + ) } #[test] fn test_object_special_field_added() { - assert!(is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String foo: String # regular field is okay } #", - r" + r" type A { key: String } #", - )); - assert!(!is_safe( - r" + ), + SchemaChangeSafety::Safe + ); + assert_eq!( + get_safety( + r" type A { key: String id: String # id field is breaking } #", - r" + r" type A { key: String } #", - )); - assert!(!is_safe( - r" + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()) + ])) + ); + assert_eq!( + get_safety( + r" type A { key: String js: String # js field is breaking } #", - r" + r" type A { key: String } #", - )); + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()) + ])) + ); } #[test] fn test_add_type_with_id_actor_interface() { - assert!(!is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } @@ -850,7 +878,7 @@ fn test_add_type_with_id_actor_interface() { name: String } #", - r" + r" type A { key: String } @@ -858,64 +886,80 @@ fn test_add_type_with_id_actor_interface() { name: String } #" - )) + ), + SchemaChangeSafety::Unsafe + ) } #[test] fn test_add_optional_field_arg() { - assert!(is_safe( - r" + assert_eq!( + get_safety( + r" type A { key(a: ID): String } #", - r" + r" type A { key: String } #" - )) + ), + SchemaChangeSafety::Safe + ) } #[test] fn test_add_required_field_arg() { - assert!(!is_safe( - r" + assert_eq!( + get_safety( + r" type A { key(a: ID!): String } #", - r" + r" type A { key: String } #" - )) + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()) + ])) + ) } #[test] fn test_remove_field_arg() { - assert!(!is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } #", - r" + r" type A { key(a: ID): String } #" - )) + ), + 
SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()) + ])) + ) } #[test] fn test_add_safe_types() { - assert!(is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } @@ -935,18 +979,21 @@ fn test_add_safe_types() { } scalar Mark #", - r" + r" type A { key: String } #" - )) + ), + SchemaChangeSafety::Safe + ) } #[test] fn test_unimplemented_changes() { - assert!(!is_safe( - r" + assert_eq!( + get_safety( + r" type A { key: String } @@ -954,12 +1001,380 @@ fn test_unimplemented_changes() { name: String } #", - r" + r" type A { key: String } #" - )) + ), + SchemaChangeSafety::Unsafe + ) +} + +#[test] +fn test_add_enum() { + assert_eq!( + get_safety( + r" + enum A { + OK + MAYBE + NOT_OK + } + enum B { + OTHER + } + #", + r" + enum A { + OK + MAYBE + NOT_OK + } + #", + ), + SchemaChangeSafety::Safe + ); +} + +#[test] +fn test_enums_safe_with_incremental_build_changes() { + // Change enum + assert_eq!( + get_safety( + r" + enum A { + OK + NOT_OK + MAYBE + } + #", + r" + enum A { + OK + MAYBE + NOT_OK + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Enum("A".intern()) + ])) + ); + + // Add enum value + assert_eq!( + get_safety( + r" + enum A { + OK + NOT_OK + MAYBE + } + #", + r" + enum A { + OK + NOT_OK + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Enum("A".intern()) + ])) + ); + + // Delete enum + assert_eq!( + get_safety( + r" + enum B { + OTHER + } + #", + r" + enum A { + OK + NOT_OK + MAYBE + } + enum B { + OTHER + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Enum("A".intern()) + ])) + ); +} + +#[test] +fn test_unions_safe_with_incremental_build_changes() { + // Change object in union + assert_eq!( + get_safety( + r" + union U = A | B + type A { + A1: String + } + type B { + B1: 
String + } + #", + r" + union U = A | B + type A { + A1: String + } + type B { + B1: String + B2: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("B".intern()), + IncrementalBuildSchemaChange::Union("U".intern()), + ])) + ); + + // Non-union objects don't affect the union + assert_eq!( + get_safety( + r" + union U = A | B + type A { + A1: String + } + type B { + B1: String + } + type C { + C1: String + } + #", + r" + union U = A | B + type A { + A1: String + } + type B { + B1: String + } + type C { + C1: String + C2: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("C".intern()), + ])) + ); +} + +#[test] +fn test_interfaces_safe_with_incremental_build_changes() { + // Changing interfaces on an object without an id doesn't rebuild interfaces + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A implements I { + A1: String + x: String + } + #", + r" + type A { + A1: String + x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + ])) + ); + // Changing interfaces on an object with an id rebuilds the interface + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A implements I { + id: ID! + x: String + } + #", + r" + interface I { + x: String + } + type A { + id: ID! + x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + ])) + ); + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A { + id: ID! + x: String + } + #", + r" + interface I { + x: String + } + type A implements I { + id: ID! 
+ x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + ])) + ); + // Rebuild the correct interfaces + assert_eq!( + get_safety( + r" + interface I { + x: String + } + interface I2 { + y: String + } + interface I3 { + z: String + } + type A implements I & I2 { + id: ID! + x: String + y: String + } + #", + r" + interface I { + x: String + } + interface I2 { + y: String + } + interface I3 { + z: String + } + type A implements I { + id: ID! + x: String + y: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + IncrementalBuildSchemaChange::Interface("I2".intern()), + ])) + ); + // Adding an id to an object with interfaces rebuilds the interface + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A implements I { + id: ID! + x: String + } + #", + r" + interface I { + x: String + } + type A implements I { + x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + ])) + ); + // Adding an id and interface together rebuilds the interface + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A implements I { + id: ID! 
+ x: String + } + #", + r" + interface I { + x: String + } + type A { + x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + ])) + ); + // Editing an id on an object with an interface rebuilds the interface + // Note: this is treated as an id add + remove by the compiler + assert_eq!( + get_safety( + r" + interface I { + x: String + } + type A implements I { + id: ID! + x: String + } + #", + r" + interface I { + x: String + } + type A implements I { + id: String + x: String + } + #", + ), + SchemaChangeSafety::SafeWithIncrementalBuild(FxHashSet::from_iter([ + IncrementalBuildSchemaChange::Object("A".intern()), + IncrementalBuildSchemaChange::Interface("I".intern()), + ])) + ); } fn sort_change(change: &mut SchemaChange) { diff --git a/compiler/crates/schema-documentation/Cargo.toml b/compiler/crates/schema-documentation/Cargo.toml index f5d86cc165225..1422134541cf4 100644 --- a/compiler/crates/schema-documentation/Cargo.toml +++ b/compiler/crates/schema-documentation/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema-documentation:schema-documentation + [package] name = "schema-documentation" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] diff --git a/compiler/crates/schema-documentation/src/combined_schema_documentation.rs b/compiler/crates/schema-documentation/src/combined_schema_documentation.rs index 0070eef1acb52..789aeb9b93da9 100644 --- a/compiler/crates/schema-documentation/src/combined_schema_documentation.rs +++ b/compiler/crates/schema-documentation/src/combined_schema_documentation.rs @@ -43,9 +43,19 @@ impl SchemaDocum .get_field_argument_description(type_name, field_name, argument_name) }) } + fn get_hack_source(&self, type_name: &str) -> Option<&str> { + self.primary + 
.get_hack_source(type_name) + .or_else(|| self.secondary.get_hack_source(type_name)) + } + fn get_field_hack_source(&self, type_name: &str, field_name: &str) -> Option<&str> { + self.primary + .get_field_hack_source(type_name, field_name) + .or_else(|| self.secondary.get_field_hack_source(type_name, field_name)) + } } -impl<'a, TPrimary: SchemaDocumentation, TSecondary: SchemaDocumentation> +impl CombinedSchemaDocumentation { pub fn new(primary: TPrimary, secondary: TSecondary) -> Self { diff --git a/compiler/crates/schema-documentation/src/lib.rs b/compiler/crates/schema-documentation/src/lib.rs index 1dcc4a7d12f2b..36775fa95d3c4 100644 --- a/compiler/crates/schema-documentation/src/lib.rs +++ b/compiler/crates/schema-documentation/src/lib.rs @@ -31,6 +31,12 @@ pub trait SchemaDocumentation: Send + Sync { ) -> Option<&str> { None } + fn get_hack_source(&self, _type_name: &str) -> Option<&str> { + None + } + fn get_field_hack_source(&self, _type_name: &str, _field_name: &str) -> Option<&str> { + None + } } // This can probably be implemented more generically for AsRef @@ -52,6 +58,12 @@ impl SchemaDocumentation for Arc Option<&str> { + self.as_ref().get_hack_source(type_name) + } + fn get_field_hack_source(&self, type_name: &str, field_name: &str) -> Option<&str> { + self.as_ref().get_field_hack_source(type_name, field_name) + } } impl SchemaDocumentation @@ -76,4 +88,11 @@ impl SchemaDocumentation self.as_ref() .and_then(|s| s.get_field_argument_description(type_name, field_name, argument_name)) } + fn get_hack_source(&self, type_name: &str) -> Option<&str> { + self.as_ref().and_then(|s| s.get_hack_source(type_name)) + } + fn get_field_hack_source(&self, type_name: &str, field_name: &str) -> Option<&str> { + self.as_ref() + .and_then(|s| s.get_field_hack_source(type_name, field_name)) + } } diff --git a/compiler/crates/schema-documentation/src/sdl_schema_impl.rs b/compiler/crates/schema-documentation/src/sdl_schema_impl.rs index bf509c9195987..100b80b3f7a8f 100644 
--- a/compiler/crates/schema-documentation/src/sdl_schema_impl.rs +++ b/compiler/crates/schema-documentation/src/sdl_schema_impl.rs @@ -44,11 +44,25 @@ impl SchemaDocumentation for SDLSchema { field .arguments .iter() - .find(|argument| argument.name.0 == argument_name_string_key) + .find(|argument| argument.name.item.0 == argument_name_string_key) }) .and_then(|argument| argument.description) .map(|string_key| string_key.lookup()) } + + fn get_hack_source(&self, type_name: &str) -> Option<&str> { + self.get_type(type_name.intern()) + .and_then(|type_| get_hack_source_from_type(type_, self)) + .map(|string_key| string_key.lookup()) + } + + fn get_field_hack_source(&self, type_name: &str, field_name: &str) -> Option<&str> { + let field_name_string_key = field_name.intern(); + self.get_type(type_name.intern()) + .and_then(|type_| get_field_from_type(type_, self, field_name_string_key)) + .and_then(|field| field.hack_source) + .map(|string_key| string_key.lookup()) + } } fn get_description_from_type(type_: Type, schema: &SDLSchema) -> Option { @@ -81,3 +95,14 @@ fn get_field_from_type(type_: Type, schema: &SDLSchema, field_name: StringKey) - } }) } + +fn get_hack_source_from_type(type_: Type, schema: &SDLSchema) -> Option { + match type_ { + Type::Enum(id) => schema.enum_(id).hack_source, + Type::InputObject(id) => schema.input_object(id).hack_source, + Type::Interface(id) => schema.interface(id).hack_source, + Type::Object(id) => schema.object(id).hack_source, + Type::Scalar(id) => schema.scalar(id).hack_source, + Type::Union(id) => schema.union(id).hack_source, + } +} diff --git a/compiler/crates/schema-extractor/Cargo.toml b/compiler/crates/schema-extractor/Cargo.toml new file mode 100644 index 0000000000000..44f1b30dbb8d8 --- /dev/null +++ b/compiler/crates/schema-extractor/Cargo.toml @@ -0,0 +1,19 @@ +# @generated by autocargo from //relay/oss/crates/schema-extractor:schema-extractor + +[package] +name = "schema-extractor" +version = "0.0.0" +authors = 
["Facebook"] +edition = "2021" +repository = "https://github.com/facebook/relay" +license = "MIT" + +[dependencies] +common = { path = "../common" } +hermes_comments = { git = "https://github.com/facebook/hermes.git" } +hermes_estree = { git = "https://github.com/facebook/hermes.git" } +hermes_parser = { git = "https://github.com/facebook/hermes.git" } +intern = { path = "../intern" } +rustc-hash = "1.1.0" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" diff --git a/compiler/crates/schema-extractor/src/errors.rs b/compiler/crates/schema-extractor/src/errors.rs new file mode 100644 index 0000000000000..1547c0c83a1d8 --- /dev/null +++ b/compiler/crates/schema-extractor/src/errors.rs @@ -0,0 +1,25 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use thiserror::Error; + +#[derive( + Clone, + Copy, + Debug, + Error, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + serde::Serialize +)] +pub enum ExtractError { + #[error("Unsupported type")] + UnsupportedType, +} diff --git a/compiler/crates/schema-extractor/src/lib.rs b/compiler/crates/schema-extractor/src/lib.rs new file mode 100644 index 0000000000000..685b2f5c57304 --- /dev/null +++ b/compiler/crates/schema-extractor/src/lib.rs @@ -0,0 +1,99 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +#![deny(warnings)] +#![deny(rust_2018_idioms)] +#![deny(clippy::all)] + +mod errors; + +use ::intern::string_key::Intern; +use ::intern::string_key::StringKey; +use common::Diagnostic; +use common::DiagnosticDisplay; +use common::DiagnosticsResult; +use common::Location; +use common::WithLocation; +use errors::ExtractError; +use hermes_estree::FlowTypeAnnotation; +use hermes_estree::GenericTypeAnnotation; +use hermes_estree::ObjectTypeAnnotation; +use hermes_estree::ObjectTypePropertyKey; +use hermes_estree::ObjectTypePropertyType; +use hermes_estree::Range; +use hermes_estree::TypeAnnotation as HermesTypeAnnotation; +use hermes_estree::TypeAnnotationEnum; +use hermes_estree::TypeIdentifier; +use rustc_hash::FxHashMap; + +pub trait SchemaExtractor { + fn to_location(&self, node: &T) -> Location; + + fn error_result( + &self, + message: M, + node: &T, + ) -> DiagnosticsResult { + Err(vec![Diagnostic::error(message, self.to_location(node))]) + } + + fn unwrap_annotation_enum<'a>( + &self, + node: &'a HermesTypeAnnotation, + ) -> DiagnosticsResult<&'a FlowTypeAnnotation> { + if let TypeAnnotationEnum::FlowTypeAnnotation(type_) = &node.type_annotation { + Ok(type_) + } else { + self.error_result(ExtractError::UnsupportedType, node) + } + } + + fn get_object_fields<'a>( + &self, + node: &'a ObjectTypeAnnotation, + ) -> DiagnosticsResult, &'a FlowTypeAnnotation>>> { + let mut field_map: Box, &FlowTypeAnnotation>> = + Box::default(); + for property in node.properties.iter() { + if let ObjectTypePropertyType::ObjectTypeProperty(prop) = property { + let location = self.to_location(&prop.key); + if let ObjectTypePropertyKey::Identifier(id) = &prop.key { + let name = WithLocation { + item: (&id.name).intern(), + location, + }; + field_map.insert(name, &prop.value); + } + } + } + Ok(field_map) + } +} + +pub fn unwrap_nullable_type(node: &FlowTypeAnnotation) -> (&FlowTypeAnnotation, bool) { + if let FlowTypeAnnotation::NullableTypeAnnotation(type_) = node { + 
(&type_.type_annotation, true) + } else { + (node, false) + } +} + +pub fn get_identifier_for_flow_generic( + node: WithLocation<&GenericTypeAnnotation>, +) -> DiagnosticsResult> { + match &node.item.id { + TypeIdentifier::Identifier(id) => Ok(WithLocation { + item: (&id.name).intern(), + location: node.location, + }), + TypeIdentifier::QualifiedTypeIdentifier(_) + | TypeIdentifier::QualifiedTypeofIdentifier(_) => Err(vec![Diagnostic::error( + ExtractError::UnsupportedType, + node.location, + )]), + } +} diff --git a/compiler/crates/schema-flatbuffer/Cargo.toml b/compiler/crates/schema-flatbuffer/Cargo.toml index e87f22ab90e90..025d79633b647 100644 --- a/compiler/crates/schema-flatbuffer/Cargo.toml +++ b/compiler/crates/schema-flatbuffer/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema-flatbuffer:schema-flatbuffer + [package] name = "schema-flatbuffer" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] diff --git a/compiler/crates/schema-print/Cargo.toml b/compiler/crates/schema-print/Cargo.toml index cf9fb75d4268c..c7579ebc92991 100644 --- a/compiler/crates/schema-print/Cargo.toml +++ b/compiler/crates/schema-print/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema-print:[schema-print,schema-print_test] + [package] name = "schema-print" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -13,8 +15,9 @@ path = "tests/print_schema_test.rs" [dependencies] fnv = "1.0" intern = { path = "../intern" } -itertools = "0.10.3" +itertools = "0.11.0" schema = { path = "../schema" } [dev-dependencies] fixture-tests = { path = "../fixture-tests" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/schema-print/src/print_schema.rs b/compiler/crates/schema-print/src/print_schema.rs index 
d8d3b8fdfba9f..c5264d8a3978a 100644 --- a/compiler/crates/schema-print/src/print_schema.rs +++ b/compiler/crates/schema-print/src/print_schema.rs @@ -170,7 +170,7 @@ struct Printer<'schema, 'writer> { type_writers_index: Option<(StringKey, usize)>, } -impl<'schema, 'writer, 'curent_writer> Printer<'schema, 'writer> { +impl<'schema, 'writer> Printer<'schema, 'writer> { fn new(schema: &'schema SDLSchema, writers: &'writer mut Vec) -> Self { Self { schema, @@ -242,7 +242,7 @@ impl<'schema, 'writer, 'curent_writer> Printer<'schema, 'writer> { } fn print_directive(&mut self, directive: &Directive) -> FmtResult { - write!(self.writer(), "directive @{}", directive.name)?; + write!(self.writer(), "directive @{}", directive.name.item)?; self.print_args(&directive.arguments)?; write!( self.writer(), @@ -303,7 +303,7 @@ impl<'schema, 'writer, 'curent_writer> Printer<'schema, 'writer> { fn print_union(&mut self, id: UnionID) -> FmtResult { let union_ = self.schema.union(id); - write!(self.writer(), "union {}", union_.name.item)?; + write!(self.writer(), "union {}", union_.name.item.0)?; self.print_directive_values(&union_.directives)?; if !union_.members.is_empty() { let union_members = union_ @@ -372,10 +372,11 @@ impl<'schema, 'writer, 'curent_writer> Printer<'schema, 'writer> { write!(self.writer(), ", ")?; } let type_string = self.schema.get_type_string(&arg.type_); - write!(self.writer(), "{}: {}", arg.name, type_string,)?; + write!(self.writer(), "{}: {}", arg.name.item, type_string,)?; if let Some(default) = &arg.default_value { write!(self.writer(), " = {}", default,)?; } + self.print_directive_values(&arg.directives)?; } write!(self.writer(), ")") } @@ -402,10 +403,11 @@ impl<'schema, 'writer, 'curent_writer> Printer<'schema, 'writer> { self.print_new_line()?; for arg in args.iter() { let type_string = self.schema.get_type_string(&arg.type_); - write!(self.writer(), " {}: {}", arg.name, type_string,)?; + write!(self.writer(), " {}: {}", arg.name.item, type_string,)?; 
if let Some(default) = &arg.default_value { write!(self.writer(), " = {}", default,)?; } + self.print_directive_values(&arg.directives)?; self.print_new_line()?; } write!(self.writer(), "}}") diff --git a/compiler/crates/schema-print/tests/print_schema.rs b/compiler/crates/schema-print/tests/print_schema.rs new file mode 100644 index 0000000000000..dc44e6d20dab0 --- /dev/null +++ b/compiler/crates/schema-print/tests/print_schema.rs @@ -0,0 +1,22 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use fixture_tests::Fixture; +use intern::string_key::Intern; +use schema::build_schema; +use schema_print::print_directives; +use schema_print::print_types_directives_as_shards; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let schema = build_schema(fixture.content).unwrap(); + let mut type_shard_count = fnv::FnvHashMap::default(); + type_shard_count.insert("Query".intern(), 2); + let directives = print_directives(&schema); + let types = print_types_directives_as_shards(&schema, 4, type_shard_count) + .join("\n=======Shard=======\n\n"); + Ok(format!("{}{}", directives, types)) +} diff --git a/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.expected b/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.expected index e86df45097905..58895cbfc2040 100644 --- a/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.expected +++ b/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.expected @@ -3,6 +3,7 @@ directive @source(schema: String, name: String) on OBJECT | INTERFACE | ENUM directive @ref_type(schema: String, name: String) on FIELD_DEFINITION directive @extern_type(schema: String, name: String) on INTERFACE directive @fetchable(field_name: String) on OBJECT +directive @fbid on ARGUMENT_DEFINITION | FIELD_DEFINITION | 
INPUT_FIELD_DEFINITION schema { query: Query @@ -77,9 +78,25 @@ interface Image implements Resource & Node { url: String thumbnail: String } + +input UserRequest { + user_id: ID! @fbid + other_user_ids: [ID!] = [] @fbid +} + +type UserResponse { + user_id: ID! @fbid + name: String +} + +type UsernameQuery { + fetch_user_name(user_id: ID @fbid): String +} ==================================== OUTPUT =================================== directive @extern_type(schema: String, name: String) on INTERFACE +directive @fbid on ARGUMENT_DEFINITION | FIELD_DEFINITION | INPUT_FIELD_DEFINITION + directive @fetchable(field_name: String) on OBJECT directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT @@ -92,6 +109,8 @@ directive @source(schema: String, name: String) on OBJECT | INTERFACE | ENUM directive @extern_type(schema: String, name: String) on INTERFACE +directive @fbid on ARGUMENT_DEFINITION | FIELD_DEFINITION | INPUT_FIELD_DEFINITION + directive @fetchable(field_name: String) on OBJECT directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT @@ -138,6 +157,15 @@ type User implements XIGHuman @fetchable(field_name: "id") { story(story: XIGStoryInput = {id: 123, name: "instagram_story"}): XIGStory } +type UserResponse { + user_id: ID! @fbid + name: String +} + +type UsernameQuery { + fetch_user_name(user_id: ID @fbid): String +} + interface XIGHuman @source(schema: "instagram", name: "Human") { name: String } @@ -168,6 +196,11 @@ interface Resource implements Node { url: String } +input UserRequest { + user_id: ID! @fbid + other_user_ids: [ID!] 
= [] @fbid +} + type XIGStory @fetchable(field_name: "id") @source(schema: "instagram", name: "Story") { id: ID name: String diff --git a/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.graphql b/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.graphql index 3d364469ea7cd..e6c0a2414e695 100644 --- a/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.graphql +++ b/compiler/crates/schema-print/tests/print_schema/fixtures/kitchen-sink.graphql @@ -2,6 +2,7 @@ directive @source(schema: String, name: String) on OBJECT | INTERFACE | ENUM directive @ref_type(schema: String, name: String) on FIELD_DEFINITION directive @extern_type(schema: String, name: String) on INTERFACE directive @fetchable(field_name: String) on OBJECT +directive @fbid on ARGUMENT_DEFINITION | FIELD_DEFINITION | INPUT_FIELD_DEFINITION schema { query: Query @@ -76,3 +77,17 @@ interface Image implements Resource & Node { url: String thumbnail: String } + +input UserRequest { + user_id: ID! @fbid + other_user_ids: [ID!] = [] @fbid +} + +type UserResponse { + user_id: ID! @fbid + name: String +} + +type UsernameQuery { + fetch_user_name(user_id: ID @fbid): String +} diff --git a/compiler/crates/schema-print/tests/print_schema/mod.rs b/compiler/crates/schema-print/tests/print_schema/mod.rs deleted file mode 100644 index eea5bacf3cbbf..0000000000000 --- a/compiler/crates/schema-print/tests/print_schema/mod.rs +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use intern::string_key::Intern; -use schema::build_schema; -use schema_print::print_directives; -use schema_print::print_types_directives_as_shards; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let schema = build_schema(fixture.content).unwrap(); - let mut type_shard_count = fnv::FnvHashMap::default(); - type_shard_count.insert("Query".intern(), 2); - let directives = print_directives(&schema); - let types = print_types_directives_as_shards(&schema, 4, type_shard_count) - .join("\n=======Shard=======\n\n"); - Ok(format!("{}{}", directives, types)) -} diff --git a/compiler/crates/schema-print/tests/print_schema_test.rs b/compiler/crates/schema-print/tests/print_schema_test.rs index ceb0d65afc9ab..800c93d033ce6 100644 --- a/compiler/crates/schema-print/tests/print_schema_test.rs +++ b/compiler/crates/schema-print/tests/print_schema_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. 
* - * @generated SignedSource<<08e2c143cd60aa5cd2bfbb8b67388e8b>> + * @generated SignedSource<> */ mod print_schema; @@ -12,16 +12,16 @@ mod print_schema; use print_schema::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("print_schema/fixtures/kitchen-sink.graphql"); let expected = include_str!("print_schema/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "print_schema/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "print_schema/fixtures/kitchen-sink.expected", input, expected).await; } -#[test] -fn no_schema() { +#[tokio::test] +async fn no_schema() { let input = include_str!("print_schema/fixtures/no-schema.graphql"); let expected = include_str!("print_schema/fixtures/no-schema.expected"); - test_fixture(transform_fixture, "no-schema.graphql", "print_schema/fixtures/no-schema.expected", input, expected); + test_fixture(transform_fixture, file!(), "no-schema.graphql", "print_schema/fixtures/no-schema.expected", input, expected).await; } diff --git a/compiler/crates/schema-validate/Cargo.toml b/compiler/crates/schema-validate/Cargo.toml index e933c8476610d..6dfd1fad5e712 100644 --- a/compiler/crates/schema-validate/Cargo.toml +++ b/compiler/crates/schema-validate/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema-validate:[schema-validate,schema-validate-lib,schema-validate_test] + [package] name = "schema-validate-lib" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[bin]] @@ -15,16 +17,18 @@ name = "schema_validate_test" path = "tests/validate_schema_test.rs" [dependencies] -clap = { version = "3.2.23", features = ["derive", "env", "regex", "unicode", "wrap_help"] } +clap = { version = "3.2.25", features = ["derive", "env", "regex", 
"unicode", "wrap_help"] } common = { path = "../common" } fnv = "1.0" +graphql-cli = { path = "../graphql-cli" } intern = { path = "../intern" } lazy_static = "1.4" -rayon = "1.2" -regex = "1.6.0" +rayon = "1.9.0" +regex = "1.9.2" schema = { path = "../schema" } -schema-print = { path = "../schema-print" } -thiserror = "1.0.36" +serde = { version = "1.0.185", features = ["derive", "rc"] } +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/schema-validate/src/errors.rs b/compiler/crates/schema-validate/src/errors.rs index 0e5dfacf78e33..2e0bcf996008c 100644 --- a/compiler/crates/schema-validate/src/errors.rs +++ b/compiler/crates/schema-validate/src/errors.rs @@ -8,18 +8,17 @@ use common::ArgumentName; use common::InterfaceName; use common::ObjectName; +use common::UnionName; use intern::string_key::StringKey; -use schema::Type; -use schema::TypeReference; use thiserror::Error; -#[derive(Debug, Error)] +#[derive(Clone, Debug, Error, serde::Serialize)] pub enum SchemaValidationError { #[error("'{0}' root type must be provided.")] MissingRootType(StringKey), - #[error("'{0}' root type must be Object type. Found '{1:?}'")] - InvalidRootType(StringKey, Type), + #[error("'{0}' root type must be Object type. 
Found {1}")] + InvalidRootType(StringKey, String), #[error("Name '{0}' must not begin with '__', which is reserved by GraphQL introspection.")] InvalidNamePrefix(String), @@ -36,20 +35,20 @@ pub enum SchemaValidationError { #[error("Type must define one or more fields.")] TypeWithNoFields, - #[error("The type of '{0}.{1}' must be Output Type but got: '{2:?}'.")] - InvalidFieldType(StringKey, StringKey, TypeReference), + #[error("The type of '{0}.{1}' must be Output Type but got {2}.")] + InvalidFieldType(StringKey, StringKey, String), - #[error("The type of '{0}.{1}({2}:)' must be InputType but got: '{3:?}'.")] - InvalidArgumentType(StringKey, StringKey, ArgumentName, TypeReference), + #[error("The type of '{0}.{1}({2}:)' must be InputType but got: {3}.")] + InvalidArgumentType(StringKey, StringKey, ArgumentName, String), #[error("Type '{0}' can only implement '{1}' once.")] DuplicateInterfaceImplementation(StringKey, InterfaceName), - #[error("Interface field '{0}.{1}' expected but '{2}' does not provide it.")] - InterfaceFieldNotProvided(InterfaceName, StringKey, StringKey), + #[error("Interface field '{0}.{1}' expected but {2} '{3}' does not provide it.")] + InterfaceFieldNotProvided(InterfaceName, StringKey, StringKey, StringKey), #[error("Interface field '{0}.{1}' expects type '{2}' but '{3}.{1}' is of type '{4}'.")] - NotASubType(InterfaceName, StringKey, StringKey, StringKey, StringKey), + NotASubType(InterfaceName, StringKey, String, StringKey, String), #[error( "Interface field argument '{0}.{1}({2}:)' expected but '{3}.{1}' does not provide it." 
@@ -63,9 +62,9 @@ pub enum SchemaValidationError { InterfaceName, StringKey, ArgumentName, + String, StringKey, - StringKey, - StringKey, + String, ), #[error( @@ -73,8 +72,8 @@ pub enum SchemaValidationError { )] MissingRequiredArgument(StringKey, StringKey, ArgumentName, InterfaceName), - #[error("Union type must define one or more member types.")] - UnionWithNoMembers(StringKey), + #[error("Union type {0} must define one or more member types.")] + UnionWithNoMembers(UnionName), #[error("Union can only include member {0} once.")] DuplicateMember(ObjectName), diff --git a/compiler/crates/schema-validate/src/lib.rs b/compiler/crates/schema-validate/src/lib.rs index ab8581fa95ecb..dc37dc1774c1b 100644 --- a/compiler/crates/schema-validate/src/lib.rs +++ b/compiler/crates/schema-validate/src/lib.rs @@ -7,13 +7,13 @@ mod errors; -use std::fmt::Write; -use std::sync::Mutex; -use std::time::Instant; - -use common::DirectiveName; +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; use common::InterfaceName; +use common::Location; use common::Named; +use common::WithLocation; use errors::*; use fnv::FnvHashMap; use fnv::FnvHashSet; @@ -34,8 +34,6 @@ use schema::Type; use schema::TypeReference; use schema::TypeWithFields; use schema::UnionID; -use schema_print::print_directive; -use schema_print::print_type; lazy_static! { static ref INTROSPECTION_TYPES: FnvHashSet = vec![ @@ -56,52 +54,42 @@ lazy_static! 
{ static ref TYPE_NAME_REGEX: Regex = Regex::new(r"^[_a-zA-Z][_a-zA-Z0-9]*$").unwrap(); } -pub fn validate(schema: &SDLSchema) -> ValidationContext<'_> { - let mut validation_context = ValidationContext::new(schema); - validation_context.validate(); - validation_context -} - -#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub enum ValidationContextType { - TypeNode(StringKey), - DirectiveNode(StringKey), - None, +pub struct SchemaValidationOptions { + pub allow_introspection_names: bool, } -impl ValidationContextType { - pub fn type_name(self) -> String { - match self { - ValidationContextType::DirectiveNode(type_name) - | ValidationContextType::TypeNode(type_name) => type_name.lookup().to_string(), - _ => "None".to_string(), - } +pub fn validate(schema: &SDLSchema, options: SchemaValidationOptions) -> DiagnosticsResult<()> { + let mut validation_context = ValidationContext::new(schema, &options); + validation_context.validate(); + if validation_context.diagnostics.is_empty() { + Ok(()) + } else { + validation_context + .diagnostics + .sort_by_key(|diagnostic| diagnostic.location()); + Err(validation_context.diagnostics) } } pub struct ValidationContext<'schema> { - pub schema: &'schema SDLSchema, - pub errors: Mutex>>, + schema: &'schema SDLSchema, + options: &'schema SchemaValidationOptions, + diagnostics: Vec, } impl<'schema> ValidationContext<'schema> { - pub fn new(schema: &'schema SDLSchema) -> Self { + pub fn new(schema: &'schema SDLSchema, options: &'schema SchemaValidationOptions) -> Self { Self { schema, - errors: Mutex::new(FnvHashMap::default()), + options, + diagnostics: Default::default(), } } fn validate(&mut self) { - let now = Instant::now(); self.validate_root_types(); self.validate_directives(); self.validate_types(); - println!("Validated Schema in {}ms", now.elapsed().as_millis()); - println!( - "Found {} validation errors", - self.errors.lock().unwrap().len() - ) } fn validate_root_types(&mut self) { @@ -110,259 +98,306 @@ 
impl<'schema> ValidationContext<'schema> { self.validate_root_type(self.schema.mutation_type(), *MUTATION); } - fn validate_root_type(&self, root_type: Option, type_name: StringKey) { + fn validate_root_type(&mut self, root_type: Option, type_name: StringKey) { if let Some(type_) = root_type { if !type_.is_object() { - self.report_error( - SchemaValidationError::InvalidRootType(type_name, type_), - ValidationContextType::TypeNode(type_name), - ); + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::InvalidRootType( + type_name, + type_.get_variant_name().to_string(), + ), + self.get_type_definition_location(type_), + )); } } else if type_name == *QUERY { - self.add_error(SchemaValidationError::MissingRootType(type_name)); + self.diagnostics.push(Diagnostic::error( + SchemaValidationError::MissingRootType(type_name), + Location::generated(), + )); } } fn validate_directives(&mut self) { for directive in self.schema.get_directives() { - let context = ValidationContextType::DirectiveNode(directive.name.0); - self.validate_name(directive.name.0, context); - let mut arg_names = FnvHashSet::default(); + self.validate_name(directive.name.item.0, directive.name.location); + let mut arg_names: FnvHashMap = FnvHashMap::default(); for argument in directive.arguments.iter() { - self.validate_name(argument.name.0, context); + self.validate_name(argument.name.item.0, argument.name.location); // Ensure unique arguments per directive. 
- if arg_names.contains(&argument.name) { - self.report_error( - SchemaValidationError::DuplicateArgument(argument.name, directive.name.0), - context, + if let Some(prev_loc) = arg_names.get(&argument.name.item) { + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::DuplicateArgument( + argument.name.item, + directive.name.item.0, + ), + argument.name.location, + ) + .annotate("Previously defined here:", *prev_loc), ); continue; } - arg_names.insert(argument.name); + arg_names.insert(argument.name.item, argument.name.location); } } } fn validate_types(&mut self) { - let types = self.schema.get_type_map().collect::>(); - types.par_iter().for_each(|(type_name, type_)| { - // Ensure it is named correctly (excluding introspection types). - if !is_introspection_type(type_, **type_name) { - self.validate_name(**type_name, ValidationContextType::TypeNode(**type_name)); + let diagnostics = self + .schema + .get_type_map_par_iter() + .flat_map(|(type_name, type_)| { + let mut child_visitor = Self::new(self.schema, self.options); + child_visitor.validate_type(*type_name, type_); + child_visitor.diagnostics + }) + .collect::>(); + self.diagnostics.extend(diagnostics); + } + + fn validate_type(&mut self, type_name: StringKey, type_: &Type) { + // Ensure it is named correctly (excluding introspection types). + if !is_introspection_type(type_, type_name) { + self.validate_name(type_name, self.get_type_definition_location(*type_)); + } + match type_ { + Type::Enum(id) => { + // Ensure Enums have valid values. + self.validate_enum_type(*id); } - match type_ { - Type::Enum(id) => { - // Ensure Enums have valid values. - self.validate_enum_type(*id); - } - Type::InputObject(id) => { - // Ensure Input Object fields are valid. 
- self.validate_input_object_fields(*id); - } - Type::Interface(id) => { - let interface = self.schema.interface(*id); - // Ensure fields are valid - self.validate_fields(**type_name, &interface.fields); - - // Validate cyclic references - if !self.validate_cyclic_implements_reference(interface) { - // Ensure interface implement the interfaces they claim to. - self.validate_type_with_interfaces(interface); - } + Type::InputObject(id) => { + // Ensure Input Object fields are valid. + self.validate_input_object_fields(*id); + } + Type::Interface(id) => { + let interface = self.schema.interface(*id); + // Ensure fields are valid + self.validate_fields(type_name, &interface.fields); + + // Validate cyclic references + if !self.validate_cyclic_implements_reference(interface) { + // Ensure interface implement the interfaces they claim to. + self.validate_type_with_interfaces(interface); } - Type::Object(id) => { - let object = self.schema.object(*id); - // Ensure fields are valid - self.validate_fields(**type_name, &object.fields); + } + Type::Object(id) => { + let object = self.schema.object(*id); + // Ensure fields are valid + self.validate_fields(type_name, &object.fields); - // Ensure objects implement the interfaces they claim to. - self.validate_type_with_interfaces(object); - } - Type::Union(id) => { - // Ensure Unions include valid member types. - self.validate_union_members(*id); - } - Type::Scalar(_id) => {} - }; - }); + // Ensure objects implement the interfaces they claim to. + self.validate_type_with_interfaces(object); + } + Type::Union(id) => { + // Ensure Unions include valid member types. + self.validate_union_members(*id); + } + Type::Scalar(_id) => {} + }; } - fn validate_fields(&self, type_name: StringKey, fields: &[FieldID]) { - let context = ValidationContextType::TypeNode(type_name); + fn validate_fields(&mut self, type_name: StringKey, fields: &[FieldID]) { // Must define one or more fields. 
if fields.is_empty() { - self.report_error(SchemaValidationError::TypeWithNoFields, context) + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::TypeWithNoFields, + self.get_type_definition_location(self.schema.get_type(type_name).unwrap()), + )); } - let mut field_names = FnvHashSet::default(); + let mut field_names: FnvHashMap = FnvHashMap::default(); for field_id in fields { let field = self.schema.field(*field_id); - if field_names.contains(&field.name.item) { - self.report_error( - SchemaValidationError::DuplicateField(field.name.item), - context, + if let Some(field_loc) = field_names.get(&field.name.item) { + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::DuplicateField(field.name.item), + field.name.location, + ) + .annotate("Previously defined here:", field_loc.clone()), ); continue; } - field_names.insert(field.name.item); + field_names.insert(field.name.item, field.name.location); // Ensure they are named correctly. - self.validate_name(field.name.item, context); + self.validate_name(field.name.item, field.name.location); // Ensure the type is an output type if !is_output_type(&field.type_) { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::InvalidFieldType( type_name, field.name.item, - field.type_.clone(), + field.type_.inner().get_variant_name().to_string(), ), - context, - ) + field.name.location, + )) } - let mut arg_names = FnvHashSet::default(); + let mut arg_names: FnvHashMap = FnvHashMap::default(); for argument in field.arguments.iter() { // Ensure they are named correctly. - self.validate_name(argument.name.0, context); + self.validate_name(argument.name.item.0, argument.name.location); // Ensure they are unique per field. // Ensure unique arguments per directive. 
- if arg_names.contains(&argument.name) { - self.report_error( - SchemaValidationError::DuplicateArgument(argument.name, field.name.item), - context, + if let Some(previous_loc) = arg_names.get(&argument.name.item) { + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::DuplicateArgument( + argument.name.item, + field.name.item, + ), + field.name.location, + ) + .annotate("Previously defined here:", *previous_loc), ); continue; } - arg_names.insert(argument.name); + arg_names.insert(argument.name.item, argument.name.location); // Ensure the type is an input type if !is_input_type(&argument.type_) { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::InvalidArgumentType( type_name, field.name.item, - argument.name, - argument.type_.clone(), + argument.name.item, + argument.type_.inner().get_variant_name().to_string(), ), - context, - ); + argument.name.location, // Note: Schema does not retain location information for argument type reference + )); } } } } - fn validate_union_members(&self, id: UnionID) { + fn validate_union_members(&mut self, id: UnionID) { let union = self.schema.union(id); - let context = ValidationContextType::TypeNode(union.name.item); if union.members.is_empty() { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::UnionWithNoMembers(union.name.item), - context, - ); + union.name.location, + )); } let mut member_names = FnvHashSet::default(); for member in union.members.iter() { - let member_name = self.schema.object(*member).name.item; - if member_names.contains(&member_name) { - self.report_error(SchemaValidationError::DuplicateMember(member_name), context); + let member_name = self.schema.object(*member).name; + if member_names.contains(&member_name.item) { + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::DuplicateMember(member_name.item), + union.name.location, // Schema does not track location of union members + )); continue; } - 
member_names.insert(member_name); + member_names.insert(member_name.item); } } - fn validate_enum_type(&self, id: EnumID) { + fn validate_enum_type(&mut self, id: EnumID) { let enum_ = self.schema.enum_(id); - let context = ValidationContextType::TypeNode(enum_.name.item.0); if enum_.values.is_empty() { - self.report_error(SchemaValidationError::EnumWithNoValues, context); + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::EnumWithNoValues, + enum_.name.location, + )) } for value in enum_.values.iter() { // Ensure valid name. - self.validate_name(value.value, context); + self.validate_name(value.value, enum_.name.location); // Note: Schema does not have location for enum value let value_name = value.value.lookup(); if value_name == "true" || value_name == "false" || value_name == "null" { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::InvalidEnumValue(value.value), - context, - ); + enum_.name.location, // Schema does not track location information for individual enum values + )); } } } - fn validate_input_object_fields(&self, id: InputObjectID) { + fn validate_input_object_fields(&mut self, id: InputObjectID) { let input_object = self.schema.input_object(id); - let context = ValidationContextType::TypeNode(input_object.name.item.0); if input_object.fields.is_empty() { - self.report_error(SchemaValidationError::TypeWithNoFields, context); + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::TypeWithNoFields, + input_object.name.location, + )); } // Ensure the arguments are valid for field in input_object.fields.iter() { // Ensure they are named correctly. 
- self.validate_name(field.name.0, context); + self.validate_name(field.name.item.0, field.name.location); // Ensure the type is an input type if !is_input_type(&field.type_) { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::InvalidArgumentType( input_object.name.item.0, - field.name.0, - field.name, - field.type_.clone(), + field.name.item.0, + field.name.item, + field.type_.inner().get_variant_name().to_string(), ), - context, - ); + field.name.location, + )); } } } - fn validate_type_with_interfaces(&self, type_: &T) { + fn validate_type_with_interfaces(&mut self, type_: &T) { let typename = type_.name().lookup().intern(); - let mut interface_names = FnvHashSet::default(); + let mut interface_names: FnvHashMap = FnvHashMap::default(); for interface_id in type_.interfaces().iter() { let interface = self.schema.interface(*interface_id); - if interface_names.contains(&interface.name) { - self.report_error( - SchemaValidationError::DuplicateInterfaceImplementation( - typename, - interface.name.item, - ), - ValidationContextType::TypeNode(typename), + if let Some(prev_loc) = interface_names.get(&interface.name.item) { + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::DuplicateInterfaceImplementation( + typename, + interface.name.item, + ), + interface.name.location, + ) + .annotate("Previously defined here:", *prev_loc), ); continue; } - interface_names.insert(interface.name); + interface_names.insert(interface.name.item, interface.name.location); self.validate_type_implements_interface(type_, interface); } } fn validate_type_implements_interface( - &self, + &mut self, type_: &T, interface: &Interface, ) { let typename = type_.name().lookup().intern(); let object_field_map = self.field_map(type_.fields()); let interface_field_map = self.field_map(&interface.fields); - let context = ValidationContextType::TypeNode(typename); // Assert each interface field is implemented. 
for (field_name, interface_field) in interface_field_map { // Assert interface field exists on object. if !object_field_map.contains_key(&field_name) { - self.report_error( - SchemaValidationError::InterfaceFieldNotProvided( - interface.name.item, - field_name, - typename, + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::InterfaceFieldNotProvided( + interface.name.item, + field_name, + type_.type_kind(), + typename, + ), + *type_.location(), + ) + .annotate( + "The interface field is defined here:", + interface_field.name.location, ), - context, ); continue; } @@ -374,15 +409,21 @@ impl<'schema> ValidationContext<'schema> { .schema .is_type_subtype_of(&object_field.type_, &interface_field.type_) { - self.report_error( - SchemaValidationError::NotASubType( - interface.name.item, - field_name, - self.schema.get_type_name(interface_field.type_.inner()), - typename, - self.schema.get_type_name(object_field.type_.inner()), + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::NotASubType( + interface.name.item, + field_name, + self.schema.get_type_string(&interface_field.type_), + typename, + self.schema.get_type_string(&object_field.type_), + ), + object_field.name.location, + ) + .annotate( + "The interface field is defined here:", + interface_field.name.location, ), - context, ); } @@ -391,18 +432,24 @@ impl<'schema> ValidationContext<'schema> { let object_argument = object_field .arguments .iter() - .find(|arg| arg.name == interface_argument.name); + .find(|arg| arg.name.item == interface_argument.name.item); // Assert interface field arg exists on object field. 
if object_argument.is_none() { - self.report_error( - SchemaValidationError::InterfaceFieldArgumentNotProvided( - interface.name.item, - field_name, - interface_argument.name, - typename, + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::InterfaceFieldArgumentNotProvided( + interface.name.item, + field_name, + interface_argument.name.item, + typename, + ), + object_field.name.location, + ) + .annotate( + "The interface field argument is defined here:", + interface_argument.name.location, ), - context, ); continue; } @@ -412,16 +459,22 @@ impl<'schema> ValidationContext<'schema> { // (invariant) // TODO: change to contravariant? if interface_argument.type_ != object_argument.type_ { - self.report_error( - SchemaValidationError::NotEqualType( - interface.name.item, - field_name, - interface_argument.name, - self.schema.get_type_name(interface_argument.type_.inner()), - typename, - self.schema.get_type_name(object_argument.type_.inner()), + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::NotEqualType( + interface.name.item, + field_name, + interface_argument.name.item, + self.schema.get_type_string(&interface_argument.type_), + typename, + self.schema.get_type_string(&object_argument.type_), + ), + object_argument.name.location, + ) + .annotate( + "The interface field argument is defined here:", + interface_argument.name.location, ), - context, ); } // TODO: validate default values? @@ -429,24 +482,32 @@ impl<'schema> ValidationContext<'schema> { // Assert additional arguments must not be required. 
for object_argument in object_field.arguments.iter() { - if !interface_field.arguments.contains(object_argument.name.0) + if !interface_field + .arguments + .contains(object_argument.name.item.0) && object_argument.type_.is_non_null() { - self.report_error( - SchemaValidationError::MissingRequiredArgument( - typename, - field_name, - object_argument.name, - interface.name.item, + self.report_diagnostic( + Diagnostic::error( + SchemaValidationError::MissingRequiredArgument( + typename, + field_name, + object_argument.name.item, + interface.name.item, + ), + object_argument.name.location, + ) + .annotate( + "The interface field is define here:", + interface_field.name.location, ), - context, ); } } } } - fn validate_cyclic_implements_reference(&self, interface: &Interface) -> bool { + fn validate_cyclic_implements_reference(&mut self, interface: &Interface) -> bool { for id in interface.interfaces() { let mut path = Vec::new(); let mut visited = FnvHashSet::default(); @@ -456,17 +517,22 @@ impl<'schema> ValidationContext<'schema> { &mut path, &mut visited, ) { - self.report_error( + let mut diagnostic = Diagnostic::error( SchemaValidationError::CyclicInterfaceInheritance(format!( "{}->{}", path.iter() - .map(|name| name.lookup()) + .map(|name| name.item.lookup()) .collect::>() .join("->"), interface.name.item )), - ValidationContextType::TypeNode(interface.name.item.0), + interface.name.location, ); + + for name in path.iter().rev() { + diagnostic = diagnostic.annotate("->", name.location); + } + self.report_diagnostic(diagnostic); return true; } } @@ -477,7 +543,7 @@ impl<'schema> ValidationContext<'schema> { &self, root: &Interface, target: InterfaceName, - path: &mut Vec, + path: &mut Vec>, visited: &mut FnvHashSet, ) -> bool { if visited.contains(&root.name.item.0) { @@ -488,7 +554,7 @@ impl<'schema> ValidationContext<'schema> { return true; } - path.push(root.name.item.0); + path.push(root.name); visited.insert(root.name.item.0); for id in root.interfaces() { if 
self.has_path(self.schema.interface(*id), target, path, visited) { @@ -499,24 +565,28 @@ impl<'schema> ValidationContext<'schema> { false } - fn validate_name(&self, name: StringKey, context: ValidationContextType) { + fn validate_name(&mut self, name: StringKey, location: Location) { let name = name.lookup(); - let mut chars = name.chars(); - if name.len() > 1 && chars.next() == Some('_') && chars.next() == Some('_') { - self.report_error( - SchemaValidationError::InvalidNamePrefix(name.to_string()), - context, - ); + + if !self.options.allow_introspection_names { + let mut chars = name.chars(); + if name.len() > 1 && chars.next() == Some('_') && chars.next() == Some('_') { + self.report_diagnostic(Diagnostic::error( + SchemaValidationError::InvalidNamePrefix(name.to_string()), + location, + )); + } } + if !TYPE_NAME_REGEX.is_match(name) { - self.report_error( + self.report_diagnostic(Diagnostic::error( SchemaValidationError::InvalidName(name.to_string()), - context, - ); + location, + )); } } - fn field_map(&self, fields: &[FieldID]) -> FnvHashMap { + fn field_map(&mut self, fields: &[FieldID]) -> FnvHashMap { fields .iter() .map(|id| self.schema.field(*id)) @@ -524,59 +594,19 @@ impl<'schema> ValidationContext<'schema> { .collect::>() } - fn report_error(&self, error: SchemaValidationError, context: ValidationContextType) { - self.errors - .lock() - .unwrap() - .entry(context) - .or_insert_with(Vec::new) - .push(error); + fn report_diagnostic(&mut self, diagnostic: Diagnostic) { + self.diagnostics.push(diagnostic); } - fn add_error(&self, error: SchemaValidationError) { - self.report_error(error, ValidationContextType::None); - } - - pub fn print_errors(&self) -> String { - let mut builder: String = String::new(); - let errors = self.errors.lock().unwrap(); - let mut contexts: Vec<_> = errors.keys().collect(); - contexts.sort_by_key(|context| context.type_name()); - for context in contexts { - match context { - ValidationContextType::None => writeln!(builder, 
"Errors:").unwrap(), - ValidationContextType::TypeNode(type_name) => writeln!( - builder, - "Type {} with definition:\n\t{}\nhad errors:", - type_name, - print_type(self.schema, self.schema.get_type(*type_name).unwrap()).trim_end() - ) - .unwrap(), - ValidationContextType::DirectiveNode(directive_name) => writeln!( - builder, - "Directive {} with definition:\n\t{}\nhad errors:", - directive_name, - print_directive( - self.schema, - self.schema - .get_directive(DirectiveName(*directive_name)) - .unwrap() - ) - .trim_end() - ) - .unwrap(), - } - let mut error_strings = errors - .get(context) - .unwrap() - .iter() - .map(|error| format!("\t* {}", error)) - .collect::>(); - error_strings.sort(); - writeln!(builder, "{}", error_strings.join("\n")).unwrap(); - writeln!(builder).unwrap(); + fn get_type_definition_location(&self, type_: Type) -> Location { + match type_ { + Type::Enum(id) => self.schema.enum_(id).name.location, + Type::InputObject(id) => self.schema.input_object(id).name.location, + Type::Interface(id) => self.schema.interface(id).name.location, + Type::Object(id) => self.schema.object(id).name.location, + Type::Scalar(id) => self.schema.scalar(id).name.location, + Type::Union(id) => self.schema.union(id).name.location, } - builder } } diff --git a/compiler/crates/schema-validate/src/main.rs b/compiler/crates/schema-validate/src/main.rs index f2ea82703ea24..b6b09d9f79472 100644 --- a/compiler/crates/schema-validate/src/main.rs +++ b/compiler/crates/schema-validate/src/main.rs @@ -10,9 +10,14 @@ use std::path::Path; use clap::Parser; use common::DiagnosticsResult; -use schema::build_schema; +use common::SourceLocationKey; +use common::TextSource; +use graphql_cli::DiagnosticPrinter; +use intern::intern::Lookup; +use schema::build_schema_with_extensions; use schema::SDLSchema; use schema_validate_lib::validate; +use schema_validate_lib::SchemaValidationOptions; #[derive(Parser)] #[clap(name = "schema-validate", about = "Binary to Validate GraphQL Schema.")] 
@@ -24,34 +29,78 @@ struct Opt { pub fn main() { let opt = Opt::parse(); - match build_schema_from_file(&opt.schema_path) { + match build_schema_from_path(&opt.schema_path) { Ok(schema) => { - let validation_context = validate(&schema); - if !validation_context.errors.lock().unwrap().is_empty() { - eprintln!( + if let Err(diagnostics) = validate( + &schema, + SchemaValidationOptions { + allow_introspection_names: false, + }, + ) { + let printer = DiagnosticPrinter::new(sources); + println!( "Schema failed validation with below errors:\n{}", - validation_context.print_errors() + printer.diagnostics_to_string(&diagnostics) ); std::process::exit(1); } } - Err(error) => { - eprintln!("Failed to parse schema:\n{:?}", error); + Err(diagnostics) => { + let printer = DiagnosticPrinter::new(sources); + println!( + "Failed to parse schema:\n{}", + printer.diagnostics_to_string(&diagnostics) + ); std::process::exit(1); } } } -fn build_schema_from_file(schema_file: &str) -> DiagnosticsResult { +/// Returns a SDLSchema with a default location used for reporting non-location specific errors. 
+fn build_schema_from_path(schema_file: &str) -> DiagnosticsResult { let path = Path::new(schema_file); - let data = if path.is_file() { - fs::read_to_string(path).unwrap() + let extensions: &[(&str, SourceLocationKey)] = &[]; + + return if path.is_file() { + build_schema_with_extensions(&[path_to_schema_source(path)], extensions) } else { - let mut buffer = String::new(); - for entry in path.read_dir().unwrap() { - buffer.push_str(&fs::read_to_string(entry.unwrap().path()).unwrap()); + let paths = path + .read_dir() + .unwrap() + .map(|entry| entry.unwrap().path()) + .collect::>(); + + if paths.is_empty() { + println!("No schema files found in the directory: {}", schema_file); + std::process::exit(1); } - buffer + + let sdls: Vec<(String, SourceLocationKey)> = paths + .iter() + .map(|path| path_to_schema_source(path)) + .collect(); + + build_schema_with_extensions(&sdls, extensions) }; - build_schema(&data) +} + +fn path_to_schema_source(path: &Path) -> (String, SourceLocationKey) { + ( + fs::read_to_string(path).unwrap(), + SourceLocationKey::standalone(path.to_str().unwrap()), + ) +} + +fn sources(source_key: SourceLocationKey) -> Option { + match source_key { + SourceLocationKey::Standalone { path } => Some(TextSource::from_whole_document( + fs::read_to_string(Path::new(path.lookup())).unwrap(), + )), + SourceLocationKey::Embedded { .. } => { + panic!("Embedded sources are not supported in this context. This should not happen.",) + } + SourceLocationKey::Generated => { + panic!("Generated sources are not supported in this context. This should not happen.") + } + } } diff --git a/compiler/crates/schema-validate/tests/validate_schema.rs b/compiler/crates/schema-validate/tests/validate_schema.rs new file mode 100644 index 0000000000000..f287b7737930e --- /dev/null +++ b/compiler/crates/schema-validate/tests/validate_schema.rs @@ -0,0 +1,41 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use schema::build_schema_with_extensions; +use schema_validate_lib::validate; +use schema_validate_lib::SchemaValidationOptions; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let result = build_schema_with_extensions::<&str, &str>( + &[( + fixture.content, + SourceLocationKey::standalone(fixture.file_name), + )], + &[], + ) + .and_then(|schema| { + validate( + &schema, + SchemaValidationOptions { + allow_introspection_names: false, + }, + ) + }); + match result { + Ok(_) => Ok("OK".to_string()), + Err(diagnostics) => { + let printer = DiagnosticPrinter::new(|_| { + Some(TextSource::from_whole_document(fixture.content.to_string())) + }); + Ok(printer.diagnostics_to_string(&diagnostics)) + } + } +} diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_directives.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_directives.expected index ed67523d77b85..ab53efd9398a3 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_directives.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_directives.expected @@ -30,23 +30,57 @@ type Query { user: Human } ==================================== OUTPUT =================================== -Directive __fetchable with definition: - directive @__fetchable(field_name: String) on OBJECT -had errors: - * Name '__fetchable' must not begin with '__', which is reserved by GraphQL introspection. - -Directive __fetchableOther with definition: - directive @__fetchableOther(field_name: String, field_name: Int) on OBJECT -had errors: - * Duplicate argument 'field_name' found on field/directive '__fetchableOther'. 
- * Name '__fetchableOther' must not begin with '__', which is reserved by GraphQL introspection. - -Directive fetchable with definition: - directive @fetchable(__field_name: String) on OBJECT -had errors: - * Name '__field_name' must not begin with '__', which is reserved by GraphQL introspection. - -Directive fetchableOther with definition: - directive @fetchableOther(field_name: String, field_name: Int) on OBJECT -had errors: - * Duplicate argument 'field_name' found on field/directive 'fetchableOther'. +✖︎ Name '__fetchable' must not begin with '__', which is reserved by GraphQL introspection. + + validate_directives.graphql:1:12 + 1 │ directive @__fetchable(field_name: String) on OBJECT + │ ^^^^^^^^^^^ + 2 │ + +✖︎ Name '__field_name' must not begin with '__', which is reserved by GraphQL introspection. + + validate_directives.graphql:3:22 + 2 │ + 3 │ directive @fetchable(__field_name: String) on OBJECT + │ ^^^^^^^^^^^^ + 4 │ + +✖︎ Duplicate argument 'field_name' found on field/directive 'fetchableOther'. + + validate_directives.graphql:5:47 + 4 │ + 5 │ directive @fetchableOther(field_name: String, field_name: Int) on OBJECT + │ ^^^^^^^^^^ + 6 │ + + ℹ︎ Previously defined here: + + validate_directives.graphql:5:27 + 4 │ + 5 │ directive @fetchableOther(field_name: String, field_name: Int) on OBJECT + │ ^^^^^^^^^^ + 6 │ + +✖︎ Name '__fetchableOther' must not begin with '__', which is reserved by GraphQL introspection. + + validate_directives.graphql:7:12 + 6 │ + 7 │ directive @__fetchableOther(field_name: String, field_name: Int) on OBJECT + │ ^^^^^^^^^^^^^^^^ + 8 │ + +✖︎ Duplicate argument 'field_name' found on field/directive '__fetchableOther'. 
+ + validate_directives.graphql:7:49 + 6 │ + 7 │ directive @__fetchableOther(field_name: String, field_name: Int) on OBJECT + │ ^^^^^^^^^^ + 8 │ + + ℹ︎ Previously defined here: + + validate_directives.graphql:7:29 + 6 │ + 7 │ directive @__fetchableOther(field_name: String, field_name: Int) on OBJECT + │ ^^^^^^^^^^ + 8 │ diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_enum.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_enum.expected index d764061b9fd71..69e4a595eb9f6 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_enum.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_enum.expected @@ -38,18 +38,34 @@ enum InvlidPetType { null } ==================================== OUTPUT =================================== -Type EmptyPetType with definition: - enum EmptyPetType -had errors: - * Enum must define one or more values. +✖︎ Enum must define one or more values. -Type InvlidPetType with definition: - enum InvlidPetType { - true - false - null -} -had errors: - * Enum cannot include value: false. - * Enum cannot include value: null. - * Enum cannot include value: true. + validate_enum.graphql:32:6 + 31 │ + 32 │ enum EmptyPetType + │ ^^^^^^^^^^^^ + 33 │ + +✖︎ Enum cannot include value: true. + + validate_enum.graphql:34:6 + 33 │ + 34 │ enum InvlidPetType { + │ ^^^^^^^^^^^^^ + 35 │ true + +✖︎ Enum cannot include value: false. + + validate_enum.graphql:34:6 + 33 │ + 34 │ enum InvlidPetType { + │ ^^^^^^^^^^^^^ + 35 │ true + +✖︎ Enum cannot include value: null. 
+ + validate_enum.graphql:34:6 + 33 │ + 34 │ enum InvlidPetType { + │ ^^^^^^^^^^^^^ + 35 │ true diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.expected new file mode 100644 index 0000000000000..ae4039d2805b2 --- /dev/null +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.expected @@ -0,0 +1,126 @@ +==================================== INPUT ==================================== +# Reports full type, not just inner type + +interface Node { + id: ID! +} + +type Pet implements Node { + id: ID # <--- Missing ! +} + +# Subtypes for fields are allowed + +interface InterfaceA { + some_field: String +} + +type TypeA implements InterfaceA { + some_field: String! # More specific type of String! should be allowed +} + +# Checks multiple interfaces + +interface InterfaceB { + some_field: String +} + +interface InterfaceC { + another_field: String +} + +type TypeB implements InterfaceB & InterfaceC { + some_field: String + # Oops! Does not implement InterfaceC's field! +} + +# Checks interface implements interface + +interface InterfaceD { + some_field: String +} + +interface InterfaceE implements InterfaceD { + # Oops! Does not implement InterfaceD's field + another_field: String +} + +# Checks multi-dimensional lists + +interface InterfaceF { + some_field: [[[String]]] +} + +type TypeC implements InterfaceF { + some_field: [[[Int]]] # Oops! Should be String in there +} + +# Required for global validation + +type Query { + node: Node +} +==================================== OUTPUT =================================== +✖︎ Interface field 'Node.id' expects type 'ID!' but 'Pet.id' is of type 'ID'. + + validate_implements_interface.graphql:8:3 + 7 │ type Pet implements Node { + 8 │ id: ID # <--- Missing ! 
+ │ ^^ + 9 │ } + + ℹ︎ The interface field is defined here: + + validate_implements_interface.graphql:4:3 + 3 │ interface Node { + 4 │ id: ID! + │ ^^ + 5 │ } + +✖︎ Interface field 'InterfaceC.another_field' expected but type 'TypeB' does not provide it. + + validate_implements_interface.graphql:31:6 + 30 │ + 31 │ type TypeB implements InterfaceB & InterfaceC { + │ ^^^^^ + 32 │ some_field: String + + ℹ︎ The interface field is defined here: + + validate_implements_interface.graphql:28:3 + 27 │ interface InterfaceC { + 28 │ another_field: String + │ ^^^^^^^^^^^^^ + 29 │ } + +✖︎ Interface field 'InterfaceD.some_field' expected but interface 'InterfaceE' does not provide it. + + validate_implements_interface.graphql:42:11 + 41 │ + 42 │ interface InterfaceE implements InterfaceD { + │ ^^^^^^^^^^ + 43 │ # Oops! Does not implement InterfaceD's field + + ℹ︎ The interface field is defined here: + + validate_implements_interface.graphql:39:3 + 38 │ interface InterfaceD { + 39 │ some_field: String + │ ^^^^^^^^^^ + 40 │ } + +✖︎ Interface field 'InterfaceF.some_field' expects type '[[[String]]]' but 'TypeC.some_field' is of type '[[[Int]]]'. + + validate_implements_interface.graphql:54:3 + 53 │ type TypeC implements InterfaceF { + 54 │ some_field: [[[Int]]] # Oops! Should be String in there + │ ^^^^^^^^^^ + 55 │ } + + ℹ︎ The interface field is defined here: + + validate_implements_interface.graphql:50:3 + 49 │ interface InterfaceF { + 50 │ some_field: [[[String]]] + │ ^^^^^^^^^^ + 51 │ } diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.graphql b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.graphql new file mode 100644 index 0000000000000..955cd77be3e5d --- /dev/null +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_implements_interface.graphql @@ -0,0 +1,61 @@ +# Reports full type, not just inner type + +interface Node { + id: ID! 
+} + +type Pet implements Node { + id: ID # <--- Missing ! +} + +# Subtypes for fields are allowed + +interface InterfaceA { + some_field: String +} + +type TypeA implements InterfaceA { + some_field: String! # More specific type of String! should be allowed +} + +# Checks multiple interfaces + +interface InterfaceB { + some_field: String +} + +interface InterfaceC { + another_field: String +} + +type TypeB implements InterfaceB & InterfaceC { + some_field: String + # Oops! Does not implement InterfaceC's field! +} + +# Checks interface implements interface + +interface InterfaceD { + some_field: String +} + +interface InterfaceE implements InterfaceD { + # Oops! Does not implement InterfaceD's field + another_field: String +} + +# Checks multi-dimensional lists + +interface InterfaceF { + some_field: [[[String]]] +} + +type TypeC implements InterfaceF { + some_field: [[[Int]]] # Oops! Should be String in there +} + +# Required for global validation + +type Query { + node: Node +} diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected index 9031d55f07b3f..7ddd2ecd714a4 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected @@ -32,39 +32,149 @@ type Query { image: Image } ==================================== OUTPUT =================================== -Type BaseNode with definition: - interface BaseNode implements BaseNode2 { - id: ID! -} -had errors: - * Cyclic reference found for interface inheritance: BaseNode2->BaseNode1->Resource->Node->BaseNode. +✖︎ Cyclic reference found for interface inheritance: BaseNode2->BaseNode1. 
-Type BaseNode1 with definition: - interface BaseNode1 implements BaseNode2 & Resource { - id: ID! -} -had errors: - * Cyclic reference found for interface inheritance: BaseNode2->BaseNode1. + validate_interface_implements_interface_cyclic.graphql:1:11 + 1 │ interface BaseNode1 implements BaseNode2 & Resource { + │ ^^^^^^^^^ + 2 │ id: ID! -Type BaseNode2 with definition: - interface BaseNode2 implements BaseNode1 { - id: ID! -} -had errors: - * Cyclic reference found for interface inheritance: BaseNode1->BaseNode2. + ℹ︎ -> -Type Node with definition: - interface Node implements BaseNode & BaseNode1 { - name: String! -} -had errors: - * Cyclic reference found for interface inheritance: BaseNode->BaseNode2->BaseNode1->Resource->Node. + validate_interface_implements_interface_cyclic.graphql:5:11 + 4 │ + 5 │ interface BaseNode2 implements BaseNode1 { + │ ^^^^^^^^^ + 6 │ id: ID! -Type Resource with definition: - interface Resource implements Node { - id: ID! - name: String! - url: String -} -had errors: - * Cyclic reference found for interface inheritance: Node->BaseNode->BaseNode2->BaseNode1->Resource. +✖︎ Cyclic reference found for interface inheritance: BaseNode1->BaseNode2. + + validate_interface_implements_interface_cyclic.graphql:5:11 + 4 │ + 5 │ interface BaseNode2 implements BaseNode1 { + │ ^^^^^^^^^ + 6 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:1:11 + 1 │ interface BaseNode1 implements BaseNode2 & Resource { + │ ^^^^^^^^^ + 2 │ id: ID! + +✖︎ Cyclic reference found for interface inheritance: BaseNode2->BaseNode1->Resource->Node->BaseNode. + + validate_interface_implements_interface_cyclic.graphql:9:11 + 8 │ + 9 │ interface BaseNode implements BaseNode2 { + │ ^^^^^^^^ + 10 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:13:11 + 12 │ + 13 │ interface Node implements BaseNode & BaseNode1 { + │ ^^^^ + 14 │ name: String! 
+ + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:17:11 + 16 │ + 17 │ interface Resource implements Node { + │ ^^^^^^^^ + 18 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:1:11 + 1 │ interface BaseNode1 implements BaseNode2 & Resource { + │ ^^^^^^^^^ + 2 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:5:11 + 4 │ + 5 │ interface BaseNode2 implements BaseNode1 { + │ ^^^^^^^^^ + 6 │ id: ID! + +✖︎ Cyclic reference found for interface inheritance: BaseNode->BaseNode2->BaseNode1->Resource->Node. + + validate_interface_implements_interface_cyclic.graphql:13:11 + 12 │ + 13 │ interface Node implements BaseNode & BaseNode1 { + │ ^^^^ + 14 │ name: String! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:17:11 + 16 │ + 17 │ interface Resource implements Node { + │ ^^^^^^^^ + 18 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:1:11 + 1 │ interface BaseNode1 implements BaseNode2 & Resource { + │ ^^^^^^^^^ + 2 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:5:11 + 4 │ + 5 │ interface BaseNode2 implements BaseNode1 { + │ ^^^^^^^^^ + 6 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:9:11 + 8 │ + 9 │ interface BaseNode implements BaseNode2 { + │ ^^^^^^^^ + 10 │ id: ID! + +✖︎ Cyclic reference found for interface inheritance: Node->BaseNode->BaseNode2->BaseNode1->Resource. + + validate_interface_implements_interface_cyclic.graphql:17:11 + 16 │ + 17 │ interface Resource implements Node { + │ ^^^^^^^^ + 18 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:1:11 + 1 │ interface BaseNode1 implements BaseNode2 & Resource { + │ ^^^^^^^^^ + 2 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:5:11 + 4 │ + 5 │ interface BaseNode2 implements BaseNode1 { + │ ^^^^^^^^^ + 6 │ id: ID! 
+ + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:9:11 + 8 │ + 9 │ interface BaseNode implements BaseNode2 { + │ ^^^^^^^^ + 10 │ id: ID! + + ℹ︎ -> + + validate_interface_implements_interface_cyclic.graphql:13:11 + 12 │ + 13 │ interface Node implements BaseNode & BaseNode1 { + │ ^^^^ + 14 │ name: String! diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.expected index df0b990224ddf..a69e7566b894d 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.expected @@ -34,6 +34,22 @@ type Human implements Hominid @fetchable(field_name: "id") { other_friends(location: Location, location: Location): [Human] } +type Ape implements Hominid { + pet: Canine + friends: [Hominid] # Missing argument +} + +type Chimp implements Hominid { + pet: Canine + friends(location: String): [Hominid] # Missing argument +} + +# Implement Hominid twice +type Bonobo implements Hominid & Hominid { + pet: Canine + friends(location: Location): [Hominid] +} + type Query { fetch__Human(id: ID!): Human user: Human @@ -63,33 +79,146 @@ type Fur { color: String } ==================================== OUTPUT =================================== -Type EmptyFurType with definition: - union EmptyFurType -had errors: - * Union type must define one or more member types. +✖︎ Type 'Bonobo' can only implement 'Hominid' once. -Type Human with definition: - type Human implements Hominid @fetchable(field_name: "id") { - id: ID - pet: OtherPet - friends(location: Location, radius: Int!, Country: String): [Human] - location: Location - location: ID - other_friends(location: Location, location: Location): [Human] -} -had errors: - * Duplicate argument 'location' found on field/directive 'other_friends'. - * Duplicate field 'location' found. 
- * Interface field 'Hominid.pet' expects type 'Canine' but 'Human.pet' is of type 'OtherPet'. - * Object field 'Human.friends' includes required argument 'radius' that is missing from the Interface field 'Hominid.friends'. - * The type of 'Human.location' must be Output Type but got: 'Named(InputObject(0))'. - -Type Pet with definition: - type Pet implements Canine @fetchable(field_name: "id") { - id: ID - owner: Human - type: PetType - fur: FurType -} -had errors: - * Interface field 'Canine.name' expected but 'Pet' does not provide it. + validate_object.graphql:3:11 + 2 │ + 3 │ interface Hominid { + │ ^^^^^^^ + 4 │ pet: Canine + + ℹ︎ Previously defined here: + + validate_object.graphql:3:11 + 2 │ + 3 │ interface Hominid { + │ ^^^^^^^ + 4 │ pet: Canine + +✖︎ Interface field 'Canine.name' expected but type 'Pet' does not provide it. + + validate_object.graphql:13:6 + 12 │ + 13 │ type Pet implements Canine @fetchable(field_name: "id") { + │ ^^^ + 14 │ id: ID + + ℹ︎ The interface field is defined here: + + validate_object.graphql:10:3 + 9 │ owner: Human + 10 │ name: String + │ ^^^^ + 11 │ } + +✖︎ Interface field 'Hominid.pet' expects type 'Canine' but 'Human.pet' is of type 'OtherPet'. + + validate_object.graphql:29:3 + 28 │ id: ID + 29 │ pet: OtherPet + │ ^^^ + 30 │ friends(location: Location, radius: Int!, Country: String): [Human] + + ℹ︎ The interface field is defined here: + + validate_object.graphql:4:3 + 3 │ interface Hominid { + 4 │ pet: Canine + │ ^^^ + 5 │ friends(location: Location): [Hominid] + +✖︎ Object field 'Human.friends' includes required argument 'radius' that is missing from the Interface field 'Hominid.friends'. 
+ + validate_object.graphql:30:31 + 29 │ pet: OtherPet + 30 │ friends(location: Location, radius: Int!, Country: String): [Human] + │ ^^^^^^ + 31 │ location: Location + + ℹ︎ The interface field is define here: + + validate_object.graphql:5:3 + 4 │ pet: Canine + 5 │ friends(location: Location): [Hominid] + │ ^^^^^^^ + 6 │ } + +✖︎ The type of 'Human.location' must be Output Type but got an input object. + + validate_object.graphql:31:3 + 30 │ friends(location: Location, radius: Int!, Country: String): [Human] + 31 │ location: Location + │ ^^^^^^^^ + 32 │ location: ID + +✖︎ Duplicate field 'location' found. + + validate_object.graphql:32:3 + 31 │ location: Location + 32 │ location: ID + │ ^^^^^^^^ + 33 │ other_friends(location: Location, location: Location): [Human] + + ℹ︎ Previously defined here: + + validate_object.graphql:31:3 + 30 │ friends(location: Location, radius: Int!, Country: String): [Human] + 31 │ location: Location + │ ^^^^^^^^ + 32 │ location: ID + +✖︎ Duplicate argument 'location' found on field/directive 'other_friends'. + + validate_object.graphql:33:3 + 32 │ location: ID + 33 │ other_friends(location: Location, location: Location): [Human] + │ ^^^^^^^^^^^^^ + 34 │ } + + ℹ︎ Previously defined here: + + validate_object.graphql:33:17 + 32 │ location: ID + 33 │ other_friends(location: Location, location: Location): [Human] + │ ^^^^^^^^ + 34 │ } + +✖︎ Interface field argument 'Hominid.friends(location:)' expected but 'Ape.friends' does not provide it. + + validate_object.graphql:38:3 + 37 │ pet: Canine + 38 │ friends: [Hominid] # Missing argument + │ ^^^^^^^ + 39 │ } + + ℹ︎ The interface field argument is defined here: + + validate_object.graphql:5:11 + 4 │ pet: Canine + 5 │ friends(location: Location): [Hominid] + │ ^^^^^^^^ + 6 │ } + +✖︎ Interface field argument 'Hominid.friends(location:)' expects type 'Location' but 'Chimp.friends(location:)' is type 'String'. 
+ + validate_object.graphql:43:11 + 42 │ pet: Canine + 43 │ friends(location: String): [Hominid] # Missing argument + │ ^^^^^^^^ + 44 │ } + + ℹ︎ The interface field argument is defined here: + + validate_object.graphql:5:11 + 4 │ pet: Canine + 5 │ friends(location: Location): [Hominid] + │ ^^^^^^^^ + 6 │ } + +✖︎ Union type EmptyFurType must define one or more member types. + + validate_object.graphql:69:7 + 68 │ + 69 │ union EmptyFurType + │ ^^^^^^^^^^^^ + 70 │ diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.graphql b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.graphql index b105dee9283b3..8687f856552f6 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.graphql +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_object.graphql @@ -33,6 +33,22 @@ type Human implements Hominid @fetchable(field_name: "id") { other_friends(location: Location, location: Location): [Human] } +type Ape implements Hominid { + pet: Canine + friends: [Hominid] # Missing argument +} + +type Chimp implements Hominid { + pet: Canine + friends(location: String): [Hominid] # Missing argument +} + +# Implement Hominid twice +type Bonobo implements Hominid & Hominid { + pet: Canine + friends(location: Location): [Hominid] +} + type Query { fetch__Human(id: ID!): Human user: Human diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types.expected index 6399e2e8393c6..07f283ee9203a 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types.expected @@ -19,5 +19,9 @@ type Human implements Hominid @fetchable(field_name: "id") { pet: Pet } ==================================== OUTPUT 
=================================== -Errors: - * 'Query' root type must be provided. +✖︎ 'Query' root type must be provided. + + :1:1 + 1 │ directive @fetchable(field_name: String) on OBJECT + │ ^ + 2 │ diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.expected new file mode 100644 index 0000000000000..a8d2908f8c6f1 --- /dev/null +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +scalar Query + +enum Mutation { + CREATE + UPDATE + DELETE +} + +input Subscription { + id: ID! + name: String! +} + +schema { + query: Query + mutation: Mutation + subscription: Subscription +} +==================================== OUTPUT =================================== +✖︎ Expected an object type for name 'Query', got a scalar. + + :1:1 + 1 │ scalar Query + │ ^ + 2 │ diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.graphql b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.graphql new file mode 100644 index 0000000000000..061cd84ed2ddd --- /dev/null +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_root_types_kind.graphql @@ -0,0 +1,18 @@ +scalar Query + +enum Mutation { + CREATE + UPDATE + DELETE +} + +input Subscription { + id: ID! + name: String! 
+} + +schema { + query: Query + mutation: Mutation + subscription: Subscription +} \ No newline at end of file diff --git a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_union.expected b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_union.expected index 5c3abc3283705..397660e0691d3 100644 --- a/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_union.expected +++ b/compiler/crates/schema-validate/tests/validate_schema/fixtures/validate_union.expected @@ -51,12 +51,18 @@ type Fur { color: String } ==================================== OUTPUT =================================== -Type EmptyFurType with definition: - union EmptyFurType -had errors: - * Union type must define one or more member types. - -Type InvalidFurType with definition: - union InvalidFurType = Hair | Fur | Hair -had errors: - * Union can only include member Hair once. +✖︎ Union type EmptyFurType must define one or more member types. + + validate_union.graphql:41:7 + 40 │ + 41 │ union EmptyFurType + │ ^^^^^^^^^^^^ + 42 │ + +✖︎ Union can only include member Hair once. + + validate_union.graphql:43:7 + 42 │ + 43 │ union InvalidFurType = Hair | Fur | Hair + │ ^^^^^^^^^^^^^^ + 44 │ diff --git a/compiler/crates/schema-validate/tests/validate_schema/mod.rs b/compiler/crates/schema-validate/tests/validate_schema/mod.rs deleted file mode 100644 index c7fd07c7a5e9d..0000000000000 --- a/compiler/crates/schema-validate/tests/validate_schema/mod.rs +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -use fixture_tests::Fixture; -use schema::build_schema; -use schema_validate_lib::validate; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let schema = build_schema(fixture.content).unwrap(); - Ok(validate(&schema).print_errors()) -} diff --git a/compiler/crates/schema-validate/tests/validate_schema_test.rs b/compiler/crates/schema-validate/tests/validate_schema_test.rs index acfbcb478f815..f7b6fd838820e 100644 --- a/compiler/crates/schema-validate/tests/validate_schema_test.rs +++ b/compiler/crates/schema-validate/tests/validate_schema_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<> + * @generated SignedSource<> */ mod validate_schema; @@ -12,44 +12,58 @@ mod validate_schema; use validate_schema::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn validate_directives() { +#[tokio::test] +async fn validate_directives() { let input = include_str!("validate_schema/fixtures/validate_directives.graphql"); let expected = include_str!("validate_schema/fixtures/validate_directives.expected"); - test_fixture(transform_fixture, "validate_directives.graphql", "validate_schema/fixtures/validate_directives.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_directives.graphql", "validate_schema/fixtures/validate_directives.expected", input, expected).await; } -#[test] -fn validate_enum() { +#[tokio::test] +async fn validate_enum() { let input = include_str!("validate_schema/fixtures/validate_enum.graphql"); let expected = include_str!("validate_schema/fixtures/validate_enum.expected"); - test_fixture(transform_fixture, "validate_enum.graphql", "validate_schema/fixtures/validate_enum.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_enum.graphql", "validate_schema/fixtures/validate_enum.expected", input, expected).await; } -#[test] -fn 
validate_interface_implements_interface_cyclic() { +#[tokio::test] +async fn validate_implements_interface() { + let input = include_str!("validate_schema/fixtures/validate_implements_interface.graphql"); + let expected = include_str!("validate_schema/fixtures/validate_implements_interface.expected"); + test_fixture(transform_fixture, file!(), "validate_implements_interface.graphql", "validate_schema/fixtures/validate_implements_interface.expected", input, expected).await; +} + +#[tokio::test] +async fn validate_interface_implements_interface_cyclic() { let input = include_str!("validate_schema/fixtures/validate_interface_implements_interface_cyclic.graphql"); let expected = include_str!("validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected"); - test_fixture(transform_fixture, "validate_interface_implements_interface_cyclic.graphql", "validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_interface_implements_interface_cyclic.graphql", "validate_schema/fixtures/validate_interface_implements_interface_cyclic.expected", input, expected).await; } -#[test] -fn validate_object() { +#[tokio::test] +async fn validate_object() { let input = include_str!("validate_schema/fixtures/validate_object.graphql"); let expected = include_str!("validate_schema/fixtures/validate_object.expected"); - test_fixture(transform_fixture, "validate_object.graphql", "validate_schema/fixtures/validate_object.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_object.graphql", "validate_schema/fixtures/validate_object.expected", input, expected).await; } -#[test] -fn validate_root_types() { +#[tokio::test] +async fn validate_root_types() { let input = include_str!("validate_schema/fixtures/validate_root_types.graphql"); let expected = include_str!("validate_schema/fixtures/validate_root_types.expected"); - test_fixture(transform_fixture, 
"validate_root_types.graphql", "validate_schema/fixtures/validate_root_types.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_root_types.graphql", "validate_schema/fixtures/validate_root_types.expected", input, expected).await; +} + +#[tokio::test] +async fn validate_root_types_kind() { + let input = include_str!("validate_schema/fixtures/validate_root_types_kind.graphql"); + let expected = include_str!("validate_schema/fixtures/validate_root_types_kind.expected"); + test_fixture(transform_fixture, file!(), "validate_root_types_kind.graphql", "validate_schema/fixtures/validate_root_types_kind.expected", input, expected).await; } -#[test] -fn validate_union() { +#[tokio::test] +async fn validate_union() { let input = include_str!("validate_schema/fixtures/validate_union.graphql"); let expected = include_str!("validate_schema/fixtures/validate_union.expected"); - test_fixture(transform_fixture, "validate_union.graphql", "validate_schema/fixtures/validate_union.expected", input, expected); + test_fixture(transform_fixture, file!(), "validate_union.graphql", "validate_schema/fixtures/validate_union.expected", input, expected).await; } diff --git a/compiler/crates/schema/Cargo.toml b/compiler/crates/schema/Cargo.toml index e1e7df77cda31..50f632e2c78c2 100644 --- a/compiler/crates/schema/Cargo.toml +++ b/compiler/crates/schema/Cargo.toml @@ -1,9 +1,11 @@ # @generated by autocargo from //relay/oss/crates/schema:[schema,schema_test] + [package] name = "schema" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [[test]] @@ -12,17 +14,20 @@ path = "tests/build_schema_test.rs" [dependencies] common = { path = "../common" } -dashmap = { version = "5.4", features = ["raw-api", "rayon", "serde"] } +dashmap = { version = "5.5.3", features = ["rayon", "serde"] } flatbuffers = "2.0" fnv = "1.0" graphql-syntax = { path = "../graphql-syntax" } intern = { path = "../intern" } 
lazy_static = "1.4" -ouroboros = "0.8" +ouroboros = "0.18.4" +rayon = "1.9.0" schema-flatbuffer = { path = "../schema-flatbuffer" } +serde = { version = "1.0.185", features = ["derive", "rc"] } strsim = "0.10.0" -thiserror = "1.0.36" +thiserror = "1.0.49" [dev-dependencies] fixture-tests = { path = "../fixture-tests" } graphql-cli = { path = "../graphql-cli" } +tokio = { version = "1.37.0", features = ["full", "test-util", "tracing"] } diff --git a/compiler/crates/schema/src/definitions.rs b/compiler/crates/schema/src/definitions.rs new file mode 100644 index 0000000000000..297d25a7ee6bc --- /dev/null +++ b/compiler/crates/schema/src/definitions.rs @@ -0,0 +1,714 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +mod interface; + +use std::collections::HashMap; +use std::fmt; +use std::hash::Hash; +use std::slice::Iter; + +use ::intern::string_key::Intern; +use ::intern::string_key::StringKey; +use common::ArgumentName; +use common::DirectiveName; +use common::EnumName; +use common::InputObjectName; +use common::InterfaceName; +use common::Location; +use common::Named; +use common::NamedItem; +use common::ObjectName; +use common::ScalarName; +use common::UnionName; +use common::WithLocation; +use graphql_syntax::ConstantValue; +use graphql_syntax::DirectiveLocation; +pub use interface::*; +use intern::intern; +use lazy_static::lazy_static; + +use crate::Schema; + +lazy_static! { + static ref DIRECTIVE_DEPRECATED: DirectiveName = DirectiveName("deprecated".intern()); + static ref ARGUMENT_REASON: ArgumentName = ArgumentName("reason".intern()); + static ref SEMANTIC_NON_NULL_DIRECTIVE: DirectiveName = + DirectiveName("semanticNonNull".intern()); + static ref LEVELS_ARGUMENT: ArgumentName = ArgumentName("levels".intern()); +} + +pub(crate) type TypeMap = HashMap; + +macro_rules! 
type_id { + ($name:ident, $type:ident) => { + #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize)] + pub struct $name(pub $type); + impl $name { + pub(crate) fn as_usize(&self) -> usize { + self.0 as usize + } + } + + impl fmt::Debug for $name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}({})", stringify!($name), self.0) + } + } + + impl fmt::Display for $name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } + } + }; +} + +type_id!(EnumID, u32); +type_id!(InputObjectID, u32); +type_id!(InterfaceID, u32); +type_id!(ObjectID, u32); +type_id!(ScalarID, u32); +type_id!(UnionID, u32); +type_id!(FieldID, u32); + +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize)] +pub enum Type { + Enum(EnumID), + InputObject(InputObjectID), + Interface(InterfaceID), + Object(ObjectID), + Scalar(ScalarID), + Union(UnionID), +} + +impl Type { + pub fn is_scalar(self) -> bool { + matches!(self, Type::Scalar(_)) + } + + pub fn is_enum(self) -> bool { + matches!(self, Type::Enum(_)) + } + + pub fn is_input_type(self) -> bool { + matches!(self, Type::Scalar(_) | Type::Enum(_) | Type::InputObject(_)) + } + + pub fn is_abstract_type(self) -> bool { + matches!(self, Type::Union(_) | Type::Interface(_)) + } + + pub fn is_composite_type(self) -> bool { + matches!(self, Type::Object(_) | Type::Interface(_) | Type::Union(_)) + } + + pub fn is_object(self) -> bool { + matches!(self, Type::Object(_)) + } + + pub fn is_input_object(self) -> bool { + matches!(self, Type::InputObject(_)) + } + + pub fn is_interface(self) -> bool { + matches!(self, Type::Interface(_)) + } + + pub fn is_object_or_interface(self) -> bool { + matches!(self, Type::Object(_) | Type::Interface(_)) + } + + pub fn is_union(self) -> bool { + matches!(self, Type::Union(_)) + } + + pub fn is_root_type(&self, schema: &S) -> bool { + Some(*self) == schema.query_type() + || Some(*self) == 
schema.mutation_type() + || Some(*self) == schema.subscription_type() + } + + pub fn get_enum_id(self) -> Option { + match self { + Type::Enum(id) => Some(id), + _ => None, + } + } + + pub fn get_input_object_id(self) -> Option { + match self { + Type::InputObject(id) => Some(id), + _ => None, + } + } + + pub fn get_interface_id(self) -> Option { + match self { + Type::Interface(id) => Some(id), + _ => None, + } + } + + pub fn get_object_id(self) -> Option { + match self { + Type::Object(id) => Some(id), + _ => None, + } + } + + pub fn get_scalar_id(self) -> Option { + match self { + Type::Scalar(id) => Some(id), + _ => None, + } + } + + pub fn get_union_id(self) -> Option { + match self { + Type::Union(id) => Some(id), + _ => None, + } + } +} + +impl fmt::Debug for Type { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Type::Enum(id) => f.write_fmt(format_args!("Enum({:?})", id.0)), + Type::InputObject(id) => f.write_fmt(format_args!("InputObject({:?})", id.0)), + Type::Interface(id) => f.write_fmt(format_args!("Interface({:?})", id.0)), + Type::Object(id) => f.write_fmt(format_args!("Object({:?})", id.0)), + Type::Scalar(id) => f.write_fmt(format_args!("Scalar({:?})", id.0)), + Type::Union(id) => f.write_fmt(format_args!("Union({:?})", id.0)), + } + } +} + +impl Type { + pub fn get_variant_name(&self) -> &'static str { + match self { + Type::Enum(_) => "an enum", + Type::InputObject(_) => "an input object", + Type::Interface(_) => "an interface", + Type::Object(_) => "an object", + Type::Scalar(_) => "a scalar", + Type::Union(_) => "a union", + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize)] +pub enum TypeReference { + Named(T), + NonNull(Box>), + List(Box>), +} + +impl TypeReference { + pub fn inner(&self) -> T { + match self { + TypeReference::Named(type_) => *type_, + TypeReference::List(of) => of.inner(), + TypeReference::NonNull(of) => of.inner(), + } + } + + pub fn non_null(&self) -> 
TypeReference { + match self { + TypeReference::Named(_) => TypeReference::NonNull(Box::new(self.clone())), + TypeReference::List(_) => TypeReference::NonNull(Box::new(self.clone())), + TypeReference::NonNull(_) => self.clone(), + } + } + + // Given a multi-dimensional list type, return a new type where the level'th nested + // list is non-null. + pub fn with_non_null_level(&self, level: i64) -> TypeReference { + match self { + TypeReference::Named(_) => { + if level == 0 { + self.non_null() + } else { + panic!("Invalid level {} for Named type", level) + } + } + TypeReference::List(of) => { + if level == 0 { + self.non_null() + } else { + TypeReference::List(Box::new(of.with_non_null_level(level - 1))) + } + } + TypeReference::NonNull(of) => { + if level == 0 { + panic!("Invalid level {} for NonNull type", level) + } else { + TypeReference::NonNull(Box::new(of.with_non_null_level(level))) + } + } + } + } + + // If the type is Named or NonNull return the inner named. + // If the type is a List or NonNull returns a matching list with nullable items. 
+ pub fn with_nullable_item_type(&self) -> TypeReference { + match self { + TypeReference::Named(_) => self.clone(), + TypeReference::List(of) => TypeReference::List(Box::new(of.nullable_type().clone())), + TypeReference::NonNull(of) => { + let inner: &TypeReference = of; + match inner { + TypeReference::List(_) => { + TypeReference::NonNull(Box::new(of.with_nullable_item_type())) + } + TypeReference::Named(_) => inner.clone(), + TypeReference::NonNull(_) => { + unreachable!("Invalid nested TypeReference::NonNull") + } + } + } + } + } + + // Return None if the type is a List, otherwise return the inner type + pub fn non_list_type(&self) -> Option { + match self { + TypeReference::List(_) => None, + TypeReference::Named(type_) => Some(*type_), + TypeReference::NonNull(of) => of.non_list_type(), + } + } +} + +// Tests for TypeReference::with_non_null_level +#[test] +fn test_with_non_null_level() { + let matrix = TypeReference::List(Box::new(TypeReference::List(Box::new( + TypeReference::Named(Type::Scalar(ScalarID(0))), + )))); + + assert_eq!( + matrix.with_non_null_level(0), + TypeReference::NonNull(Box::new(TypeReference::List(Box::new( + TypeReference::List(Box::new(TypeReference::Named(Type::Scalar(ScalarID(0))))) + )))) + ); + + assert_eq!( + matrix.with_non_null_level(1), + TypeReference::List(Box::new(TypeReference::NonNull(Box::new( + TypeReference::List(Box::new(TypeReference::Named(Type::Scalar(ScalarID(0))))) + )))) + ); + + assert_eq!( + matrix.with_non_null_level(0), + TypeReference::NonNull(Box::new(TypeReference::List(Box::new( + TypeReference::List(Box::new(TypeReference::Named(Type::Scalar(ScalarID(0))))) + )))) + ); + + assert_eq!( + TypeReference::Named(Type::Scalar(ScalarID(0))).with_non_null_level(0), + TypeReference::NonNull(Box::new(TypeReference::Named(Type::Scalar(ScalarID(0))))), + ); +} + +impl TypeReference { + pub fn map(self, transform: impl FnOnce(T) -> U) -> TypeReference { + match self { + TypeReference::Named(inner) => 
TypeReference::Named(transform(inner)), + TypeReference::NonNull(inner) => TypeReference::NonNull(Box::new(inner.map(transform))), + TypeReference::List(inner) => TypeReference::List(Box::new(inner.map(transform))), + } + } + + pub fn as_ref(&self) -> TypeReference<&T> { + match self { + TypeReference::Named(inner) => TypeReference::Named(inner), + TypeReference::NonNull(inner) => { + TypeReference::NonNull(Box::new(Box::as_ref(inner).as_ref())) + } + TypeReference::List(inner) => { + TypeReference::List(Box::new(Box::as_ref(inner).as_ref())) + } + } + } + + pub fn nullable_type(&self) -> &TypeReference { + match self { + TypeReference::Named(_) => self, + TypeReference::List(_) => self, + TypeReference::NonNull(of) => of, + } + } + + pub fn is_non_null(&self) -> bool { + matches!(self, TypeReference::NonNull(_)) + } + + pub fn is_list(&self) -> bool { + matches!(self.nullable_type(), TypeReference::List(_)) + } + + pub fn list_item_type(&self) -> Option<&TypeReference> { + match self.nullable_type() { + TypeReference::List(of) => Some(of), + _ => None, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Directive { + pub name: WithLocation, + pub arguments: ArgumentDefinitions, + pub locations: Vec, + pub repeatable: bool, + pub is_extension: bool, + pub description: Option, + pub hack_source: Option, +} + +impl Named for Directive { + type Name = DirectiveName; + fn name(&self) -> DirectiveName { + self.name.item + } +} + +#[derive(Clone, Debug)] +pub struct Scalar { + pub name: WithLocation, + pub is_extension: bool, + pub directives: Vec, + pub description: Option, + pub hack_source: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct Object { + pub name: WithLocation, + pub is_extension: bool, + pub fields: Vec, + pub interfaces: Vec, + pub directives: Vec, + pub description: Option, + pub hack_source: Option, +} + +impl Object { + pub fn named_field(&self, name: StringKey, schema: &S) -> Option { + self.fields + .iter() + 
.find(|field_id| schema.field(**field_id).name.item == name) + .copied() + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct InputObject { + pub name: WithLocation, + pub fields: ArgumentDefinitions, + pub directives: Vec, + pub description: Option, + pub hack_source: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct Enum { + pub name: WithLocation, + pub is_extension: bool, + pub values: Vec, + pub directives: Vec, + pub description: Option, + pub hack_source: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct Union { + pub name: WithLocation, + pub is_extension: bool, + pub members: Vec, + pub directives: Vec, + pub description: Option, + pub hack_source: Option, +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct Field { + pub name: WithLocation, + pub is_extension: bool, + pub arguments: ArgumentDefinitions, + pub type_: TypeReference, + pub directives: Vec, + /// The type on which this field was defined. This field is (should) + /// always be set, except for special fields such as __typename and + /// __id, which are queryable on all types and therefore don't have + /// a single parent type. + pub parent_type: Option, + pub description: Option, + pub hack_source: Option, +} + +pub struct Deprecation { + pub reason: Option, +} + +impl Field { + pub fn deprecated(&self) -> Option { + self.directives + .named(*DIRECTIVE_DEPRECATED) + .map(|directive| Deprecation { + reason: directive + .arguments + .named(*ARGUMENT_REASON) + .and_then(|reason| reason.value.get_string_literal()), + }) + } + + /// Returns semantic type of a field with the `@semanticNonNull` directive, i.e. its non-null type + /// for the purposes of type generation. + /// + /// The `@semanticNonNull` directive is used to annotate fields that are only null when an error occurs. 
This + /// differs from the GraphQL spec's non-null syntax which is used to denote fields that are never null; if such + /// a field were going to become null due to an error, the error would bubble up to the next nullable field/object in the query. + /// @semanticNonNull fields do not bubble up errors, instead becoming null in the error case but in no other case. + /// See also: https://specs.apollo.dev/nullability/v0.3/#@semanticNonNull + pub fn semantic_type(&self) -> TypeReference { + match self.directives.named(*SEMANTIC_NON_NULL_DIRECTIVE) { + Some(directive) => { + match directive + .arguments + .named(*LEVELS_ARGUMENT) + .map(|levels| levels.expect_int_list()) + { + Some(levels) => { + let mut type_ = self.type_.clone(); + for level in levels { + type_ = type_.with_non_null_level(level); + } + type_ + } + None => self.type_.non_null(), + } + } + None => self.type_.clone(), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct Argument { + pub name: WithLocation, + pub type_: TypeReference, + pub default_value: Option, + pub description: Option, + pub directives: Vec, +} + +impl Argument { + pub fn deprecated(&self) -> Option { + self.directives + .named(*DIRECTIVE_DEPRECATED) + .map(|directive| Deprecation { + reason: directive + .arguments + .named(*ARGUMENT_REASON) + .and_then(|reason| reason.value.get_string_literal()), + }) + } +} + +impl Named for Argument { + type Name = ArgumentName; + fn name(&self) -> ArgumentName { + self.name.item + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct ArgumentValue { + pub name: ArgumentName, + pub value: ConstantValue, +} + +impl ArgumentValue { + /// If the value is a constant string literal, return the value, otherwise None. + pub fn get_string_literal(&self) -> Option { + if let ConstantValue::String(string_node) = &self.value { + Some(string_node.value) + } else { + None + } + } + /// Return the constant string literal of this value. 
+ /// Panics if the value is not a constant string literal. + pub fn expect_string_literal(&self) -> StringKey { + self.get_string_literal().unwrap_or_else(|| { + panic!("expected a string literal, got {:?}", self); + }) + } + /// Return the constant string literal of this value. + /// Panics if the value is not a constant string literal. + pub fn expect_int_list(&self) -> Vec { + if let ConstantValue::List(list) = &self.value { + list.items + .iter() + .map(|item| { + if let ConstantValue::Int(int) = item { + int.value + } else { + panic!("expected a int literal, got {:?}", item); + } + }) + .collect() + } else { + panic!("expected a list, got {:?}", self); + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] +pub struct DirectiveValue { + pub name: DirectiveName, + pub arguments: Vec, +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct EnumValue { + pub value: StringKey, + pub directives: Vec, +} + +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct ArgumentDefinitions(pub(crate) Vec); + +impl ArgumentDefinitions { + pub fn new(arguments: Vec) -> Self { + Self(arguments) + } + + pub fn named(&self, name: ArgumentName) -> Option<&Argument> { + self.0.named(name) + } + + pub fn contains(&self, name: StringKey) -> bool { + self.0.iter().any(|x| x.name.item == ArgumentName(name)) + } + + pub fn iter(&self) -> Iter<'_, Argument> { + self.0.iter() + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl fmt::Debug for ArgumentDefinitions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_fmt(format_args!("{:#?}", self.0)) + } +} + +impl IntoIterator for ArgumentDefinitions { + type Item = Argument; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +pub trait TypeWithFields { + fn type_kind(&self) -> StringKey; + fn fields(&self) -> &Vec; + fn interfaces(&self) -> &Vec; + fn location(&self) -> &Location; +} + +impl 
TypeWithFields for Interface { + fn type_kind(&self) -> StringKey { + intern!("interface") + } + + fn fields(&self) -> &Vec { + &self.fields + } + + fn interfaces(&self) -> &Vec { + &self.interfaces + } + + fn location(&self) -> &Location { + &self.name.location + } +} + +impl TypeWithFields for Object { + fn type_kind(&self) -> StringKey { + intern!("type") + } + fn fields(&self) -> &Vec { + &self.fields + } + + fn interfaces(&self) -> &Vec { + &self.interfaces + } + + fn location(&self) -> &Location { + &self.name.location + } +} + +#[allow(unused_macros)] +macro_rules! impl_named { + ($type_name:ident) => { + impl Named for $type_name { + type Name = StringKey; + fn name(&self) -> StringKey { + self.name + } + } + }; +} + +macro_rules! impl_named_for_with_location { + ($type_name:ident, $name_type:ident) => { + impl Named for $type_name { + type Name = $name_type; + fn name(&self) -> $name_type { + self.name.item + } + } + }; +} + +impl_named_for_with_location!(Object, ObjectName); +impl_named_for_with_location!(Field, StringKey); +impl_named_for_with_location!(InputObject, InputObjectName); +impl_named_for_with_location!(Interface, InterfaceName); +impl_named_for_with_location!(Union, UnionName); +impl_named_for_with_location!(Scalar, ScalarName); +impl_named_for_with_location!(Enum, EnumName); + +impl Named for DirectiveValue { + type Name = DirectiveName; + fn name(&self) -> DirectiveName { + self.name + } +} + +impl Named for ArgumentValue { + type Name = ArgumentName; + fn name(&self) -> ArgumentName { + self.name + } +} diff --git a/compiler/crates/schema/src/definitions/interface.rs b/compiler/crates/schema/src/definitions/interface.rs index 284b35f2b48b8..6c0b7914301ba 100644 --- a/compiler/crates/schema/src/definitions/interface.rs +++ b/compiler/crates/schema/src/definitions/interface.rs @@ -27,9 +27,17 @@ pub struct Interface { pub directives: Vec, pub interfaces: Vec, pub description: Option, + pub hack_source: Option, } impl Interface { + pub fn 
named_field(&self, name: StringKey, schema: &S) -> Option { + self.fields + .iter() + .find(|field_id| schema.field(**field_id).name.item == name) + .copied() + } + /// Return all objects that implement, directly or recursively, a given interface. /// The iteration order of the HashSet might depend on the order in which schema files /// are processed. It should not be relied upon when generating artifacts. @@ -227,6 +235,7 @@ mod test { directives: vec![], interfaces: vec![], description: None, + hack_source: None, } } diff --git a/compiler/crates/schema/src/definitions/mod.rs b/compiler/crates/schema/src/definitions/mod.rs deleted file mode 100644 index 3b1eeae2dbc2a..0000000000000 --- a/compiler/crates/schema/src/definitions/mod.rs +++ /dev/null @@ -1,565 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -mod interface; - -use std::collections::HashMap; -use std::fmt; -use std::hash::Hash; -use std::slice::Iter; - -use common::ArgumentName; -use common::DirectiveName; -use common::EnumName; -use common::InputObjectName; -use common::InterfaceName; -use common::Named; -use common::NamedItem; -use common::ObjectName; -use common::ScalarName; -use common::WithLocation; -use graphql_syntax::ConstantValue; -use graphql_syntax::DirectiveLocation; -pub use interface::*; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use lazy_static::lazy_static; - -use crate::Schema; - -lazy_static! { - static ref DIRECTIVE_DEPRECATED: DirectiveName = DirectiveName("deprecated".intern()); - static ref ARGUMENT_REASON: ArgumentName = ArgumentName("reason".intern()); -} - -pub(crate) type TypeMap = HashMap; - -macro_rules! 
type_id { - ($name:ident, $type:ident) => { - #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] - pub struct $name(pub $type); - impl $name { - pub(crate) fn as_usize(&self) -> usize { - self.0 as usize - } - } - - impl fmt::Debug for $name { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}({})", stringify!($name), self.0) - } - } - - impl fmt::Display for $name { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } - } - }; -} - -type_id!(EnumID, u32); -type_id!(InputObjectID, u32); -type_id!(InterfaceID, u32); -type_id!(ObjectID, u32); -type_id!(ScalarID, u32); -type_id!(UnionID, u32); -type_id!(FieldID, u32); - -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub enum Type { - Enum(EnumID), - InputObject(InputObjectID), - Interface(InterfaceID), - Object(ObjectID), - Scalar(ScalarID), - Union(UnionID), -} - -impl Type { - pub fn is_scalar(self) -> bool { - matches!(self, Type::Scalar(_)) - } - - pub fn is_enum(self) -> bool { - matches!(self, Type::Enum(_)) - } - - pub fn is_input_type(self) -> bool { - matches!(self, Type::Scalar(_) | Type::Enum(_) | Type::InputObject(_)) - } - - pub fn is_abstract_type(self) -> bool { - matches!(self, Type::Union(_) | Type::Interface(_)) - } - - pub fn is_composite_type(self) -> bool { - matches!(self, Type::Object(_) | Type::Interface(_) | Type::Union(_)) - } - - pub fn is_object(self) -> bool { - matches!(self, Type::Object(_)) - } - - pub fn is_input_object(self) -> bool { - matches!(self, Type::InputObject(_)) - } - - pub fn is_interface(self) -> bool { - matches!(self, Type::Interface(_)) - } - - pub fn is_object_or_interface(self) -> bool { - matches!(self, Type::Object(_) | Type::Interface(_)) - } - - pub fn is_union(self) -> bool { - matches!(self, Type::Union(_)) - } - - pub fn is_root_type(&self, schema: &S) -> bool { - Some(*self) == schema.query_type() - || Some(*self) == schema.mutation_type() - || Some(*self) == 
schema.subscription_type() - } - - pub fn get_enum_id(self) -> Option { - match self { - Type::Enum(id) => Some(id), - _ => None, - } - } - - pub fn get_input_object_id(self) -> Option { - match self { - Type::InputObject(id) => Some(id), - _ => None, - } - } - - pub fn get_interface_id(self) -> Option { - match self { - Type::Interface(id) => Some(id), - _ => None, - } - } - - pub fn get_object_id(self) -> Option { - match self { - Type::Object(id) => Some(id), - _ => None, - } - } - - pub fn get_scalar_id(self) -> Option { - match self { - Type::Scalar(id) => Some(id), - _ => None, - } - } - - pub fn get_union_id(self) -> Option { - match self { - Type::Union(id) => Some(id), - _ => None, - } - } -} - -impl fmt::Debug for Type { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Type::Enum(id) => f.write_fmt(format_args!("Enum({:?})", id.0)), - Type::InputObject(id) => f.write_fmt(format_args!("InputObject({:?})", id.0)), - Type::Interface(id) => f.write_fmt(format_args!("Interface({:?})", id.0)), - Type::Object(id) => f.write_fmt(format_args!("Object({:?})", id.0)), - Type::Scalar(id) => f.write_fmt(format_args!("Scalar({:?})", id.0)), - Type::Union(id) => f.write_fmt(format_args!("Union({:?})", id.0)), - } - } -} - -impl Type { - pub fn get_variant_name(&self) -> &'static str { - match self { - Type::Enum(_) => "an enum", - Type::InputObject(_) => "an input object", - Type::Interface(_) => "an interface", - Type::Object(_) => "an object", - Type::Scalar(_) => "a scalar", - Type::Union(_) => "a union", - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub enum TypeReference { - Named(T), - NonNull(Box>), - List(Box>), -} - -impl TypeReference { - pub fn inner(&self) -> T { - match self { - TypeReference::Named(type_) => *type_, - TypeReference::List(of) => of.inner(), - TypeReference::NonNull(of) => of.inner(), - } - } - - pub fn non_null(&self) -> TypeReference { - match self { - TypeReference::Named(_) => 
TypeReference::NonNull(Box::new(self.clone())), - TypeReference::List(_) => TypeReference::NonNull(Box::new(self.clone())), - TypeReference::NonNull(_) => self.clone(), - } - } - - // If the type is Named or NonNull return the inner named. - // If the type is a List or NonNull returns a matching list with nullable items. - pub fn with_nullable_item_type(&self) -> TypeReference { - match self { - TypeReference::Named(_) => self.clone(), - TypeReference::List(of) => TypeReference::List(Box::new(of.nullable_type().clone())), - TypeReference::NonNull(of) => { - let inner: &TypeReference = of; - match inner { - TypeReference::List(_) => { - TypeReference::NonNull(Box::new(of.with_nullable_item_type())) - } - TypeReference::Named(_) => inner.clone(), - TypeReference::NonNull(_) => { - unreachable!("Invalid nested TypeReference::NonNull") - } - } - } - } - } - - // Return None if the type is a List, otherwise return the inner type - pub fn non_list_type(&self) -> Option { - match self { - TypeReference::List(_) => None, - TypeReference::Named(type_) => Some(*type_), - TypeReference::NonNull(of) => of.non_list_type(), - } - } -} - -impl TypeReference { - pub fn map(self, transform: impl FnOnce(T) -> U) -> TypeReference { - match self { - TypeReference::Named(inner) => TypeReference::Named(transform(inner)), - TypeReference::NonNull(inner) => TypeReference::NonNull(Box::new(inner.map(transform))), - TypeReference::List(inner) => TypeReference::List(Box::new(inner.map(transform))), - } - } - - pub fn as_ref(&self) -> TypeReference<&T> { - match self { - TypeReference::Named(inner) => TypeReference::Named(inner), - TypeReference::NonNull(inner) => { - TypeReference::NonNull(Box::new(Box::as_ref(inner).as_ref())) - } - TypeReference::List(inner) => { - TypeReference::List(Box::new(Box::as_ref(inner).as_ref())) - } - } - } - - pub fn nullable_type(&self) -> &TypeReference { - match self { - TypeReference::Named(_) => self, - TypeReference::List(_) => self, - 
TypeReference::NonNull(of) => of, - } - } - - pub fn is_non_null(&self) -> bool { - matches!(self, TypeReference::NonNull(_)) - } - - pub fn is_list(&self) -> bool { - matches!(self.nullable_type(), TypeReference::List(_)) - } - - pub fn list_item_type(&self) -> Option<&TypeReference> { - match self.nullable_type() { - TypeReference::List(of) => Some(of), - _ => None, - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Directive { - pub name: DirectiveName, - pub arguments: ArgumentDefinitions, - pub locations: Vec, - pub repeatable: bool, - pub is_extension: bool, - pub description: Option, -} - -impl Named for Directive { - type Name = DirectiveName; - fn name(&self) -> DirectiveName { - self.name - } -} - -#[derive(Clone, Debug)] -pub struct Scalar { - pub name: WithLocation, - pub is_extension: bool, - pub directives: Vec, - pub description: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct Object { - pub name: WithLocation, - pub is_extension: bool, - pub fields: Vec, - pub interfaces: Vec, - pub directives: Vec, - pub description: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct InputObject { - pub name: WithLocation, - pub fields: ArgumentDefinitions, - pub directives: Vec, - pub description: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct Enum { - pub name: WithLocation, - pub is_extension: bool, - pub values: Vec, - pub directives: Vec, - pub description: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct Union { - pub name: WithLocation, - pub is_extension: bool, - pub members: Vec, - pub directives: Vec, - pub description: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct Field { - pub name: WithLocation, - pub is_extension: bool, - pub arguments: ArgumentDefinitions, - pub type_: TypeReference, - pub directives: Vec, - /// The type on which this field was defined. 
This field is (should) - /// always be set, except for special fields such as __typename and - /// __id, which are queryable on all types and therefore don't have - /// a single parent type. - pub parent_type: Option, - pub description: Option, -} - -pub struct Deprecation { - pub reason: Option, -} - -impl Field { - pub fn deprecated(&self) -> Option { - self.directives - .named(*DIRECTIVE_DEPRECATED) - .map(|directive| Deprecation { - reason: directive - .arguments - .named(*ARGUMENT_REASON) - .and_then(|reason| reason.value.get_string_literal()), - }) - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub struct Argument { - pub name: ArgumentName, - pub type_: TypeReference, - pub default_value: Option, - pub description: Option, - pub directives: Vec, -} - -impl Argument { - pub fn deprecated(&self) -> Option { - self.directives - .named(*DIRECTIVE_DEPRECATED) - .map(|directive| Deprecation { - reason: directive - .arguments - .named(*ARGUMENT_REASON) - .and_then(|reason| reason.value.get_string_literal()), - }) - } -} - -impl Named for Argument { - type Name = ArgumentName; - fn name(&self) -> ArgumentName { - self.name - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct ArgumentValue { - pub name: ArgumentName, - pub value: ConstantValue, -} - -impl ArgumentValue { - /// If the value is a constant string literal, return the value, otherwise None. - pub fn get_string_literal(&self) -> Option { - if let ConstantValue::String(string_node) = &self.value { - Some(string_node.value) - } else { - None - } - } - /// Return the constant string literal of this value. - /// Panics if the value is not a constant string literal. 
- pub fn expect_string_literal(&self) -> StringKey { - self.get_string_literal().unwrap_or_else(|| { - panic!("expected a string literal, got {:?}", self); - }) - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct DirectiveValue { - pub name: DirectiveName, - pub arguments: Vec, -} - -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct EnumValue { - pub value: StringKey, - pub directives: Vec, -} - -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] -pub struct ArgumentDefinitions(pub(crate) Vec); - -impl ArgumentDefinitions { - pub fn new(arguments: Vec) -> Self { - Self(arguments) - } - - pub fn named(&self, name: ArgumentName) -> Option<&Argument> { - self.0.named(name) - } - - pub fn contains(&self, name: StringKey) -> bool { - self.0.iter().any(|x| x.name == ArgumentName(name)) - } - - pub fn iter(&self) -> Iter<'_, Argument> { - self.0.iter() - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl fmt::Debug for ArgumentDefinitions { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_fmt(format_args!("{:#?}", self.0)) - } -} - -impl IntoIterator for ArgumentDefinitions { - type Item = Argument; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -pub trait TypeWithFields { - fn fields(&self) -> &Vec; - fn interfaces(&self) -> &Vec; -} - -impl TypeWithFields for Interface { - fn fields(&self) -> &Vec { - &self.fields - } - - fn interfaces(&self) -> &Vec { - &self.interfaces - } -} - -impl TypeWithFields for Object { - fn fields(&self) -> &Vec { - &self.fields - } - - fn interfaces(&self) -> &Vec { - &self.interfaces - } -} - -#[allow(unused_macros)] -macro_rules! impl_named { - ($type_name:ident) => { - impl Named for $type_name { - type Name = StringKey; - fn name(&self) -> StringKey { - self.name - } - } - }; -} - -macro_rules! 
impl_named_for_with_location { - ($type_name:ident, $name_type:ident) => { - impl Named for $type_name { - type Name = $name_type; - fn name(&self) -> $name_type { - self.name.item - } - } - }; -} - -impl_named_for_with_location!(Object, ObjectName); -impl_named_for_with_location!(Field, StringKey); -impl_named_for_with_location!(InputObject, InputObjectName); -impl_named_for_with_location!(Interface, InterfaceName); -impl_named_for_with_location!(Union, StringKey); -impl_named_for_with_location!(Scalar, ScalarName); -impl_named_for_with_location!(Enum, EnumName); - -impl Named for DirectiveValue { - type Name = DirectiveName; - fn name(&self) -> DirectiveName { - self.name - } -} - -impl Named for ArgumentValue { - type Name = ArgumentName; - fn name(&self) -> ArgumentName { - self.name - } -} diff --git a/compiler/crates/schema/src/errors.rs b/compiler/crates/schema/src/errors.rs index 6a8cc11158fc4..43f73bb66a713 100644 --- a/compiler/crates/schema/src/errors.rs +++ b/compiler/crates/schema/src/errors.rs @@ -5,18 +5,16 @@ * LICENSE file in the root directory of this source tree. 
*/ -use graphql_syntax::OperationType; use intern::string_key::StringKey; use thiserror::Error; -use crate::definitions::Type; - pub type Result = std::result::Result; -#[derive(Debug, Error)] +#[derive(Debug, Error, serde::Serialize)] +#[serde(tag = "type", content = "args")] pub enum SchemaError { - #[error("Duplicate {0:?} type definition, got '{1}' and '{2}'.")] - DuplicateOperationDefinition(OperationType, StringKey, StringKey), + #[error("Duplicate {0} type definition, got '{1}' and '{2}'.")] + DuplicateOperationDefinition(String, StringKey, StringKey), #[error("Duplicate directive definition '{0}'.")] DuplicateDirectiveDefinition(StringKey), @@ -24,11 +22,11 @@ pub enum SchemaError { #[error("Cannot extend type '{0}', the type is not defined on the server schema.")] ExtendUndefinedType(StringKey), - #[error("Expected an object type for name '{0}', got '{1:?}'.")] - ExpectedObjectReference(StringKey, Type), + #[error("Expected an object type for name '{0}', got {1}.")] + ExpectedObjectReference(StringKey, String), - #[error("Expected an interface type for name '{0}', got '{1:?}'.")] - ExpectedInterfaceReference(StringKey, Type), + #[error("Expected an interface type for name '{0}', got {1}.")] + ExpectedInterfaceReference(StringKey, String), #[error("Reference to undefined type '{0}'.")] UndefinedType(StringKey), diff --git a/compiler/crates/schema/src/flatbuffer.rs b/compiler/crates/schema/src/flatbuffer.rs new file mode 100644 index 0000000000000..a6190375d1c29 --- /dev/null +++ b/compiler/crates/schema/src/flatbuffer.rs @@ -0,0 +1,665 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +mod serialize; +mod wrapper; + +use std::cmp::Ordering; + +use common::ArgumentName; +use common::DirectiveName; +use common::EnumName; +use common::InputObjectName; +use common::InterfaceName; +use common::ObjectName; +use common::ScalarName; +use common::Span; +use common::UnionName; +use common::WithLocation; +use flatbuffers::ForwardsUOffset; +use flatbuffers::Vector; +use graphql_syntax::BooleanNode; +use graphql_syntax::ConstantArgument; +use graphql_syntax::ConstantValue; +use graphql_syntax::DirectiveLocation; +use graphql_syntax::EnumNode; +use graphql_syntax::FloatNode; +use graphql_syntax::FloatValue; +use graphql_syntax::Identifier; +use graphql_syntax::IntNode; +use graphql_syntax::List; +use graphql_syntax::StringNode; +use graphql_syntax::Token; +use graphql_syntax::TokenKind; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use intern::Lookup; +pub use serialize::serialize_as_flatbuffer; +pub use wrapper::SchemaWrapper; + +use crate::definitions::Argument; +use crate::definitions::Directive; +use crate::definitions::*; + +#[derive(Debug)] +pub struct FlatBufferSchema<'fb> { + query_type: Type, + mutation_type: Option, + subscription_type: Option, + directives: Vector<'fb, ForwardsUOffset>>, + enums: Vector<'fb, ForwardsUOffset>>, + fields: Vector<'fb, ForwardsUOffset>>, + input_objects: Vector<'fb, ForwardsUOffset>>, + interfaces: Vector<'fb, ForwardsUOffset>>, + objects: Vector<'fb, ForwardsUOffset>>, + scalars: Vector<'fb, ForwardsUOffset>>, + types: Vector<'fb, ForwardsUOffset>>, + unions: Vector<'fb, ForwardsUOffset>>, +} + +impl<'fb> FlatBufferSchema<'fb> { + pub fn build(bytes: &'fb [u8]) -> Self { + #![allow(deprecated)] + let fb_schema: schema_flatbuffer::Schema<'fb> = + schema_flatbuffer::get_root_as_schema(bytes); + + let query_type = Type::Object(ObjectID(fb_schema.query_type())); + let mutation_type = fb_schema + .has_mutation_type() + .then(|| Type::Object(ObjectID(fb_schema.mutation_type()))); + let 
subscription_type = fb_schema + .has_subscription_type() + .then(|| Type::Object(ObjectID(fb_schema.subscription_type()))); + + Self { + query_type, + mutation_type, + subscription_type, + directives: fb_schema.directives(), + enums: fb_schema.enums(), + fields: fb_schema.fields(), + input_objects: fb_schema.input_objects(), + interfaces: fb_schema.interfaces(), + objects: fb_schema.objects(), + scalars: fb_schema.scalars(), + types: fb_schema.types(), + unions: fb_schema.unions(), + } + } + + pub fn query_type(&self) -> Type { + self.query_type + } + + pub fn mutation_type(&self) -> Option { + self.mutation_type + } + + pub fn subscription_type(&self) -> Option { + self.subscription_type + } + + pub fn get_type(&self, type_name: StringKey) -> Option { + self.read_type(type_name) + } + + pub fn has_type(&self, type_name: StringKey) -> bool { + self.get_type(type_name).is_some() + } + + pub fn get_directive(&self, directive_name: DirectiveName) -> Option { + self.read_directive(directive_name) + } + + pub fn input_object(&self, id: InputObjectID) -> InputObject { + self.parse_input_object(id).unwrap() + } + + pub fn enum_(&self, id: EnumID) -> Enum { + self.parse_enum(id).unwrap() + } + + pub fn scalar(&self, id: ScalarID) -> Scalar { + self.parse_scalar(id).unwrap() + } + + pub fn field(&self, id: FieldID) -> Field { + self.parse_field(id).unwrap() + } + + pub fn object(&self, id: ObjectID) -> Object { + self.parse_object(id).unwrap() + } + + pub fn union(&self, id: UnionID) -> Union { + self.parse_union(id).unwrap() + } + + pub fn interface(&self, id: InterfaceID) -> Interface { + self.parse_interface(id).unwrap() + } + + fn read_directive(&self, name: DirectiveName) -> Option { + let mut start = 0; + let mut end = self.directives.len(); + let name = name.0.lookup(); + while start < end { + let mid = start + ((end - start) / 2); + match self.directives.get(mid).key_compare_with_value(name) { + Ordering::Less => { + start = mid + 1; + } + Ordering::Equal => { + let 
directive = self.directives.get(mid).value()?; + return self.parse_directive(directive); + } + Ordering::Greater => end = mid, + } + } + None + } + + fn parse_directive(&self, directive: schema_flatbuffer::Directive<'fb>) -> Option { + let locations = directive + .locations()? + .iter() + .map(get_mapped_location) + .collect::>(); + let parsed_directive = Directive { + name: WithLocation::generated(DirectiveName(directive.name()?.intern())), + is_extension: directive.is_extension(), + arguments: self.parse_arguments(directive.arguments()?)?, + locations, + repeatable: directive.repeatable(), + description: None, + hack_source: None, + }; + Some(parsed_directive) + } + + fn read_type(&self, type_name: StringKey) -> Option { + let mut start = 0; + let mut end = self.types.len(); + let type_name = type_name.lookup(); + while start < end { + let mid = start + ((end - start) / 2); + match self.types.get(mid).key_compare_with_value(type_name) { + Ordering::Less => { + start = mid + 1; + } + Ordering::Equal => { + let type_ = self.types.get(mid).value()?; + return Some(self.parse_type(type_)); + } + Ordering::Greater => end = mid, + } + } + None + } + + fn parse_type(&self, type_: schema_flatbuffer::Type<'_>) -> Type { + match type_.kind() { + schema_flatbuffer::TypeKind::Scalar => Type::Scalar(ScalarID(type_.scalar_id())), + schema_flatbuffer::TypeKind::InputObject => { + Type::InputObject(InputObjectID(type_.input_object_id())) + } + schema_flatbuffer::TypeKind::Enum => Type::Enum(EnumID(type_.enum_id())), + schema_flatbuffer::TypeKind::Object => Type::Object(ObjectID(type_.object_id())), + schema_flatbuffer::TypeKind::Interface => { + Type::Interface(InterfaceID(type_.interface_id())) + } + schema_flatbuffer::TypeKind::Union => Type::Union(UnionID(type_.union_id())), + unknown => panic!("unknown TypeKind value: {:?}", unknown), + } + } + + fn parse_scalar(&self, id: ScalarID) -> Option { + let scalar = self.scalars.get(id.0.try_into().unwrap()); + let name = 
ScalarName(scalar.name()?.intern()); + let parsed_scalar = Scalar { + name: WithLocation::generated(name), + is_extension: scalar.is_extension(), + directives: self.parse_directive_values(scalar.directives()?)?, + description: None, + hack_source: None, + }; + Some(parsed_scalar) + } + + fn parse_input_object(&self, id: InputObjectID) -> Option { + let input_object = self.input_objects.get(id.0.try_into().unwrap()); + let name = InputObjectName(input_object.name()?.to_string().intern()); + let parsed_input_object = InputObject { + name: WithLocation::generated(name), + fields: self.parse_arguments(input_object.fields()?)?, + directives: self.parse_directive_values(input_object.directives()?)?, + description: None, + hack_source: None, + }; + Some(parsed_input_object) + } + + fn parse_enum(&self, id: EnumID) -> Option { + let enum_ = self.enums.get(id.0.try_into().unwrap()); + let name = EnumName(enum_.name()?.to_string().intern()); + let parsed_enum = Enum { + name: WithLocation::generated(name), + is_extension: enum_.is_extension(), + values: self.parse_enum_values(enum_.values()?)?, + directives: self.parse_directive_values(enum_.directives()?)?, + description: None, + hack_source: None, + }; + Some(parsed_enum) + } + + fn parse_object(&self, id: ObjectID) -> Option { + let object = self.objects.get(id.0.try_into().unwrap()); + let name = object.name()?.intern(); + let parsed_object = Object { + name: WithLocation::generated(ObjectName(name)), + is_extension: object.is_extension(), + fields: object.fields()?.iter().map(FieldID).collect(), + interfaces: object.interfaces()?.iter().map(InterfaceID).collect(), + directives: self.parse_directive_values(object.directives()?)?, + description: None, + hack_source: None, + }; + Some(parsed_object) + } + + fn parse_interface(&self, id: InterfaceID) -> Option { + let interface = self.interfaces.get(id.0.try_into().unwrap()); + let name = InterfaceName(interface.name()?.intern()); + + let parsed_interface = Interface { + 
name: WithLocation::generated(name), + is_extension: interface.is_extension(), + implementing_interfaces: wrap_ids(interface.implementing_interfaces(), InterfaceID), + implementing_objects: wrap_ids(interface.implementing_objects(), ObjectID), + fields: wrap_ids(interface.fields(), FieldID), + directives: self.parse_directive_values(interface.directives()?)?, + interfaces: wrap_ids(interface.interfaces(), InterfaceID), + description: None, + hack_source: None, + }; + Some(parsed_interface) + } + + fn parse_union(&self, id: UnionID) -> Option { + let union = self.unions.get(id.0.try_into().unwrap()); + let name = UnionName(union.name()?.intern()); + let parsed_union = Union { + name: WithLocation::generated(name), + is_extension: union.is_extension(), + members: wrap_ids(union.members(), ObjectID), + directives: self.parse_directive_values(union.directives()?)?, + description: None, + hack_source: None, + }; + Some(parsed_union) + } + + fn parse_field(&self, id: FieldID) -> Option { + let field = self.fields.get(id.0.try_into().unwrap()); + let parsed_field = Field { + name: WithLocation::generated(field.name()?.intern()), + is_extension: field.is_extension(), + arguments: self.parse_arguments(field.arguments()?)?, + type_: self.parse_type_reference(field.type_()?)?, + directives: self.parse_directive_values(field.directives()?)?, + parent_type: self.get_type(self.get_fbtype_name(&field.parent_type()?)), + description: None, + hack_source: None, + }; + Some(parsed_field) + } + + fn parse_enum_values( + &self, + values: Vector<'_, ForwardsUOffset>>, + ) -> Option> { + values + .iter() + .map(|value| self.parse_enum_value(value)) + .collect::>>() + } + + fn parse_enum_value(&self, value: schema_flatbuffer::EnumValue<'fb>) -> Option { + let directives = self.parse_directive_values(value.directives()?)?; + Some(EnumValue { + value: value.value()?.intern(), + directives, + }) + } + + fn parse_arguments( + &self, + arguments: Vector<'fb, ForwardsUOffset>>, + ) -> Option { 
+ let items = arguments + .iter() + .map(|argument| self.parse_argument(argument)) + .collect::>>(); + Some(ArgumentDefinitions::new(items?)) + } + + fn parse_argument(&self, argument: schema_flatbuffer::Argument<'fb>) -> Option { + Some(Argument { + name: WithLocation::generated(ArgumentName(argument.name().unwrap().intern())), + default_value: match argument.value() { + Some(value) => Some(self.parse_const_value(value)?), + _ => None, + }, + type_: self.parse_type_reference(argument.type_()?)?, + description: None, + directives: Default::default(), + }) + } + + fn parse_type_reference( + &self, + type_reference: schema_flatbuffer::TypeReference<'fb>, + ) -> Option> { + Some(match type_reference.kind() { + schema_flatbuffer::TypeReferenceKind::Named => { + let type_name = self.get_fbtype_name(&type_reference.named()?); + TypeReference::Named(self.get_type(type_name).unwrap()) + } + schema_flatbuffer::TypeReferenceKind::NonNull => { + TypeReference::NonNull(Box::new(self.parse_type_reference(type_reference.null()?)?)) + } + schema_flatbuffer::TypeReferenceKind::List => { + TypeReference::List(Box::new(self.parse_type_reference(type_reference.list()?)?)) + } + unknown => panic!("unknown TypeReferenceKind value: {:?}", unknown), + }) + } + + fn parse_directive_values( + &self, + directives: Vector<'_, ForwardsUOffset>>, + ) -> Option> { + directives + .iter() + .map(|directive| self.parse_directive_value(directive)) + .collect::>>() + } + + fn parse_directive_value( + &self, + directive: schema_flatbuffer::DirectiveValue<'fb>, + ) -> Option { + let arguments = self.parse_argument_values(directive.arguments()?)?; + Some(DirectiveValue { + name: DirectiveName(directive.name()?.intern()), + arguments, + }) + } + + fn parse_argument_values( + &self, + arguments: Vector<'_, ForwardsUOffset>>, + ) -> Option> { + arguments + .iter() + .map(|argument| self.parse_argument_value(argument)) + .collect::>>() + } + + fn parse_argument_value( + &self, + argument: 
schema_flatbuffer::ArgumentValue<'fb>, + ) -> Option { + Some(ArgumentValue { + name: ArgumentName(argument.name()?.intern()), + value: self.parse_const_value(argument.value()?)?, + }) + } + + fn parse_const_value( + &self, + value: schema_flatbuffer::ConstValue<'fb>, + ) -> Option { + use schema_flatbuffer::ConstValueKind as FB; + Some(match value.kind() { + FB::Null => ConstantValue::Null(get_empty_token()), + FB::String => ConstantValue::String(get_string_node(value.string_value()?.to_string())), + FB::Bool => ConstantValue::Boolean(get_boolean_node(value.bool_value())), + FB::Int => ConstantValue::Int(get_int_node(value.int_value()?.to_string())), + FB::Float => ConstantValue::Float(get_float_node(value.float_value()?.to_string())), + FB::Enum => ConstantValue::Enum(get_enum_node(value.enum_value()?.to_string())), + FB::List => ConstantValue::List(self.parse_list_value(value.list_value()?)?), + FB::Object => ConstantValue::Object(self.parse_object_value(value.object_value()?)?), + unknown => panic!("unknown ConstValueKind value: {:?}", unknown), + }) + } + + fn parse_list_value( + &self, + list_value: schema_flatbuffer::ListValue<'fb>, + ) -> Option> { + let items = list_value + .values()? + .iter() + .map(|value| self.parse_const_value(value)) + .collect::>>(); + Some(List { + span: get_empty_span(), + start: get_empty_token(), + items: items?, + end: get_empty_token(), + }) + } + + fn parse_object_value( + &self, + object_value: schema_flatbuffer::ObjectValue<'fb>, + ) -> Option> { + let items = object_value + .fields()? 
+ .iter() + .map(|field| { + Some(ConstantArgument { + span: get_empty_span(), + name: get_identifier(field.name()?.to_string()), + colon: get_empty_token(), + value: self.parse_const_value(field.value()?)?, + }) + }) + .collect::>>(); + Some(List { + span: get_empty_span(), + start: get_empty_token(), + items: items?, + end: get_empty_token(), + }) + } + + fn get_fbtype_name(&self, type_: &schema_flatbuffer::Type<'_>) -> StringKey { + match type_.kind() { + schema_flatbuffer::TypeKind::Scalar => self + .scalars + .get(type_.scalar_id().try_into().unwrap()) + .name(), + schema_flatbuffer::TypeKind::InputObject => self + .input_objects + .get(type_.input_object_id().try_into().unwrap()) + .name(), + schema_flatbuffer::TypeKind::Enum => { + self.enums.get(type_.enum_id().try_into().unwrap()).name() + } + schema_flatbuffer::TypeKind::Object => self + .objects + .get(type_.object_id().try_into().unwrap()) + .name(), + schema_flatbuffer::TypeKind::Interface => self + .interfaces + .get(type_.interface_id().try_into().unwrap()) + .name(), + schema_flatbuffer::TypeKind::Union => { + self.unions.get(type_.union_id().try_into().unwrap()).name() + } + unknown => panic!("unknown TypeKind value: {:?}", unknown), + } + .unwrap() + .intern() + } +} + +fn get_identifier(value: String) -> Identifier { + Identifier { + span: get_empty_span(), + token: get_empty_token(), + value: value.intern(), + } +} + +fn get_enum_node(value: String) -> EnumNode { + EnumNode { + token: get_empty_token(), + value: value.intern(), + } +} + +fn get_float_node(value: String) -> FloatNode { + FloatNode { + token: get_empty_token(), + value: FloatValue::new(value.parse::().unwrap()), + source_value: value.intern(), + } +} + +fn get_int_node(value: String) -> IntNode { + IntNode { + token: get_empty_token(), + value: value.parse().unwrap(), + } +} + +fn get_boolean_node(value: bool) -> BooleanNode { + BooleanNode { + token: get_empty_token(), + value, + } +} + +fn get_string_node(value: String) -> 
StringNode { + StringNode { + token: get_empty_token(), + value: value.intern(), + } +} + +fn get_empty_token() -> Token { + Token { + span: get_empty_span(), + kind: TokenKind::EndOfFile, + } +} + +fn get_empty_span() -> Span { + Span { start: 0, end: 0 } +} + +fn wrap_ids(ids: Option>, f: impl Fn(u32) -> T) -> Vec { + ids.map_or_else(Vec::new, |ids| ids.into_iter().map(f).collect()) +} + +fn get_mapped_location(location: schema_flatbuffer::DirectiveLocation) -> DirectiveLocation { + use schema_flatbuffer::DirectiveLocation as FDL; + use DirectiveLocation as DL; + match location { + FDL::Query => DL::Query, + FDL::Mutation => DL::Mutation, + FDL::Subscription => DL::Subscription, + FDL::Field => DL::Field, + FDL::FragmentDefinition => DL::FragmentDefinition, + FDL::FragmentSpread => DL::FragmentSpread, + FDL::InlineFragment => DL::InlineFragment, + FDL::Schema => DL::Schema, + FDL::Scalar => DL::Scalar, + FDL::Object => DL::Object, + FDL::FieldDefinition => DL::FieldDefinition, + FDL::ArgumentDefinition => DL::ArgumentDefinition, + FDL::Interface => DL::Interface, + FDL::Union => DL::Union, + FDL::Enum => DL::Enum, + FDL::EnumValue => DL::EnumValue, + FDL::InputObject => DL::InputObject, + FDL::InputFieldDefinition => DL::InputFieldDefinition, + FDL::VariableDefinition => DL::VariableDefinition, + unknown => panic!("unknown DirectiveLocation value: {:?}", unknown), + } +} + +#[cfg(test)] +mod tests { + use common::DiagnosticsResult; + + use super::*; + use crate::build_schema; + + #[test] + fn binary_search() -> DiagnosticsResult<()> { + let sdl = " + directive @ref_type(schema: String, name: String) on FIELD_DEFINITION + directive @extern_type(schema: String, name: String) on INTERFACE + directive @fetchable(field_name: String) on OBJECT + + type Query { id: ID } + type User { id: ID } + type MailingAddress { id: ID } + type Country { id: ID } + "; + let sdl_schema = build_schema(sdl)?.unwrap_in_memory_impl(); + let bytes = serialize_as_flatbuffer(&sdl_schema); + 
let fb_schema = FlatBufferSchema::build(&bytes); + + assert!(fb_schema.read_type("Query".intern()).is_some()); + assert!(fb_schema.read_type("User".intern()).is_some()); + assert!(fb_schema.read_type("MailingAddress".intern()).is_some()); + + assert!(fb_schema.read_type("State".intern()).is_none()); + assert!(fb_schema.read_type("Aaaa".intern()).is_none()); + assert!(fb_schema.read_type("Zzzz".intern()).is_none()); + + assert!( + fb_schema + .read_directive(DirectiveName("ref_type".intern())) + .is_some() + ); + assert!( + fb_schema + .read_directive(DirectiveName("extern_type".intern())) + .is_some() + ); + assert!( + fb_schema + .read_directive(DirectiveName("fetchable".intern())) + .is_some() + ); + + assert!( + fb_schema + .read_directive(DirectiveName("goto".intern())) + .is_none() + ); + assert!( + fb_schema + .read_directive(DirectiveName("aaaa".intern())) + .is_none() + ); + assert!( + fb_schema + .read_directive(DirectiveName("zzzz".intern())) + .is_none() + ); + + Ok(()) + } +} diff --git a/compiler/crates/schema/src/flatbuffer/mod.rs b/compiler/crates/schema/src/flatbuffer/mod.rs deleted file mode 100644 index 7ac9a6f7fe152..0000000000000 --- a/compiler/crates/schema/src/flatbuffer/mod.rs +++ /dev/null @@ -1,656 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */ - -mod serialize; -mod wrapper; - -use std::cmp::Ordering; - -use common::ArgumentName; -use common::DirectiveName; -use common::EnumName; -use common::InputObjectName; -use common::InterfaceName; -use common::ObjectName; -use common::ScalarName; -use common::Span; -use common::WithLocation; -use flatbuffers::ForwardsUOffset; -use flatbuffers::Vector; -use graphql_syntax::BooleanNode; -use graphql_syntax::ConstantArgument; -use graphql_syntax::ConstantValue; -use graphql_syntax::DirectiveLocation; -use graphql_syntax::EnumNode; -use graphql_syntax::FloatNode; -use graphql_syntax::FloatValue; -use graphql_syntax::Identifier; -use graphql_syntax::IntNode; -use graphql_syntax::List; -use graphql_syntax::StringNode; -use graphql_syntax::Token; -use graphql_syntax::TokenKind; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::Lookup; -pub use serialize::serialize_as_flatbuffer; -pub use wrapper::SchemaWrapper; - -use crate::definitions::Argument; -use crate::definitions::Directive; -use crate::definitions::*; - -#[derive(Debug)] -pub struct FlatBufferSchema<'fb> { - query_type: Type, - mutation_type: Option, - subscription_type: Option, - directives: Vector<'fb, ForwardsUOffset>>, - enums: Vector<'fb, ForwardsUOffset>>, - fields: Vector<'fb, ForwardsUOffset>>, - input_objects: Vector<'fb, ForwardsUOffset>>, - interfaces: Vector<'fb, ForwardsUOffset>>, - objects: Vector<'fb, ForwardsUOffset>>, - scalars: Vector<'fb, ForwardsUOffset>>, - types: Vector<'fb, ForwardsUOffset>>, - unions: Vector<'fb, ForwardsUOffset>>, -} - -impl<'fb> FlatBufferSchema<'fb> { - pub fn build(bytes: &'fb [u8]) -> Self { - #![allow(deprecated)] - let fb_schema: schema_flatbuffer::Schema<'fb> = - schema_flatbuffer::get_root_as_schema(bytes); - - let query_type = Type::Object(ObjectID(fb_schema.query_type())); - let mutation_type = fb_schema - .has_mutation_type() - .then(|| Type::Object(ObjectID(fb_schema.mutation_type()))); - let subscription_type = fb_schema - 
.has_subscription_type() - .then(|| Type::Object(ObjectID(fb_schema.subscription_type()))); - - Self { - query_type, - mutation_type, - subscription_type, - directives: fb_schema.directives(), - enums: fb_schema.enums(), - fields: fb_schema.fields(), - input_objects: fb_schema.input_objects(), - interfaces: fb_schema.interfaces(), - objects: fb_schema.objects(), - scalars: fb_schema.scalars(), - types: fb_schema.types(), - unions: fb_schema.unions(), - } - } - - pub fn query_type(&self) -> Type { - self.query_type - } - - pub fn mutation_type(&self) -> Option { - self.mutation_type - } - - pub fn subscription_type(&self) -> Option { - self.subscription_type - } - - pub fn get_type(&self, type_name: StringKey) -> Option { - self.read_type(type_name) - } - - pub fn has_type(&self, type_name: StringKey) -> bool { - self.get_type(type_name).is_some() - } - - pub fn get_directive(&self, directive_name: DirectiveName) -> Option { - self.read_directive(directive_name) - } - - pub fn input_object(&self, id: InputObjectID) -> InputObject { - self.parse_input_object(id).unwrap() - } - - pub fn enum_(&self, id: EnumID) -> Enum { - self.parse_enum(id).unwrap() - } - - pub fn scalar(&self, id: ScalarID) -> Scalar { - self.parse_scalar(id).unwrap() - } - - pub fn field(&self, id: FieldID) -> Field { - self.parse_field(id).unwrap() - } - - pub fn object(&self, id: ObjectID) -> Object { - self.parse_object(id).unwrap() - } - - pub fn union(&self, id: UnionID) -> Union { - self.parse_union(id).unwrap() - } - - pub fn interface(&self, id: InterfaceID) -> Interface { - self.parse_interface(id).unwrap() - } - - fn read_directive(&self, name: DirectiveName) -> Option { - let mut start = 0; - let mut end = self.directives.len(); - let name = name.0.lookup(); - while start < end { - let mid = start + ((end - start) / 2); - match self.directives.get(mid).key_compare_with_value(name) { - Ordering::Less => { - start = mid + 1; - } - Ordering::Equal => { - let directive = 
self.directives.get(mid).value()?; - return self.parse_directive(directive); - } - Ordering::Greater => end = mid, - } - } - None - } - - fn parse_directive(&self, directive: schema_flatbuffer::Directive<'fb>) -> Option { - let locations = directive - .locations()? - .iter() - .map(get_mapped_location) - .collect::>(); - let parsed_directive = Directive { - name: DirectiveName(directive.name()?.intern()), - is_extension: directive.is_extension(), - arguments: self.parse_arguments(directive.arguments()?)?, - locations, - repeatable: directive.repeatable(), - description: None, - }; - Some(parsed_directive) - } - - fn read_type(&self, type_name: StringKey) -> Option { - let mut start = 0; - let mut end = self.types.len(); - let type_name = type_name.lookup(); - while start < end { - let mid = start + ((end - start) / 2); - match self.types.get(mid).key_compare_with_value(type_name) { - Ordering::Less => { - start = mid + 1; - } - Ordering::Equal => { - let type_ = self.types.get(mid).value()?; - return Some(self.parse_type(type_)); - } - Ordering::Greater => end = mid, - } - } - None - } - - fn parse_type(&self, type_: schema_flatbuffer::Type<'_>) -> Type { - match type_.kind() { - schema_flatbuffer::TypeKind::Scalar => Type::Scalar(ScalarID(type_.scalar_id())), - schema_flatbuffer::TypeKind::InputObject => { - Type::InputObject(InputObjectID(type_.input_object_id())) - } - schema_flatbuffer::TypeKind::Enum => Type::Enum(EnumID(type_.enum_id())), - schema_flatbuffer::TypeKind::Object => Type::Object(ObjectID(type_.object_id())), - schema_flatbuffer::TypeKind::Interface => { - Type::Interface(InterfaceID(type_.interface_id())) - } - schema_flatbuffer::TypeKind::Union => Type::Union(UnionID(type_.union_id())), - unknown => panic!("unknown TypeKind value: {:?}", unknown), - } - } - - fn parse_scalar(&self, id: ScalarID) -> Option { - let scalar = self.scalars.get(id.0.try_into().unwrap()); - let name = ScalarName(scalar.name()?.intern()); - let parsed_scalar = Scalar { 
- name: WithLocation::generated(name), - is_extension: scalar.is_extension(), - directives: self.parse_directive_values(scalar.directives()?)?, - description: None, - }; - Some(parsed_scalar) - } - - fn parse_input_object(&self, id: InputObjectID) -> Option { - let input_object = self.input_objects.get(id.0.try_into().unwrap()); - let name = InputObjectName(input_object.name()?.to_string().intern()); - let parsed_input_object = InputObject { - name: WithLocation::generated(name), - fields: self.parse_arguments(input_object.fields()?)?, - directives: self.parse_directive_values(input_object.directives()?)?, - description: None, - }; - Some(parsed_input_object) - } - - fn parse_enum(&self, id: EnumID) -> Option { - let enum_ = self.enums.get(id.0.try_into().unwrap()); - let name = EnumName(enum_.name()?.to_string().intern()); - let parsed_enum = Enum { - name: WithLocation::generated(name), - is_extension: enum_.is_extension(), - values: self.parse_enum_values(enum_.values()?)?, - directives: self.parse_directive_values(enum_.directives()?)?, - description: None, - }; - Some(parsed_enum) - } - - fn parse_object(&self, id: ObjectID) -> Option { - let object = self.objects.get(id.0.try_into().unwrap()); - let name = object.name()?.intern(); - let parsed_object = Object { - name: WithLocation::generated(ObjectName(name)), - is_extension: object.is_extension(), - fields: object.fields()?.iter().map(FieldID).collect(), - interfaces: object.interfaces()?.iter().map(InterfaceID).collect(), - directives: self.parse_directive_values(object.directives()?)?, - description: None, - }; - Some(parsed_object) - } - - fn parse_interface(&self, id: InterfaceID) -> Option { - let interface = self.interfaces.get(id.0.try_into().unwrap()); - let name = InterfaceName(interface.name()?.intern()); - - let parsed_interface = Interface { - name: WithLocation::generated(name), - is_extension: interface.is_extension(), - implementing_interfaces: wrap_ids(interface.implementing_interfaces(), 
InterfaceID), - implementing_objects: wrap_ids(interface.implementing_objects(), ObjectID), - fields: wrap_ids(interface.fields(), FieldID), - directives: self.parse_directive_values(interface.directives()?)?, - interfaces: wrap_ids(interface.interfaces(), InterfaceID), - description: None, - }; - Some(parsed_interface) - } - - fn parse_union(&self, id: UnionID) -> Option { - let union = self.unions.get(id.0.try_into().unwrap()); - let name = union.name()?.intern(); - let parsed_union = Union { - name: WithLocation::generated(name), - is_extension: union.is_extension(), - members: wrap_ids(union.members(), ObjectID), - directives: self.parse_directive_values(union.directives()?)?, - description: None, - }; - Some(parsed_union) - } - - fn parse_field(&self, id: FieldID) -> Option { - let field = self.fields.get(id.0.try_into().unwrap()); - let parsed_field = Field { - name: WithLocation::generated(field.name()?.intern()), - is_extension: field.is_extension(), - arguments: self.parse_arguments(field.arguments()?)?, - type_: self.parse_type_reference(field.type_()?)?, - directives: self.parse_directive_values(field.directives()?)?, - parent_type: self.get_type(self.get_fbtype_name(&field.parent_type()?)), - description: None, - }; - Some(parsed_field) - } - - fn parse_enum_values( - &self, - values: Vector<'_, ForwardsUOffset>>, - ) -> Option> { - values - .iter() - .map(|value| self.parse_enum_value(value)) - .collect::>>() - } - - fn parse_enum_value(&self, value: schema_flatbuffer::EnumValue<'fb>) -> Option { - let directives = self.parse_directive_values(value.directives()?)?; - Some(EnumValue { - value: value.value()?.intern(), - directives, - }) - } - - fn parse_arguments( - &self, - arguments: Vector<'fb, ForwardsUOffset>>, - ) -> Option { - let items = arguments - .iter() - .map(|argument| self.parse_argument(argument)) - .collect::>>(); - Some(ArgumentDefinitions::new(items?)) - } - - fn parse_argument(&self, argument: schema_flatbuffer::Argument<'fb>) -> 
Option { - Some(Argument { - name: ArgumentName(argument.name().unwrap().intern()), - default_value: match argument.value() { - Some(value) => Some(self.parse_const_value(value)?), - _ => None, - }, - type_: self.parse_type_reference(argument.type_()?)?, - description: None, - directives: Default::default(), - }) - } - - fn parse_type_reference( - &self, - type_reference: schema_flatbuffer::TypeReference<'fb>, - ) -> Option> { - Some(match type_reference.kind() { - schema_flatbuffer::TypeReferenceKind::Named => { - let type_name = self.get_fbtype_name(&type_reference.named()?); - TypeReference::Named(self.get_type(type_name).unwrap()) - } - schema_flatbuffer::TypeReferenceKind::NonNull => { - TypeReference::NonNull(Box::new(self.parse_type_reference(type_reference.null()?)?)) - } - schema_flatbuffer::TypeReferenceKind::List => { - TypeReference::List(Box::new(self.parse_type_reference(type_reference.list()?)?)) - } - unknown => panic!("unknown TypeReferenceKind value: {:?}", unknown), - }) - } - - fn parse_directive_values( - &self, - directives: Vector<'_, ForwardsUOffset>>, - ) -> Option> { - directives - .iter() - .map(|directive| self.parse_directive_value(directive)) - .collect::>>() - } - - fn parse_directive_value( - &self, - directive: schema_flatbuffer::DirectiveValue<'fb>, - ) -> Option { - let arguments = self.parse_argument_values(directive.arguments()?)?; - Some(DirectiveValue { - name: DirectiveName(directive.name()?.intern()), - arguments, - }) - } - - fn parse_argument_values( - &self, - arguments: Vector<'_, ForwardsUOffset>>, - ) -> Option> { - arguments - .iter() - .map(|argument| self.parse_argument_value(argument)) - .collect::>>() - } - - fn parse_argument_value( - &self, - argument: schema_flatbuffer::ArgumentValue<'fb>, - ) -> Option { - Some(ArgumentValue { - name: ArgumentName(argument.name()?.intern()), - value: self.parse_const_value(argument.value()?)?, - }) - } - - fn parse_const_value( - &self, - value: 
schema_flatbuffer::ConstValue<'fb>, - ) -> Option { - use schema_flatbuffer::ConstValueKind as FB; - Some(match value.kind() { - FB::Null => ConstantValue::Null(get_empty_token()), - FB::String => ConstantValue::String(get_string_node(value.string_value()?.to_string())), - FB::Bool => ConstantValue::Boolean(get_boolean_node(value.bool_value())), - FB::Int => ConstantValue::Int(get_int_node(value.int_value()?.to_string())), - FB::Float => ConstantValue::Float(get_float_node(value.float_value()?.to_string())), - FB::Enum => ConstantValue::Enum(get_enum_node(value.enum_value()?.to_string())), - FB::List => ConstantValue::List(self.parse_list_value(value.list_value()?)?), - FB::Object => ConstantValue::Object(self.parse_object_value(value.object_value()?)?), - unknown => panic!("unknown ConstValueKind value: {:?}", unknown), - }) - } - - fn parse_list_value( - &self, - list_value: schema_flatbuffer::ListValue<'fb>, - ) -> Option> { - let items = list_value - .values()? - .iter() - .map(|value| self.parse_const_value(value)) - .collect::>>(); - Some(List { - span: get_empty_span(), - start: get_empty_token(), - items: items?, - end: get_empty_token(), - }) - } - - fn parse_object_value( - &self, - object_value: schema_flatbuffer::ObjectValue<'fb>, - ) -> Option> { - let items = object_value - .fields()? 
- .iter() - .map(|field| { - Some(ConstantArgument { - span: get_empty_span(), - name: get_identifier(field.name()?.to_string()), - colon: get_empty_token(), - value: self.parse_const_value(field.value()?)?, - }) - }) - .collect::>>(); - Some(List { - span: get_empty_span(), - start: get_empty_token(), - items: items?, - end: get_empty_token(), - }) - } - - fn get_fbtype_name(&self, type_: &schema_flatbuffer::Type<'_>) -> StringKey { - match type_.kind() { - schema_flatbuffer::TypeKind::Scalar => self - .scalars - .get(type_.scalar_id().try_into().unwrap()) - .name(), - schema_flatbuffer::TypeKind::InputObject => self - .input_objects - .get(type_.input_object_id().try_into().unwrap()) - .name(), - schema_flatbuffer::TypeKind::Enum => { - self.enums.get(type_.enum_id().try_into().unwrap()).name() - } - schema_flatbuffer::TypeKind::Object => self - .objects - .get(type_.object_id().try_into().unwrap()) - .name(), - schema_flatbuffer::TypeKind::Interface => self - .interfaces - .get(type_.interface_id().try_into().unwrap()) - .name(), - schema_flatbuffer::TypeKind::Union => { - self.unions.get(type_.union_id().try_into().unwrap()).name() - } - unknown => panic!("unknown TypeKind value: {:?}", unknown), - } - .unwrap() - .intern() - } -} - -fn get_identifier(value: String) -> Identifier { - Identifier { - span: get_empty_span(), - token: get_empty_token(), - value: value.intern(), - } -} - -fn get_enum_node(value: String) -> EnumNode { - EnumNode { - token: get_empty_token(), - value: value.intern(), - } -} - -fn get_float_node(value: String) -> FloatNode { - FloatNode { - token: get_empty_token(), - value: FloatValue::new(value.parse::().unwrap()), - source_value: value.intern(), - } -} - -fn get_int_node(value: String) -> IntNode { - IntNode { - token: get_empty_token(), - value: value.parse().unwrap(), - } -} - -fn get_boolean_node(value: bool) -> BooleanNode { - BooleanNode { - token: get_empty_token(), - value, - } -} - -fn get_string_node(value: String) -> 
StringNode { - StringNode { - token: get_empty_token(), - value: value.intern(), - } -} - -fn get_empty_token() -> Token { - Token { - span: get_empty_span(), - kind: TokenKind::EndOfFile, - } -} - -fn get_empty_span() -> Span { - Span { start: 0, end: 0 } -} - -fn wrap_ids(ids: Option>, f: impl Fn(u32) -> T) -> Vec { - ids.map_or_else(Vec::new, |ids| ids.into_iter().map(f).collect()) -} - -fn get_mapped_location(location: schema_flatbuffer::DirectiveLocation) -> DirectiveLocation { - use schema_flatbuffer::DirectiveLocation as FDL; - use DirectiveLocation as DL; - match location { - FDL::Query => DL::Query, - FDL::Mutation => DL::Mutation, - FDL::Subscription => DL::Subscription, - FDL::Field => DL::Field, - FDL::FragmentDefinition => DL::FragmentDefinition, - FDL::FragmentSpread => DL::FragmentSpread, - FDL::InlineFragment => DL::InlineFragment, - FDL::Schema => DL::Schema, - FDL::Scalar => DL::Scalar, - FDL::Object => DL::Object, - FDL::FieldDefinition => DL::FieldDefinition, - FDL::ArgumentDefinition => DL::ArgumentDefinition, - FDL::Interface => DL::Interface, - FDL::Union => DL::Union, - FDL::Enum => DL::Enum, - FDL::EnumValue => DL::EnumValue, - FDL::InputObject => DL::InputObject, - FDL::InputFieldDefinition => DL::InputFieldDefinition, - FDL::VariableDefinition => DL::VariableDefinition, - unknown => panic!("unknown DirectiveLocation value: {:?}", unknown), - } -} - -#[cfg(test)] -mod tests { - use common::DiagnosticsResult; - - use super::*; - use crate::build_schema; - - #[test] - fn binary_search() -> DiagnosticsResult<()> { - let sdl = " - directive @ref_type(schema: String, name: String) on FIELD_DEFINITION - directive @extern_type(schema: String, name: String) on INTERFACE - directive @fetchable(field_name: String) on OBJECT - - type Query { id: ID } - type User { id: ID } - type MailingAddress { id: ID } - type Country { id: ID } - "; - let sdl_schema = build_schema(sdl)?.unwrap_in_memory_impl(); - let bytes = serialize_as_flatbuffer(&sdl_schema); - 
let fb_schema = FlatBufferSchema::build(&bytes); - - assert!(fb_schema.read_type("Query".intern()).is_some()); - assert!(fb_schema.read_type("User".intern()).is_some()); - assert!(fb_schema.read_type("MailingAddress".intern()).is_some()); - - assert!(fb_schema.read_type("State".intern()).is_none()); - assert!(fb_schema.read_type("Aaaa".intern()).is_none()); - assert!(fb_schema.read_type("Zzzz".intern()).is_none()); - - assert!( - fb_schema - .read_directive(DirectiveName("ref_type".intern())) - .is_some() - ); - assert!( - fb_schema - .read_directive(DirectiveName("extern_type".intern())) - .is_some() - ); - assert!( - fb_schema - .read_directive(DirectiveName("fetchable".intern())) - .is_some() - ); - - assert!( - fb_schema - .read_directive(DirectiveName("goto".intern())) - .is_none() - ); - assert!( - fb_schema - .read_directive(DirectiveName("aaaa".intern())) - .is_none() - ); - assert!( - fb_schema - .read_directive(DirectiveName("zzzz".intern())) - .is_none() - ); - - Ok(()) - } -} diff --git a/compiler/crates/schema/src/flatbuffer/serialize.rs b/compiler/crates/schema/src/flatbuffer/serialize.rs index fbcd5ffa8fb8b..4ca7e00b9ec75 100644 --- a/compiler/crates/schema/src/flatbuffer/serialize.rs +++ b/compiler/crates/schema/src/flatbuffer/serialize.rs @@ -141,7 +141,7 @@ impl<'fb, 'schema> Serializer<'fb, 'schema> { } fn serialize_directive(&mut self, directive: &Directive) { - let name = directive.name.0.lookup(); + let name = directive.name.item.0.lookup(); if self.directives.contains_key(name) { return; } @@ -339,7 +339,7 @@ impl<'fb, 'schema> Serializer<'fb, 'schema> { let union = self.schema.union(id); let name = union.name; let idx = self.unions.len(); - self.add_to_type_map(idx, schema_flatbuffer::TypeKind::Union, name.item); + self.add_to_type_map(idx, schema_flatbuffer::TypeKind::Union, name.item.0); self.unions.push(schema_flatbuffer::Union::create( &mut self.bldr, &schema_flatbuffer::UnionArgs::default(), @@ -421,7 +421,7 @@ impl<'fb, 'schema> 
Serializer<'fb, 'schema> { value: &Argument, ) -> WIPOffset> { let args = schema_flatbuffer::ArgumentArgs { - name: Some(self.bldr.create_string(value.name.0.lookup())), + name: Some(self.bldr.create_string(value.name.item.0.lookup())), value: value .default_value .as_ref() diff --git a/compiler/crates/schema/src/flatbuffer/wrapper.rs b/compiler/crates/schema/src/flatbuffer/wrapper.rs index d139773ae4973..1fb6fcd7e54cc 100644 --- a/compiler/crates/schema/src/flatbuffer/wrapper.rs +++ b/compiler/crates/schema/src/flatbuffer/wrapper.rs @@ -124,6 +124,7 @@ impl SchemaWrapper { directives: Vec::new(), parent_type: None, description: None, + hack_source: None, }); result.fields.get(CLIENTID_FIELD_ID, || -> Field { Field { @@ -136,6 +137,7 @@ impl SchemaWrapper { directives: Vec::new(), parent_type: None, description: Some(*CLIENT_ID_DESCRIPTION), + hack_source: None, } }); result.fields.get(STRONGID_FIELD_ID, || Field { @@ -146,6 +148,7 @@ impl SchemaWrapper { directives: Vec::new(), parent_type: None, description: Some(*TYPENAME_DESCRIPTION), + hack_source: None, }); result.fields.get(FETCH_TOKEN_FIELD_ID, || Field { name: WithLocation::generated(result.fetch_token_field_name), @@ -157,12 +160,13 @@ impl SchemaWrapper { directives: Vec::new(), parent_type: None, description: None, + hack_source: None, }); result.fields.get(IS_FULFILLED_FIELD_ID, || Field { name: WithLocation::generated(result.is_fulfilled_field_name), is_extension: true, arguments: ArgumentDefinitions::new(vec![Argument { - name: ArgumentName("name".intern()), + name: WithLocation::generated(ArgumentName("name".intern())), type_: TypeReference::NonNull(Box::new(TypeReference::Named( result.get_type("String".intern()).unwrap(), ))), @@ -176,6 +180,7 @@ impl SchemaWrapper { directives: Vec::new(), parent_type: None, description: None, + hack_source: None, }); result.unchecked_argument_type_sentinel = Some(TypeReference::Named( @@ -248,26 +253,7 @@ impl Schema for SchemaWrapper { fn get_directive(&self, 
name: DirectiveName) -> Option<&Directive> { self.directives - .get(name, || { - match ( - name.0.lookup(), - self.flatbuffer_schema().get_directive(name), - ) { - ("defer", Some(mut directive)) | ("stream", Some(mut directive)) => { - let mut next_args: Vec<_> = directive.arguments.iter().cloned().collect(); - for arg in next_args.iter_mut() { - if arg.name.0.lookup() == "label" { - if let TypeReference::NonNull(of) = &arg.type_ { - arg.type_ = *of.clone() - }; - } - } - directive.arguments = ArgumentDefinitions::new(next_args); - Some(directive) - } - (_, result) => result, - } - }) + .get(name, || self.flatbuffer_schema().get_directive(name)) .as_ref() } @@ -308,7 +294,7 @@ impl Schema for SchemaWrapper { Type::Interface(id) => self.interface(id).name.item.0, Type::Object(id) => self.object(id).name.item.0, Type::Scalar(id) => self.scalar(id).name.item.0, - Type::Union(id) => self.union(id).name.item, + Type::Union(id) => self.union(id).name.item.0, } } diff --git a/compiler/crates/schema/src/in_memory.rs b/compiler/crates/schema/src/in_memory.rs new file mode 100644 index 0000000000000..94d8db2bf047f --- /dev/null +++ b/compiler/crates/schema/src/in_memory.rs @@ -0,0 +1,1967 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::collections::hash_map::Entry; +use std::collections::BTreeMap; +use std::collections::HashMap; + +use common::ArgumentName; +use common::Diagnostic; +use common::DiagnosticsResult; +use common::DirectiveName; +use common::EnumName; +use common::InputObjectName; +use common::InterfaceName; +use common::Location; +use common::ObjectName; +use common::ScalarName; +use common::SourceLocationKey; +use common::UnionName; +use common::WithLocation; +use graphql_syntax::*; +use intern::string_key::Intern; +use intern::string_key::StringKey; +use intern::Lookup; +use rayon::iter::IntoParallelRefIterator; +use rayon::iter::ParallelIterator; + +use crate::definitions::Argument; +use crate::definitions::Directive; +use crate::definitions::*; +use crate::errors::SchemaError; +use crate::field_descriptions::CLIENT_ID_DESCRIPTION; +use crate::field_descriptions::TYPENAME_DESCRIPTION; +use crate::graphql_schema::Schema; + +fn todo_add_location(error: SchemaError) -> DiagnosticsResult { + Err(vec![Diagnostic::error(error, Location::generated())]) +} + +#[derive(Debug, Clone)] +pub struct InMemorySchema { + query_type: Option, + mutation_type: Option, + subscription_type: Option, + type_map: TypeMap, + + clientid_field: FieldID, + strongid_field: FieldID, + typename_field: FieldID, + fetch_token_field: FieldID, + is_fulfilled_field: FieldID, + + clientid_field_name: StringKey, + strongid_field_name: StringKey, + typename_field_name: StringKey, + fetch_token_field_name: StringKey, + is_fulfilled_field_name: StringKey, + + string_type: Option, + id_type: Option, + + unchecked_argument_type_sentinel: Option>, + + directives: HashMap, + + enums: Vec, + fields: Vec, + input_objects: Vec, + interfaces: Vec, + objects: Vec, + scalars: Vec, + unions: Vec, +} + +impl Schema for InMemorySchema { + fn query_type(&self) -> Option { + self.query_type.map(Type::Object) + } + + fn mutation_type(&self) -> Option { + self.mutation_type.map(Type::Object) + } + + fn 
subscription_type(&self) -> Option { + self.subscription_type.map(Type::Object) + } + + fn clientid_field(&self) -> FieldID { + self.clientid_field + } + + fn strongid_field(&self) -> FieldID { + self.strongid_field + } + + fn typename_field(&self) -> FieldID { + self.typename_field + } + + fn fetch_token_field(&self) -> FieldID { + self.fetch_token_field + } + + fn is_fulfilled_field(&self) -> FieldID { + self.is_fulfilled_field + } + + fn get_type(&self, type_name: StringKey) -> Option { + self.type_map.get(&type_name).copied() + } + + fn get_directive(&self, name: DirectiveName) -> Option<&Directive> { + self.directives.get(&name) + } + + fn input_object(&self, id: InputObjectID) -> &InputObject { + &self.input_objects[id.as_usize()] + } + + fn enum_(&self, id: EnumID) -> &Enum { + &self.enums[id.as_usize()] + } + + fn scalar(&self, id: ScalarID) -> &Scalar { + &self.scalars[id.as_usize()] + } + + fn field(&self, id: FieldID) -> &Field { + &self.fields[id.as_usize()] + } + + fn object(&self, id: ObjectID) -> &Object { + &self.objects[id.as_usize()] + } + + fn union(&self, id: UnionID) -> &Union { + &self.unions[id.as_usize()] + } + + fn interface(&self, id: InterfaceID) -> &Interface { + &self.interfaces[id.as_usize()] + } + + fn get_type_name(&self, type_: Type) -> StringKey { + match type_ { + Type::InputObject(id) => self.input_objects[id.as_usize()].name.item.0, + Type::Enum(id) => self.enums[id.as_usize()].name.item.0, + Type::Interface(id) => self.interfaces[id.as_usize()].name.item.0, + Type::Object(id) => self.objects[id.as_usize()].name.item.0, + Type::Scalar(id) => self.scalars[id.as_usize()].name.item.0, + Type::Union(id) => self.unions[id.as_usize()].name.item.0, + } + } + + fn is_extension_type(&self, type_: Type) -> bool { + match type_ { + Type::Enum(id) => self.enums[id.as_usize()].is_extension, + Type::Interface(id) => self.interfaces[id.as_usize()].is_extension, + Type::Object(id) => self.objects[id.as_usize()].is_extension, + Type::Scalar(id) 
=> self.scalars[id.as_usize()].is_extension, + Type::Union(id) => self.unions[id.as_usize()].is_extension, + Type::InputObject(_) => false, + } + } + + fn is_string(&self, type_: Type) -> bool { + type_ == Type::Scalar(self.string_type.unwrap()) + } + + fn is_id(&self, type_: Type) -> bool { + type_ == Type::Scalar(self.id_type.unwrap()) + } + + fn named_field(&self, parent_type: Type, name: StringKey) -> Option { + // Special case for __typename and __id fields, which should not be in the list of type fields + // but should be fine to select. + let can_have_typename = matches!( + parent_type, + Type::Object(_) | Type::Interface(_) | Type::Union(_) + ); + if can_have_typename { + if name == self.typename_field_name { + return Some(self.typename_field); + } + // TODO(inanc): Also check if the parent type is fetchable? + if name == self.fetch_token_field_name { + return Some(self.fetch_token_field); + } + if name == self.clientid_field_name { + return Some(self.clientid_field); + } + if name == self.strongid_field_name { + return Some(self.strongid_field); + } + if name == self.is_fulfilled_field_name { + return Some(self.is_fulfilled_field); + } + } + + let fields = match parent_type { + Type::Object(id) => { + let object = &self.objects[id.as_usize()]; + &object.fields + } + Type::Interface(id) => { + let interface = &self.interfaces[id.as_usize()]; + &interface.fields + } + // Unions don't have any fields, but can have selections like __typename + // or a field with @fixme_fat_interface + Type::Union(_) => return None, + _ => panic!( + "Cannot get field {} on type '{:?}', this type does not have fields", + name, + self.get_type_name(parent_type) + ), + }; + fields + .iter() + .find(|field_id| { + let field = &self.fields[field_id.as_usize()]; + field.name.item == name + }) + .cloned() + } + + /// A value that represents a type of unchecked arguments where we don't + /// have a type to instantiate the argument. 
+ /// + /// TODO: we probably want to replace this with a proper `Unknown` type. + fn unchecked_argument_type_sentinel(&self) -> &TypeReference { + self.unchecked_argument_type_sentinel.as_ref().unwrap() + } + + fn snapshot_print(&self) -> String { + let Self { + query_type, + mutation_type, + subscription_type, + directives, + clientid_field: _clientid_field, + strongid_field: _strongid_field, + typename_field: _typename_field, + fetch_token_field: _fetch_token_field, + is_fulfilled_field: _is_fulfilled_field, + clientid_field_name: _clientid_field_name, + strongid_field_name: _strongid_field_name, + typename_field_name: _typename_field_name, + fetch_token_field_name: _fetch_token_field_name, + is_fulfilled_field_name: _is_fulfilled_field_name, + string_type: _string_type, + id_type: _id_type, + unchecked_argument_type_sentinel: _unchecked_argument_type_sentinel, + type_map, + enums, + fields, + input_objects, + interfaces, + objects, + scalars, + unions, + } = self; + let ordered_type_map: BTreeMap<_, _> = type_map.iter().collect(); + + let mut ordered_directives = directives.values().collect::>(); + ordered_directives.sort_by_key(|dir| dir.name.item.0.lookup()); + + format!( + r#"Schema {{ + query_type: {:#?} + mutation_type: {:#?} + subscription_type: {:#?} + directives: {:#?} + type_map: {:#?} + enums: {:#?} + fields: {:#?} + input_objects: {:#?} + interfaces: {:#?} + objects: {:#?} + scalars: {:#?} + unions: {:#?} + }}"#, + query_type, + mutation_type, + subscription_type, + ordered_directives, + ordered_type_map, + enums, + fields, + input_objects, + interfaces, + objects, + scalars, + unions, + ) + } + + fn input_objects<'a>(&'a self) -> Box + 'a> { + Box::new(self.input_objects.iter()) + } + + fn enums<'a>(&'a self) -> Box + 'a> { + Box::new(self.enums.iter()) + } + + fn scalars<'a>(&'a self) -> Box + 'a> { + Box::new(self.scalars.iter()) + } + + fn fields<'a>(&'a self) -> Box + 'a> { + Box::new(self.fields.iter()) + } + + fn objects<'a>(&'a self) -> Box + 
'a> { + Box::new(self.objects.iter()) + } + + fn unions<'a>(&'a self) -> Box + 'a> { + Box::new(self.unions.iter()) + } + + fn interfaces<'a>(&'a self) -> Box + 'a> { + Box::new(self.interfaces.iter()) + } +} + +impl InMemorySchema { + pub fn get_directive_mut(&mut self, name: DirectiveName) -> Option<&mut Directive> { + self.directives.get_mut(&name) + } + + pub fn get_type_map(&self) -> impl Iterator { + self.type_map.iter() + } + + pub fn get_type_map_par_iter(&self) -> impl ParallelIterator { + self.type_map.par_iter() + } + + pub fn get_directives(&self) -> impl Iterator { + self.directives.values() + } + + /// Returns all directives applicable for a given location(Query, Field, etc). + pub fn directives_for_location(&self, location: DirectiveLocation) -> Vec<&Directive> { + self.directives + .values() + .filter(|directive| directive.locations.contains(&location)) + .collect() + } + + pub fn get_fields(&self) -> impl Iterator { + self.fields.iter() + } + + pub fn get_interfaces(&self) -> impl Iterator { + self.interfaces.iter() + } + + pub fn get_enums(&self) -> impl Iterator { + self.enums.iter() + } + + pub fn get_objects(&self) -> impl Iterator { + self.objects.iter() + } + + pub fn get_unions(&self) -> impl Iterator { + self.unions.iter() + } + + pub fn has_directive(&self, directive_name: DirectiveName) -> bool { + self.directives.contains_key(&directive_name) + } + + pub fn has_type(&self, type_name: StringKey) -> bool { + self.type_map.contains_key(&type_name) + } + + pub fn add_directive(&mut self, directive: Directive) -> DiagnosticsResult<()> { + if self.directives.contains_key(&directive.name.item) { + return todo_add_location(SchemaError::DuplicateDirectiveDefinition( + directive.name.item.0, + )); + } + self.directives.insert(directive.name.item, directive); + Ok(()) + } + + pub fn remove_directive(&mut self, directive_name: DirectiveName) -> DiagnosticsResult<()> { + if !self.directives.contains_key(&directive_name) { + // Cannot find the 
directive to remove + return todo_add_location(SchemaError::UndefinedDirective(directive_name.0)); + } + self.directives.remove(&directive_name); + Ok(()) + } + + pub fn add_field(&mut self, field: Field) -> DiagnosticsResult { + Ok(self.build_field(field)) + } + + pub fn add_enum(&mut self, enum_: Enum) -> DiagnosticsResult { + if self.type_map.contains_key(&enum_.name.item.0) { + return todo_add_location(SchemaError::DuplicateType(enum_.name.item.0)); + } + let index: u32 = self.enums.len().try_into().unwrap(); + let name = enum_.name; + self.enums.push(enum_); + self.type_map.insert(name.item.0, Type::Enum(EnumID(index))); + Ok(EnumID(index)) + } + + pub fn add_input_object( + &mut self, + input_object: InputObject, + ) -> DiagnosticsResult { + if self.type_map.contains_key(&input_object.name.item.0) { + return todo_add_location(SchemaError::DuplicateType(input_object.name.item.0)); + } + let index: u32 = self.input_objects.len().try_into().unwrap(); + let name = input_object.name; + self.input_objects.push(input_object); + self.type_map + .insert(name.item.0, Type::InputObject(InputObjectID(index))); + Ok(InputObjectID(index)) + } + + pub fn add_interface(&mut self, interface: Interface) -> DiagnosticsResult { + if self.type_map.contains_key(&interface.name.item.0) { + return todo_add_location(SchemaError::DuplicateType(interface.name.item.0)); + } + let index: u32 = self.interfaces.len().try_into().unwrap(); + let name = interface.name; + self.interfaces.push(interface); + self.type_map + .insert(name.item.0, Type::Interface(InterfaceID(index))); + Ok(InterfaceID(index)) + } + + pub fn add_object(&mut self, object: Object) -> DiagnosticsResult { + if self.type_map.contains_key(&object.name.item.0) { + return Err(vec![Diagnostic::error( + SchemaError::DuplicateType(object.name.item.0), + object.name.location, + )]); + } + let index: u32 = self.objects.len().try_into().unwrap(); + let name = object.name; + self.objects.push(object); + self.type_map + 
.insert(name.item.0, Type::Object(ObjectID(index))); + Ok(ObjectID(index)) + } + + pub fn add_scalar(&mut self, scalar: Scalar) -> DiagnosticsResult { + if self.type_map.contains_key(&scalar.name.item.0) { + return todo_add_location(SchemaError::DuplicateType(scalar.name.item.0)); + } + let index: u32 = self.scalars.len().try_into().unwrap(); + let name = scalar.name.item; + self.scalars.push(scalar); + self.type_map.insert(name.0, Type::Scalar(ScalarID(index))); + Ok(ScalarID(index)) + } + + pub fn add_union(&mut self, union: Union) -> DiagnosticsResult { + if self.type_map.contains_key(&union.name.item.0) { + return todo_add_location(SchemaError::DuplicateType(union.name.item.0)); + } + let index: u32 = self.unions.len().try_into().unwrap(); + let name = union.name.item; + self.unions.push(union); + self.type_map.insert(name.0, Type::Union(UnionID(index))); + Ok(UnionID(index)) + } + + pub fn add_field_to_interface( + &mut self, + interface_id: InterfaceID, + field_id: FieldID, + ) -> DiagnosticsResult { + let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); + interface.fields.push(field_id); + Ok(interface_id) + } + + pub fn add_field_to_object( + &mut self, + obj_id: ObjectID, + field_id: FieldID, + ) -> DiagnosticsResult { + let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); + object.fields.push(field_id); + Ok(obj_id) + } + + pub fn add_interface_to_object( + &mut self, + obj_id: ObjectID, + interface_id: InterfaceID, + ) -> DiagnosticsResult { + let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); + object.interfaces.push(interface_id); + Ok(obj_id) + } + + pub fn add_parent_interface_to_interface( + &mut self, + interface_id: InterfaceID, + parent_interface_id: InterfaceID, + ) -> DiagnosticsResult { + let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); + interface.interfaces.push(parent_interface_id); + Ok(interface_id) + } + + pub fn add_implementing_object_to_interface( + &mut self, + 
interface_id: InterfaceID, + object_id: ObjectID, + ) -> DiagnosticsResult { + let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); + interface.implementing_objects.push(object_id); + Ok(interface_id) + } + + pub fn add_member_to_union( + &mut self, + union_id: UnionID, + object_id: ObjectID, + ) -> DiagnosticsResult { + let union = self.unions.get_mut(union_id.as_usize()).unwrap(); + union.members.push(object_id); + Ok(union_id) + } + + /// Sets argument definitions for a given input object. + /// Any existing argument definitions will be erased. + pub fn set_input_object_args( + &mut self, + input_object_id: InputObjectID, + fields: ArgumentDefinitions, + ) -> DiagnosticsResult { + let input_object = self + .input_objects + .get_mut(input_object_id.as_usize()) + .unwrap(); + input_object.fields = fields; + Ok(input_object_id) + } + + /// Sets argument definitions for a given field. + /// Any existing argument definitions on the field will be erased. + pub fn set_field_args( + &mut self, + field_id: FieldID, + args: ArgumentDefinitions, + ) -> DiagnosticsResult { + let field = self.fields.get_mut(field_id.as_usize()).unwrap(); + field.arguments = args; + Ok(field_id) + } + + /// Replaces the definition of interface type, but keeps the same id. + /// Existing references to the old type now reference the replacement. + pub fn replace_interface( + &mut self, + id: InterfaceID, + interface: Interface, + ) -> DiagnosticsResult<()> { + if id.as_usize() >= self.interfaces.len() { + return todo_add_location(SchemaError::UnknownTypeID( + id.as_usize(), + String::from("Interface"), + )); + } + self.type_map + .remove(&self.get_type_name(Type::Interface(id))); + self.type_map + .insert(interface.name.item.0, Type::Interface(id)); + self.interfaces[id.as_usize()] = interface; + Ok(()) + } + + /// Replaces the definition of object type, but keeps the same id. + /// Existing references to the old type now reference the replacement. 
+ pub fn replace_object(&mut self, id: ObjectID, object: Object) -> DiagnosticsResult<()> { + if id.as_usize() >= self.objects.len() { + return todo_add_location(SchemaError::UnknownTypeID( + id.as_usize(), + String::from("Object"), + )); + } + self.type_map.remove(&self.get_type_name(Type::Object(id))); + self.type_map.insert(object.name.item.0, Type::Object(id)); + self.objects[id.as_usize()] = object; + Ok(()) + } + + /// Replaces the definition of enum type, but keeps the same id. + /// Existing references to the old type now reference the replacement. + pub fn replace_enum(&mut self, id: EnumID, enum_: Enum) -> DiagnosticsResult<()> { + if id.as_usize() >= self.enums.len() { + return todo_add_location(SchemaError::UnknownTypeID( + id.as_usize(), + String::from("Enum"), + )); + } + self.type_map.remove(&self.get_type_name(Type::Enum(id))); + self.type_map.insert(enum_.name.item.0, Type::Enum(id)); + self.enums[id.as_usize()] = enum_; + Ok(()) + } + + /// Replaces the definition of input object type, but keeps the same id. + /// Existing references to the old type now reference the replacement. + pub fn replace_input_object( + &mut self, + id: InputObjectID, + input_object: InputObject, + ) -> DiagnosticsResult<()> { + if id.as_usize() >= self.input_objects.len() { + return todo_add_location(SchemaError::UnknownTypeID( + id.as_usize(), + String::from("Input Object"), + )); + } + self.type_map + .remove(&self.get_type_name(Type::InputObject(id))); + self.type_map + .insert(input_object.name.item.0, Type::InputObject(id)); + self.input_objects[id.as_usize()] = input_object; + Ok(()) + } + + /// Replaces the definition of union type, but keeps the same id. + /// Existing references to the old type now reference the replacement. 
+ pub fn replace_union(&mut self, id: UnionID, union: Union) -> DiagnosticsResult<()> { + if id.as_usize() >= self.unions.len() { + return todo_add_location(SchemaError::UnknownTypeID( + id.as_usize(), + String::from("Union"), + )); + } + self.type_map.remove(&self.get_type_name(Type::Union(id))); + self.type_map.insert(union.name.item.0, Type::Union(id)); + self.unions[id.as_usize()] = union; + Ok(()) + } + + /// Replaces the definition of field, but keeps the same id. + /// Existing references to the old field now reference the replacement. + pub fn replace_field(&mut self, id: FieldID, field: Field) -> DiagnosticsResult<()> { + let id = id.as_usize(); + if id >= self.fields.len() { + return Err(vec![Diagnostic::error( + SchemaError::UnknownTypeID(id, String::from("Field")), + field.name.location, + )]); + } + self.fields[id] = field; + Ok(()) + } + + /// Creates an uninitialized, invalid schema which can then be added to using the add_* + /// methods. Note that we still bake in some assumptions about the clientid and typename + /// fields, but in practice this is not an issue. 
+ pub fn create_uninitialized() -> InMemorySchema { + InMemorySchema { + query_type: None, + mutation_type: None, + subscription_type: None, + type_map: HashMap::new(), + clientid_field: FieldID(0), + strongid_field: FieldID(0), + typename_field: FieldID(0), + fetch_token_field: FieldID(0), + is_fulfilled_field: FieldID(0), + clientid_field_name: "__id".intern(), + strongid_field_name: "strong_id__".intern(), + typename_field_name: "__typename".intern(), + fetch_token_field_name: "__token".intern(), + is_fulfilled_field_name: "is_fulfilled__".intern(), + string_type: None, + id_type: None, + unchecked_argument_type_sentinel: None, + directives: HashMap::new(), + enums: Vec::new(), + fields: Vec::new(), + input_objects: Vec::new(), + interfaces: Vec::new(), + objects: Vec::new(), + scalars: Vec::new(), + unions: Vec::new(), + } + } + + pub fn build( + schema_documents: &[SchemaDocument], + client_schema_documents: &[SchemaDocument], + ) -> DiagnosticsResult { + let schema_documents = schema_documents + .iter() + .map(|i| (i.definitions.iter().collect::>(), i.location)) + .collect(); + + let client_schema_documents = client_schema_documents + .iter() + .map(|i| (i.definitions.iter().collect::>(), i.location)) + .collect(); + Self::build_impl(schema_documents, client_schema_documents) + } + + pub fn build_with_definition_ptrs( + definitions: Vec<&TypeSystemDefinition>, + location: Location, + ) -> DiagnosticsResult { + Self::build_impl(vec![(definitions, location)], vec![]) + } + + fn build_impl<'a>( + schema_documents: Vec<(Vec<&'a TypeSystemDefinition>, Location)>, + client_schema_documents: Vec<(Vec<&'a TypeSystemDefinition>, Location)>, + ) -> DiagnosticsResult { + let schema_definitions: Vec<(&TypeSystemDefinition, Location)> = schema_documents + .iter() + .flat_map(|document| { + document + .0 + .iter() + .map(|definition| (*definition, document.1)) + }) + .collect(); + + let client_definitions: Vec<(&TypeSystemDefinition, Location)> = client_schema_documents + 
.iter() + .flat_map(|document| { + document + .0 + .iter() + .map(|definition| (*definition, document.1)) + }) + .collect(); + + // Step 1: build the type_map from type names to type keys + let mut type_map = + HashMap::with_capacity(schema_definitions.len() + client_definitions.len()); + let mut next_object_id = 0; + let mut next_interface_id = 0; + let mut next_union_id = 0; + let mut next_input_object_id = 0; + let mut next_enum_id = 0; + let mut next_scalar_id = 0; + let mut field_count = 0; + let mut directive_count = 0; + + let mut duplicate_definitions: Vec<(Type, Location)> = Vec::new(); + + for (definition, location) in schema_definitions.iter().chain(&client_definitions) { + let mut insert_into_type_map = |name: StringKey, type_: Type| { + match type_map.entry(name) { + Entry::Occupied(existing_entry) => { + duplicate_definitions + .push((*existing_entry.get(), location.with_span(definition.span()))); + } + Entry::Vacant(vacant) => { + vacant.insert(type_); + } + }; + }; + + match definition { + TypeSystemDefinition::SchemaDefinition { .. } => {} + TypeSystemDefinition::DirectiveDefinition { .. } => { + directive_count += 1; + } + TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { + name, + fields, + .. + }) => { + insert_into_type_map(name.value, Type::Object(ObjectID(next_object_id))); + field_count += len_of_option_list(fields); + next_object_id += 1; + } + TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { + name, + fields, + .. + }) => { + insert_into_type_map( + name.value, + Type::Interface(InterfaceID(next_interface_id)), + ); + field_count += len_of_option_list(fields); + next_interface_id += 1; + } + TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { name, .. }) => { + insert_into_type_map(name.value, Type::Union(UnionID(next_union_id))); + next_union_id += 1; + } + TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { + name, + .. 
+ }) => { + insert_into_type_map( + name.value, + Type::InputObject(InputObjectID(next_input_object_id)), + ); + next_input_object_id += 1; + } + TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { name, .. }) => { + insert_into_type_map(name.value, Type::Enum(EnumID(next_enum_id))); + next_enum_id += 1; + } + TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { + name, .. + }) => { + // We allow duplicate scalar definitions + type_map.insert(name.value, Type::Scalar(ScalarID(next_scalar_id))); + next_scalar_id += 1; + } + TypeSystemDefinition::ObjectTypeExtension { .. } => {} + TypeSystemDefinition::InterfaceTypeExtension { .. } => {} + TypeSystemDefinition::EnumTypeExtension { .. } => {} + TypeSystemDefinition::SchemaExtension { .. } => { + todo!("SchemaExtension not implemented: {}", definition) + } + TypeSystemDefinition::UnionTypeExtension { .. } => { + todo!("UnionTypeExtension not implemented: {}", definition) + } + TypeSystemDefinition::InputObjectTypeExtension { .. } => { + todo!("InputObjectTypeExtension not implemented: {}", definition) + } + TypeSystemDefinition::ScalarTypeExtension { .. 
} => { + todo!("ScalarTypeExtension not implemented: {}", definition) + } + } + } + + // Step 2: define operation types, directives, and types + let string_type = type_map + .get(&"String".intern()) + .expect("Missing String type") + .get_scalar_id() + .expect("Expected ID to be a Scalar"); + let id_type = type_map + .get(&"ID".intern()) + .expect("Missing ID type") + .get_scalar_id() + .expect("Expected ID to be a Scalar"); + + let unchecked_argument_type_sentinel = Some(TypeReference::Named( + *type_map + .get(&"Boolean".intern()) + .expect("Missing Boolean type"), + )); + + let mut schema = InMemorySchema { + query_type: None, + mutation_type: None, + subscription_type: None, + type_map, + clientid_field: FieldID(0), // dummy value, overwritten later + strongid_field: FieldID(0), // dummy value, overwritten later + typename_field: FieldID(0), // dummy value, overwritten later + fetch_token_field: FieldID(0), // dummy value, overwritten later + is_fulfilled_field: FieldID(0), // dummy value, overwritten later + clientid_field_name: "__id".intern(), + strongid_field_name: "strong_id__".intern(), + typename_field_name: "__typename".intern(), + fetch_token_field_name: "__token".intern(), + is_fulfilled_field_name: "is_fulfilled__".intern(), + string_type: Some(string_type), + id_type: Some(id_type), + unchecked_argument_type_sentinel, + directives: HashMap::with_capacity(directive_count), + enums: Vec::with_capacity(next_enum_id.try_into().unwrap()), + fields: Vec::with_capacity(field_count), + input_objects: Vec::with_capacity(next_input_object_id.try_into().unwrap()), + interfaces: Vec::with_capacity(next_interface_id.try_into().unwrap()), + objects: Vec::with_capacity(next_object_id.try_into().unwrap()), + scalars: Vec::with_capacity(next_scalar_id.try_into().unwrap()), + unions: Vec::with_capacity(next_union_id.try_into().unwrap()), + }; + + for document in schema_documents.iter() { + for definition in document.0.iter() { + schema.add_definition(definition, 
&document.1.source_location(), false)?; + } + } + + for document in client_schema_documents.iter() { + for definition in document.0.iter() { + schema.add_definition(definition, &document.1.source_location(), true)?; + } + } + + if !duplicate_definitions.is_empty() { + return Err(duplicate_definitions + .into_iter() + .map(|(type_, location)| { + let name = schema.get_type_name(type_); + let previous_location = schema.get_type_location(type_); + Diagnostic::error(SchemaError::DuplicateType(name), location).annotate( + format!("`{}` was previously defined here:", name), + previous_location, + ) + }) + .collect()); + } + + for document in schema_documents + .iter() + .chain(client_schema_documents.iter()) + { + for definition in document.0.iter() { + if let TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { + name, + interfaces, + .. + }) = definition + { + let object_id = match schema.type_map.get(&name.value) { + Some(Type::Object(id)) => id, + _ => unreachable!("Must be an Object type"), + }; + for interface in interfaces { + let type_ = schema.type_map.get(&interface.value).unwrap(); + match type_ { + Type::Interface(id) => { + let interface = schema.interfaces.get_mut(id.as_usize()).unwrap(); + interface.implementing_objects.push(*object_id) + } + _ => unreachable!("Must be an interface"), + } + } + } + + if let TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { + name, + interfaces, + .. 
+ }) = definition + { + let child_interface_id = match schema.type_map.get(&name.value) { + Some(Type::Interface(id)) => id, + _ => unreachable!("Must be an Interface type"), + }; + for interface in interfaces { + let type_ = schema.type_map.get(&interface.value).unwrap(); + match type_ { + Type::Interface(id) => { + let interface = schema.interfaces.get_mut(id.as_usize()).unwrap(); + interface.implementing_interfaces.push(*child_interface_id) + } + _ => unreachable!("Must be an interface"), + } + } + } + } + } + schema.load_defaults(); + + Ok(schema) + } + + fn load_defaults(&mut self) { + self.load_default_root_types(); + self.load_default_typename_field(); + self.load_default_fetch_token_field(); + self.load_default_clientid_field(); + self.load_default_strongid_field(); + self.load_default_is_fulfilled_field(); + } + + // In case the schema doesn't define a query, mutation or subscription + // type, but there is a Query, Mutation, or Subscription object type + // defined, default to those. + // This is not standard GraphQL behavior, and we might want to remove + // this at some point. 
+ fn load_default_root_types(&mut self) { + if self.query_type.is_none() { + if let Some(Type::Object(id)) = self.type_map.get(&"Query".intern()) { + self.query_type = Some(*id); + } + } + if self.mutation_type.is_none() { + if let Some(Type::Object(id)) = self.type_map.get(&"Mutation".intern()) { + self.mutation_type = Some(*id); + } + } + if self.subscription_type.is_none() { + if let Some(Type::Object(id)) = self.type_map.get(&"Subscription".intern()) { + self.subscription_type = Some(*id); + } + } + } + + fn load_default_typename_field(&mut self) { + let string_type = *self + .type_map + .get(&"String".intern()) + .expect("Missing String type"); + let typename_field_id = self.fields.len(); + self.typename_field = FieldID(typename_field_id.try_into().unwrap()); + self.fields.push(Field { + name: WithLocation::generated(self.typename_field_name), + is_extension: false, + arguments: ArgumentDefinitions::new(Default::default()), + type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), + directives: Vec::new(), + parent_type: None, + description: Some(*TYPENAME_DESCRIPTION), + hack_source: None, + }); + } + + fn load_default_fetch_token_field(&mut self) { + let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); + let fetch_token_field_id = self.fields.len(); + self.fetch_token_field = FieldID(fetch_token_field_id.try_into().unwrap()); + self.fields.push(Field { + name: WithLocation::generated(self.fetch_token_field_name), + is_extension: false, + arguments: ArgumentDefinitions::new(Default::default()), + type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), + directives: Vec::new(), + parent_type: None, + description: None, + hack_source: None, + }); + } + + fn load_default_clientid_field(&mut self) { + let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); + let clientid_field_id = self.fields.len(); + self.clientid_field = FieldID(clientid_field_id.try_into().unwrap()); + 
self.fields.push(Field { + name: WithLocation::generated(self.clientid_field_name), + is_extension: true, + arguments: ArgumentDefinitions::new(Default::default()), + type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), + directives: Vec::new(), + parent_type: None, + description: Some(*CLIENT_ID_DESCRIPTION), + hack_source: None, + }); + } + + fn load_default_strongid_field(&mut self) { + let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); + let strongid_field_id = self.fields.len(); + self.strongid_field = FieldID(strongid_field_id.try_into().unwrap()); + self.fields.push(Field { + name: WithLocation::generated(self.strongid_field_name), + is_extension: true, + arguments: ArgumentDefinitions::new(Default::default()), + type_: TypeReference::Named(id_type), + directives: Vec::new(), + parent_type: None, + description: None, + hack_source: None, + }); + } + + fn load_default_is_fulfilled_field(&mut self) { + let string_type = *self + .type_map + .get(&"String".intern()) + .expect("Missing String type"); + let is_fulfilled_field_id = self.fields.len(); + self.is_fulfilled_field = FieldID(is_fulfilled_field_id.try_into().unwrap()); + self.fields.push(Field { + name: WithLocation::generated(self.is_fulfilled_field_name), + is_extension: true, + arguments: ArgumentDefinitions::new(vec![Argument { + name: WithLocation::generated(ArgumentName("name".intern())), + type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), + default_value: None, + description: None, + directives: Default::default(), + }]), + type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), + directives: Vec::new(), + parent_type: None, + description: None, + hack_source: None, + }); + } + + /// Add additional object extensions to the schema after its initial + /// creation. 
+ pub fn add_object_type_extension( + &mut self, + object_extension: ObjectTypeExtension, + location_key: SourceLocationKey, + ) -> DiagnosticsResult<()> { + self.add_definition( + &TypeSystemDefinition::ObjectTypeExtension(object_extension), + &location_key, + true, + ) + } + + /// Add additional interface extensions to the schema after its initial + /// creation. + pub fn add_interface_type_extension( + &mut self, + interface_extension: InterfaceTypeExtension, + location_key: SourceLocationKey, + ) -> DiagnosticsResult<()> { + self.add_definition( + &TypeSystemDefinition::InterfaceTypeExtension(interface_extension), + &location_key, + true, + ) + } + + /// Add additional client-only (extension) scalar + pub fn add_extension_scalar( + &mut self, + scalar: ScalarTypeDefinition, + location_key: SourceLocationKey, + ) -> DiagnosticsResult<()> { + let scalar_name = scalar.name.name_with_location(location_key); + + if self.type_map.contains_key(&scalar_name.item) { + return Err(vec![Diagnostic::error( + SchemaError::DuplicateType(scalar_name.item), + scalar_name.location, + )]); + } + + let scalar_id = Type::Scalar(ScalarID(self.scalars.len() as u32)); + self.type_map.insert(scalar_name.item, scalar_id); + self.add_definition( + &TypeSystemDefinition::ScalarTypeDefinition(scalar), + &location_key, + true, + )?; + + Ok(()) + } + + /// Add additional client-only (extension) object + pub fn add_extension_object( + &mut self, + object: ObjectTypeDefinition, + location_key: SourceLocationKey, + ) -> DiagnosticsResult<()> { + let object_name = object.name.name_with_location(location_key); + + if self.type_map.contains_key(&object_name.item) { + return Err(vec![Diagnostic::error( + SchemaError::DuplicateType(object_name.item), + object_name.location, + )]); + } + + let object_id = self.objects.len() as u32; + let object_type = Type::Object(ObjectID(self.objects.len() as u32)); + self.type_map.insert(object_name.item, object_type); + + let interfaces = object + .interfaces + 
.iter() + .map(|name| self.build_interface_id(name, &location_key)) + .collect::>>()?; + + for interface_id in &interfaces { + // All interfaces implemented by this concrete object should exist, and this + // should be checked beforehand. + let interface_obj = self + .interfaces + .get_mut(interface_id.0 as usize) + .expect("Expected interface to exist"); + + if !interface_obj + .implementing_objects + .contains(&ObjectID(object_id)) + { + interface_obj.implementing_objects.push(ObjectID(object_id)) + } + } + + self.add_definition( + &TypeSystemDefinition::ObjectTypeDefinition(object), + &location_key, + true, + )?; + + Ok(()) + } + + fn add_definition( + &mut self, + definition: &TypeSystemDefinition, + location_key: &SourceLocationKey, + is_extension: bool, + ) -> DiagnosticsResult<()> { + match definition { + TypeSystemDefinition::SchemaDefinition(SchemaDefinition { + operation_types, .. + }) => { + for OperationTypeDefinition { + operation, type_, .. + } in &operation_types.items + { + let operation_id = self.build_object_id(type_.value)?; + match operation { + OperationType::Query => { + if let Some(prev_query_type) = self.query_type { + return Err(vec![Diagnostic::error( + SchemaError::DuplicateOperationDefinition( + operation.to_string(), + type_.value, + expect_object_type_name(&self.type_map, prev_query_type), + ), + Location::new(*location_key, type_.span), + )]); + } else { + self.query_type = Some(operation_id); + } + } + OperationType::Mutation => { + if let Some(prev_mutation_type) = self.mutation_type { + return Err(vec![Diagnostic::error( + SchemaError::DuplicateOperationDefinition( + operation.to_string(), + type_.value, + expect_object_type_name(&self.type_map, prev_mutation_type), + ), + Location::new(*location_key, type_.span), + )]); + } else { + self.mutation_type = Some(operation_id); + } + } + OperationType::Subscription => { + if let Some(prev_subscription_type) = self.subscription_type { + return Err(vec![Diagnostic::error( + 
SchemaError::DuplicateOperationDefinition( + operation.to_string(), + type_.value, + expect_object_type_name( + &self.type_map, + prev_subscription_type, + ), + ), + Location::new(*location_key, type_.span), + )]); + } else { + self.subscription_type = Some(operation_id); + } + } + } + } + } + TypeSystemDefinition::DirectiveDefinition(DirectiveDefinition { + name, + arguments, + repeatable, + locations, + description, + hack_source, + .. + }) => { + if self.directives.contains_key(&DirectiveName(name.value)) { + let str_name = name.value.lookup(); + if str_name != "skip" && str_name != "include" { + // TODO(T63941319) @skip and @include directives are duplicated in our schema + return Err(vec![Diagnostic::error( + SchemaError::DuplicateDirectiveDefinition(name.value), + Location::new(*location_key, name.span), + )]); + } + } + let arguments = self.build_arguments(arguments, *location_key)?; + self.directives.insert( + DirectiveName(name.value), + Directive { + name: WithLocation::new( + Location::new(*location_key, name.span), + DirectiveName(name.value), + ), + arguments, + locations: locations.clone(), + repeatable: *repeatable, + is_extension, + description: description.as_ref().map(|node| node.value), + hack_source: hack_source.as_ref().map(|node| node.value), + }, + ); + } + TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { + name, + interfaces, + fields, + directives, + .. + }) => { + let parent_id = Type::Object(ObjectID(self.objects.len() as u32)); + let fields = if is_extension { + self.build_extend_fields( + fields, + &mut HashMap::with_capacity(len_of_option_list(fields)), + *location_key, + Some(parent_id), + )? + } else { + self.build_fields(fields, *location_key, Some(parent_id))? 
+ }; + let interfaces = interfaces + .iter() + .map(|name| self.build_interface_id(name, location_key)) + .collect::>>()?; + let directives = self.build_directive_values(directives); + self.objects.push(Object { + name: WithLocation::new( + Location::new(*location_key, name.span), + ObjectName(name.value), + ), + fields, + is_extension, + interfaces, + directives, + description: None, + hack_source: None, + }); + } + TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { + name, + interfaces, + directives, + fields, + .. + }) => { + let parent_id = Type::Interface(InterfaceID(self.interfaces.len() as u32)); + let fields = if is_extension { + self.build_extend_fields( + fields, + &mut HashMap::with_capacity(len_of_option_list(fields)), + *location_key, + Some(parent_id), + )? + } else { + self.build_fields(fields, *location_key, Some(parent_id))? + }; + let interfaces = interfaces + .iter() + .map(|name| self.build_interface_id(name, location_key)) + .collect::>>()?; + let directives = self.build_directive_values(directives); + self.interfaces.push(Interface { + name: WithLocation::new( + Location::new(*location_key, name.span), + InterfaceName(name.value), + ), + implementing_interfaces: vec![], + implementing_objects: vec![], + is_extension, + fields, + directives, + interfaces, + description: None, + hack_source: None, + }); + } + TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { + name, + directives, + members, + .. + }) => { + let members = members + .iter() + .map(|name| self.build_object_id(name.value)) + .collect::>>()?; + let directives = self.build_directive_values(directives); + self.unions.push(Union { + name: WithLocation::new( + Location::new(*location_key, name.span), + UnionName(name.value), + ), + is_extension, + members, + directives, + description: None, + hack_source: None, + }); + } + TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { + name, + fields, + directives, + .. 
+ }) => { + let fields = self.build_arguments(fields, *location_key)?; + let directives = self.build_directive_values(directives); + self.input_objects.push(InputObject { + name: WithLocation::new( + Location::new(*location_key, name.span), + InputObjectName(name.value), + ), + + fields, + directives, + description: None, + hack_source: None, + }); + } + TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { + name, + directives, + values, + .. + }) => { + let directives = self.build_directive_values(directives); + let values = if let Some(values) = values { + values + .items + .iter() + .map(|enum_def| EnumValue { + value: enum_def.name.value, + directives: self.build_directive_values(&enum_def.directives), + }) + .collect() + } else { + Vec::new() + }; + self.enums.push(Enum { + name: WithLocation::new( + Location::new(*location_key, name.span), + EnumName(name.value), + ), + is_extension, + values, + directives, + description: None, + hack_source: None, + }); + } + TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { + name, + directives, + .. + }) => { + let directives = self.build_directive_values(directives); + self.scalars.push(Scalar { + name: WithLocation::new( + Location::new(*location_key, name.span), + ScalarName(name.value), + ), + is_extension, + directives, + description: None, + hack_source: None, + }) + } + TypeSystemDefinition::ObjectTypeExtension(ObjectTypeExtension { + name, + interfaces, + fields, + directives, + .. 
+ }) => match self.type_map.get(&name.value).cloned() { + Some(Type::Object(id)) => { + let index = id.as_usize(); + let obj = self.objects.get(index).ok_or_else(|| { + vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )] + })?; + + let field_ids = &obj.fields; + let mut existing_fields = + HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); + for field_id in field_ids { + let field_name = self.fields[field_id.as_usize()].name; + existing_fields.insert(field_name.item, field_name.location); + } + let client_fields = self.build_extend_fields( + fields, + &mut existing_fields, + *location_key, + Some(Type::Object(id)), + )?; + + self.objects[index].fields.extend(client_fields); + + let built_interfaces = interfaces + .iter() + .map(|name| self.build_interface_id(name, location_key)) + .collect::>>()?; + extend_without_duplicates( + &mut self.objects[index].interfaces, + built_interfaces, + ); + + let built_directives = self.build_directive_values(directives); + extend_without_duplicates( + &mut self.objects[index].directives, + built_directives, + ); + } + _ => { + return Err(vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )]); + } + }, + TypeSystemDefinition::InterfaceTypeExtension(InterfaceTypeExtension { + name, + interfaces, + fields, + directives, + .. 
+ }) => match self.type_map.get(&name.value).cloned() { + Some(Type::Interface(id)) => { + let index = id.as_usize(); + let interface = self.interfaces.get(index).ok_or_else(|| { + vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )] + })?; + let field_ids = &interface.fields; + let mut existing_fields = + HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); + for field_id in field_ids { + let field_name = self.fields[field_id.as_usize()].name; + existing_fields.insert(field_name.item, field_name.location); + } + let client_fields = self.build_extend_fields( + fields, + &mut existing_fields, + *location_key, + Some(Type::Interface(id)), + )?; + self.interfaces[index].fields.extend(client_fields); + + let built_interfaces = interfaces + .iter() + .map(|name| self.build_interface_id(name, location_key)) + .collect::>>()?; + extend_without_duplicates( + &mut self.interfaces[index].interfaces, + built_interfaces, + ); + + let built_directives = self.build_directive_values(directives); + extend_without_duplicates( + &mut self.interfaces[index].directives, + built_directives, + ); + } + _ => { + return Err(vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )]); + } + }, + TypeSystemDefinition::EnumTypeExtension(EnumTypeExtension { + name, + directives, + values, + .. 
+ }) => { + let enum_id = self.type_map.get(&name.value).cloned(); + match enum_id { + Some(Type::Enum(enum_id)) => { + let index = enum_id.as_usize(); + if self.enums.get(index).is_none() { + return Err(vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )]); + } + + if let Some(values) = values { + let updated_values = values + .items + .iter() + .map(|enum_def| EnumValue { + value: enum_def.name.value, + directives: self.build_directive_values(&enum_def.directives), + }) + .collect::>(); + extend_without_duplicates( + &mut self.enums[index].values, + updated_values, + ); + } + let built_directives = self.build_directive_values(directives); + extend_without_duplicates( + &mut self.enums[index].directives, + built_directives, + ); + } + _ => { + return Err(vec![Diagnostic::error( + SchemaError::ExtendUndefinedType(name.value), + Location::new(*location_key, name.span), + )]); + } + } + } + TypeSystemDefinition::SchemaExtension { .. } => todo!("SchemaExtension"), + + TypeSystemDefinition::UnionTypeExtension { .. } => todo!("UnionTypeExtension"), + TypeSystemDefinition::InputObjectTypeExtension { .. } => { + todo!("InputObjectTypeExtension") + } + TypeSystemDefinition::ScalarTypeExtension { .. 
} => todo!("ScalarTypeExtension"), + } + Ok(()) + } + + fn build_object_id(&mut self, name: StringKey) -> DiagnosticsResult { + match self.type_map.get(&name) { + Some(Type::Object(id)) => Ok(*id), + Some(non_object_type) => todo_add_location(SchemaError::ExpectedObjectReference( + name, + non_object_type.get_variant_name().to_string(), + )), + None => todo_add_location(SchemaError::UndefinedType(name)), + } + } + + fn build_interface_id( + &mut self, + name: &Identifier, + location_key: &SourceLocationKey, + ) -> DiagnosticsResult { + match self.type_map.get(&name.value) { + Some(Type::Interface(id)) => Ok(*id), + Some(non_interface_type) => Err(vec![ + Diagnostic::error( + SchemaError::ExpectedInterfaceReference( + name.value, + non_interface_type.get_variant_name().to_string(), + ), + Location::new(*location_key, name.span), + ) + .annotate( + "the other type is defined here", + self.get_type_location(*non_interface_type), + ), + ]), + None => Err(vec![Diagnostic::error( + SchemaError::UndefinedType(name.value), + Location::new(*location_key, name.span), + )]), + } + } + + fn build_field(&mut self, field: Field) -> FieldID { + let field_index = self.fields.len().try_into().unwrap(); + self.fields.push(field); + FieldID(field_index) + } + + fn build_fields( + &mut self, + field_defs: &Option>, + field_location_key: SourceLocationKey, + parent_type: Option, + ) -> DiagnosticsResult> { + if let Some(field_defs) = field_defs { + field_defs + .items + .iter() + .map(|field_def| { + let arguments = + self.build_arguments(&field_def.arguments, field_location_key)?; + let type_ = self.build_type_reference(&field_def.type_, field_location_key)?; + let directives = self.build_directive_values(&field_def.directives); + let description = field_def.description.as_ref().map(|desc| desc.value); + let hack_source = field_def + .hack_source + .as_ref() + .map(|hack_source| hack_source.value); + Ok(self.build_field(Field { + name: WithLocation::new( + 
Location::new(field_location_key, field_def.name.span), + field_def.name.value, + ), + is_extension: false, + arguments, + type_, + directives, + parent_type, + description, + hack_source, + })) + }) + .collect() + } else { + Ok(Vec::new()) + } + } + + fn build_extend_fields( + &mut self, + field_defs: &Option>, + existing_fields: &mut HashMap, + source_location_key: SourceLocationKey, + parent_type: Option, + ) -> DiagnosticsResult> { + if let Some(field_defs) = field_defs { + let mut field_ids: Vec = Vec::with_capacity(field_defs.items.len()); + for field_def in &field_defs.items { + let field_name = field_def.name.value; + let field_location = Location::new(source_location_key, field_def.name.span); + if let Some(prev_location) = existing_fields.insert(field_name, field_location) { + return Err(vec![ + Diagnostic::error(SchemaError::DuplicateField(field_name), field_location) + .annotate("previously defined here", prev_location), + ]); + } + let arguments = self.build_arguments(&field_def.arguments, source_location_key)?; + let directives = self.build_directive_values(&field_def.directives); + let type_ = self.build_type_reference(&field_def.type_, source_location_key)?; + let description = field_def.description.as_ref().map(|desc| desc.value); + let hack_source = field_def + .hack_source + .as_ref() + .map(|hack_source| hack_source.value); + field_ids.push(self.build_field(Field { + name: WithLocation::new(field_location, field_name), + is_extension: true, + arguments, + type_, + directives, + parent_type, + description, + hack_source, + })); + } + Ok(field_ids) + } else { + Ok(Vec::new()) + } + } + + fn build_arguments( + &mut self, + arg_defs: &Option>, + source_location_key: SourceLocationKey, + ) -> DiagnosticsResult { + if let Some(arg_defs) = arg_defs { + let arg_defs: DiagnosticsResult> = arg_defs + .items + .iter() + .map(|arg_def| { + let argument_location = Location::new(source_location_key, arg_def.name.span); + + Ok(Argument { + name: 
WithLocation::new( + argument_location, + ArgumentName(arg_def.name.value), + ), + type_: self.build_input_object_reference(&arg_def.type_)?, + default_value: arg_def + .default_value + .as_ref() + .map(|default_value| default_value.value.clone()), + description: None, + directives: self.build_directive_values(&arg_def.directives), + }) + }) + .collect(); + Ok(ArgumentDefinitions(arg_defs?)) + } else { + Ok(ArgumentDefinitions(Vec::new())) + } + } + + fn build_input_object_reference( + &mut self, + ast_type: &TypeAnnotation, + ) -> DiagnosticsResult> { + Ok(match ast_type { + TypeAnnotation::Named(named_type) => { + let type_ = self.type_map.get(&named_type.name.value).ok_or_else(|| { + vec![Diagnostic::error( + SchemaError::UndefinedType(named_type.name.value), + Location::new(SourceLocationKey::generated(), named_type.name.span), + )] + })?; + if !(type_.is_enum() || type_.is_scalar() || type_.is_input_object()) { + return Err(vec![Diagnostic::error( + SchemaError::ExpectedInputType(named_type.name.value), + Location::new(SourceLocationKey::generated(), named_type.name.span), + )]); + } + + TypeReference::Named(*type_) + } + TypeAnnotation::NonNull(of_type) => { + TypeReference::NonNull(Box::new(self.build_input_object_reference(&of_type.type_)?)) + } + TypeAnnotation::List(of_type) => { + TypeReference::List(Box::new(self.build_input_object_reference(&of_type.type_)?)) + } + }) + } + + fn build_type_reference( + &mut self, + ast_type: &TypeAnnotation, + source_location: SourceLocationKey, + ) -> DiagnosticsResult> { + Ok(match ast_type { + TypeAnnotation::Named(named_type) => TypeReference::Named( + *self.type_map.get(&named_type.name.value).ok_or_else(|| { + vec![Diagnostic::error( + SchemaError::UndefinedType(named_type.name.value), + Location::new(source_location, named_type.name.span), + )] + })?, + ), + TypeAnnotation::NonNull(of_type) => TypeReference::NonNull(Box::new( + self.build_type_reference(&of_type.type_, source_location)?, + )), + 
TypeAnnotation::List(of_type) => TypeReference::List(Box::new( + self.build_type_reference(&of_type.type_, source_location)?, + )), + }) + } + + fn build_directive_values(&mut self, directives: &[ConstantDirective]) -> Vec { + directives + .iter() + .map(|directive| { + let arguments = if let Some(arguments) = &directive.arguments { + arguments + .items + .iter() + .map(|argument| ArgumentValue { + name: ArgumentName(argument.name.value), + value: argument.value.clone(), + }) + .collect() + } else { + Vec::new() + }; + DirectiveValue { + name: DirectiveName(directive.name.value), + arguments, + } + }) + .collect() + } + + fn get_type_location(&self, type_: Type) -> Location { + match type_ { + Type::InputObject(id) => self.input_objects[id.as_usize()].name.location, + Type::Enum(id) => self.enums[id.as_usize()].name.location, + Type::Interface(id) => self.interfaces[id.as_usize()].name.location, + Type::Object(id) => self.objects[id.as_usize()].name.location, + Type::Scalar(id) => self.scalars[id.as_usize()].name.location, + Type::Union(id) => self.unions[id.as_usize()].name.location, + } + } +} + +/// Extends the `target` with `extensions` ignoring items that are already in +/// `target`. 
+fn extend_without_duplicates( + target: &mut Vec, + extensions: impl IntoIterator, +) { + for extension in extensions { + if !target.contains(&extension) { + target.push(extension); + } + } +} + +fn len_of_option_list(option_list: &Option>) -> usize { + option_list.as_ref().map_or(0, |list| list.items.len()) +} + +fn expect_object_type_name(type_map: &TypeMap, object_id: ObjectID) -> StringKey { + *type_map + .iter() + .find(|(_, type_)| match type_ { + Type::Object(id_) => id_ == &object_id, + _ => false, + }) + .expect("Missing object in type_map") + .0 +} + +#[cfg(test)] +mod tests { + use common::Span; + + use super::*; + + #[test] + fn test_extend_without_duplicates() { + let mut target = vec![10, 11]; + extend_without_duplicates(&mut target, vec![1, 10, 100]); + assert_eq!(target, vec![10, 11, 1, 100]); + } + + fn identifier_from_value(value: StringKey) -> Identifier { + Identifier { + span: Span { start: 0, end: 1 }, + token: Token { + span: Span { start: 0, end: 1 }, + kind: TokenKind::Identifier, + }, + value, + } + } + + #[test] + fn test_adding_extension_object() { + let mut schema = InMemorySchema::create_uninitialized(); + + schema + .add_interface(Interface { + name: WithLocation::generated(InterfaceName("ITunes".intern())), + is_extension: false, + implementing_interfaces: vec![], + implementing_objects: vec![], + fields: vec![], + directives: vec![], + interfaces: vec![], + description: None, + hack_source: None, + }) + .unwrap(); + + schema + .add_extension_object( + ObjectTypeDefinition { + name: identifier_from_value("EarlyModel".intern()), + interfaces: vec![identifier_from_value("ITunes".intern())], + directives: vec![], + fields: None, + span: Span::empty(), + }, + SourceLocationKey::Generated, + ) + .unwrap(); + + let interface = schema.interface(InterfaceID(0)); + + assert!( + interface.implementing_objects.len() == 1, + "ITunes should have an implementing object" + ); + } +} diff --git a/compiler/crates/schema/src/in_memory/mod.rs 
b/compiler/crates/schema/src/in_memory/mod.rs deleted file mode 100644 index 6dd91dca0033b..0000000000000 --- a/compiler/crates/schema/src/in_memory/mod.rs +++ /dev/null @@ -1,1679 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::collections::BTreeMap; -use std::collections::HashMap; - -use common::ArgumentName; -use common::Diagnostic; -use common::DiagnosticsResult; -use common::DirectiveName; -use common::EnumName; -use common::InputObjectName; -use common::InterfaceName; -use common::Location; -use common::ObjectName; -use common::ScalarName; -use common::SourceLocationKey; -use common::WithLocation; -use graphql_syntax::*; -use intern::string_key::Intern; -use intern::string_key::StringKey; -use intern::Lookup; - -use crate::definitions::Argument; -use crate::definitions::Directive; -use crate::definitions::*; -use crate::errors::SchemaError; -use crate::field_descriptions::CLIENT_ID_DESCRIPTION; -use crate::field_descriptions::TYPENAME_DESCRIPTION; -use crate::graphql_schema::Schema; - -fn todo_add_location(error: SchemaError) -> DiagnosticsResult { - Err(vec![Diagnostic::error(error, Location::generated())]) -} - -#[derive(Debug)] -pub struct InMemorySchema { - query_type: Option, - mutation_type: Option, - subscription_type: Option, - type_map: TypeMap, - - clientid_field: FieldID, - strongid_field: FieldID, - typename_field: FieldID, - fetch_token_field: FieldID, - is_fulfilled_field: FieldID, - - clientid_field_name: StringKey, - strongid_field_name: StringKey, - typename_field_name: StringKey, - fetch_token_field_name: StringKey, - is_fulfilled_field_name: StringKey, - - string_type: Option, - id_type: Option, - - unchecked_argument_type_sentinel: Option>, - - directives: HashMap, - - enums: Vec, - fields: Vec, - input_objects: Vec, - interfaces: Vec, - objects: Vec, - scalars: Vec, - 
unions: Vec, -} - -impl Schema for InMemorySchema { - fn query_type(&self) -> Option { - self.query_type.map(Type::Object) - } - - fn mutation_type(&self) -> Option { - self.mutation_type.map(Type::Object) - } - - fn subscription_type(&self) -> Option { - self.subscription_type.map(Type::Object) - } - - fn clientid_field(&self) -> FieldID { - self.clientid_field - } - - fn strongid_field(&self) -> FieldID { - self.strongid_field - } - - fn typename_field(&self) -> FieldID { - self.typename_field - } - - fn fetch_token_field(&self) -> FieldID { - self.fetch_token_field - } - - fn is_fulfilled_field(&self) -> FieldID { - self.is_fulfilled_field - } - - fn get_type(&self, type_name: StringKey) -> Option { - self.type_map.get(&type_name).copied() - } - - fn get_directive(&self, name: DirectiveName) -> Option<&Directive> { - self.directives.get(&name) - } - - fn input_object(&self, id: InputObjectID) -> &InputObject { - &self.input_objects[id.as_usize()] - } - - fn enum_(&self, id: EnumID) -> &Enum { - &self.enums[id.as_usize()] - } - - fn scalar(&self, id: ScalarID) -> &Scalar { - &self.scalars[id.as_usize()] - } - - fn field(&self, id: FieldID) -> &Field { - &self.fields[id.as_usize()] - } - - fn object(&self, id: ObjectID) -> &Object { - &self.objects[id.as_usize()] - } - - fn union(&self, id: UnionID) -> &Union { - &self.unions[id.as_usize()] - } - - fn interface(&self, id: InterfaceID) -> &Interface { - &self.interfaces[id.as_usize()] - } - - fn get_type_name(&self, type_: Type) -> StringKey { - match type_ { - Type::InputObject(id) => self.input_objects[id.as_usize()].name.item.0, - Type::Enum(id) => self.enums[id.as_usize()].name.item.0, - Type::Interface(id) => self.interfaces[id.as_usize()].name.item.0, - Type::Object(id) => self.objects[id.as_usize()].name.item.0, - Type::Scalar(id) => self.scalars[id.as_usize()].name.item.0, - Type::Union(id) => self.unions[id.as_usize()].name.item, - } - } - - fn is_extension_type(&self, type_: Type) -> bool { - match type_ 
{ - Type::Enum(id) => self.enums[id.as_usize()].is_extension, - Type::Interface(id) => self.interfaces[id.as_usize()].is_extension, - Type::Object(id) => self.objects[id.as_usize()].is_extension, - Type::Scalar(id) => self.scalars[id.as_usize()].is_extension, - Type::Union(id) => self.unions[id.as_usize()].is_extension, - Type::InputObject(_) => false, - } - } - - fn is_string(&self, type_: Type) -> bool { - type_ == Type::Scalar(self.string_type.unwrap()) - } - - fn is_id(&self, type_: Type) -> bool { - type_ == Type::Scalar(self.id_type.unwrap()) - } - - fn named_field(&self, parent_type: Type, name: StringKey) -> Option { - // Special case for __typename and __id fields, which should not be in the list of type fields - // but should be fine to select. - let can_have_typename = matches!( - parent_type, - Type::Object(_) | Type::Interface(_) | Type::Union(_) - ); - if can_have_typename { - if name == self.typename_field_name { - return Some(self.typename_field); - } - // TODO(inanc): Also check if the parent type is fetchable? 
- if name == self.fetch_token_field_name { - return Some(self.fetch_token_field); - } - if name == self.clientid_field_name { - return Some(self.clientid_field); - } - if name == self.strongid_field_name { - return Some(self.strongid_field); - } - } - - let fields = match parent_type { - Type::Object(id) => { - let object = &self.objects[id.as_usize()]; - &object.fields - } - Type::Interface(id) => { - let interface = &self.interfaces[id.as_usize()]; - &interface.fields - } - // Unions don't have any fields, but can have selections like __typename - // or a field with @fixme_fat_interface - Type::Union(_) => return None, - _ => panic!( - "Cannot get field {} on type '{:?}', this type does not have fields", - name, - self.get_type_name(parent_type) - ), - }; - fields - .iter() - .find(|field_id| { - let field = &self.fields[field_id.as_usize()]; - field.name.item == name - }) - .cloned() - } - - /// A value that represents a type of unchecked arguments where we don't - /// have a type to instantiate the argument. - /// - /// TODO: we probably want to replace this with a proper `Unknown` type. 
- fn unchecked_argument_type_sentinel(&self) -> &TypeReference { - self.unchecked_argument_type_sentinel.as_ref().unwrap() - } - - fn snapshot_print(&self) -> String { - let Self { - query_type, - mutation_type, - subscription_type, - directives, - clientid_field: _clientid_field, - strongid_field: _strongid_field, - typename_field: _typename_field, - fetch_token_field: _fetch_token_field, - is_fulfilled_field: _is_fulfilled_field, - clientid_field_name: _clientid_field_name, - strongid_field_name: _strongid_field_name, - typename_field_name: _typename_field_name, - fetch_token_field_name: _fetch_token_field_name, - is_fulfilled_field_name: _is_fulfilled_field_name, - string_type: _string_type, - id_type: _id_type, - unchecked_argument_type_sentinel: _unchecked_argument_type_sentinel, - type_map, - enums, - fields, - input_objects, - interfaces, - objects, - scalars, - unions, - } = self; - let ordered_type_map: BTreeMap<_, _> = type_map.iter().collect(); - - let mut ordered_directives = directives.values().collect::>(); - ordered_directives.sort_by_key(|dir| dir.name.0.lookup()); - - format!( - r#"Schema {{ - query_type: {:#?} - mutation_type: {:#?} - subscription_type: {:#?} - directives: {:#?} - type_map: {:#?} - enums: {:#?} - fields: {:#?} - input_objects: {:#?} - interfaces: {:#?} - objects: {:#?} - scalars: {:#?} - unions: {:#?} - }}"#, - query_type, - mutation_type, - subscription_type, - ordered_directives, - ordered_type_map, - enums, - fields, - input_objects, - interfaces, - objects, - scalars, - unions, - ) - } - - fn input_objects<'a>(&'a self) -> Box + 'a> { - Box::new(self.input_objects.iter()) - } - - fn enums<'a>(&'a self) -> Box + 'a> { - Box::new(self.enums.iter()) - } - - fn scalars<'a>(&'a self) -> Box + 'a> { - Box::new(self.scalars.iter()) - } - - fn fields<'a>(&'a self) -> Box + 'a> { - Box::new(self.fields.iter()) - } - - fn objects<'a>(&'a self) -> Box + 'a> { - Box::new(self.objects.iter()) - } - - fn unions<'a>(&'a self) -> Box + 'a> { 
- Box::new(self.unions.iter()) - } - - fn interfaces<'a>(&'a self) -> Box + 'a> { - Box::new(self.interfaces.iter()) - } -} - -impl InMemorySchema { - pub fn get_directive_mut(&mut self, name: DirectiveName) -> Option<&mut Directive> { - self.directives.get_mut(&name) - } - - pub fn get_type_map(&self) -> impl Iterator { - self.type_map.iter() - } - - pub fn get_directives(&self) -> impl Iterator { - self.directives.values() - } - - /// Returns all directives applicable for a given location(Query, Field, etc). - pub fn directives_for_location(&self, location: DirectiveLocation) -> Vec<&Directive> { - self.directives - .values() - .filter(|directive| directive.locations.contains(&location)) - .collect() - } - - pub fn get_fields(&self) -> impl Iterator { - self.fields.iter() - } - - pub fn get_interfaces(&self) -> impl Iterator { - self.interfaces.iter() - } - - pub fn get_enums(&self) -> impl Iterator { - self.enums.iter() - } - - pub fn get_objects(&self) -> impl Iterator { - self.objects.iter() - } - - pub fn get_unions(&self) -> impl Iterator { - self.unions.iter() - } - - pub fn has_directive(&self, directive_name: DirectiveName) -> bool { - self.directives.contains_key(&directive_name) - } - - pub fn has_type(&self, type_name: StringKey) -> bool { - self.type_map.contains_key(&type_name) - } - - pub fn add_directive(&mut self, directive: Directive) -> DiagnosticsResult<()> { - if self.directives.contains_key(&directive.name) { - return todo_add_location(SchemaError::DuplicateDirectiveDefinition(directive.name.0)); - } - self.directives.insert(directive.name, directive); - Ok(()) - } - - pub fn remove_directive(&mut self, directive_name: DirectiveName) -> DiagnosticsResult<()> { - if !self.directives.contains_key(&directive_name) { - // Cannot find the directive to remove - return todo_add_location(SchemaError::UndefinedDirective(directive_name.0)); - } - self.directives.remove(&directive_name); - Ok(()) - } - - pub fn add_field(&mut self, field: Field) -> 
DiagnosticsResult { - Ok(self.build_field(field)) - } - - pub fn add_enum(&mut self, enum_: Enum) -> DiagnosticsResult { - if self.type_map.contains_key(&enum_.name.item.0) { - return todo_add_location(SchemaError::DuplicateType(enum_.name.item.0)); - } - let index: u32 = self.enums.len().try_into().unwrap(); - let name = enum_.name; - self.enums.push(enum_); - self.type_map.insert(name.item.0, Type::Enum(EnumID(index))); - Ok(EnumID(index)) - } - - pub fn add_input_object( - &mut self, - input_object: InputObject, - ) -> DiagnosticsResult { - if self.type_map.contains_key(&input_object.name.item.0) { - return todo_add_location(SchemaError::DuplicateType(input_object.name.item.0)); - } - let index: u32 = self.input_objects.len().try_into().unwrap(); - let name = input_object.name; - self.input_objects.push(input_object); - self.type_map - .insert(name.item.0, Type::InputObject(InputObjectID(index))); - Ok(InputObjectID(index)) - } - - pub fn add_interface(&mut self, interface: Interface) -> DiagnosticsResult { - if self.type_map.contains_key(&interface.name.item.0) { - return todo_add_location(SchemaError::DuplicateType(interface.name.item.0)); - } - let index: u32 = self.interfaces.len().try_into().unwrap(); - let name = interface.name; - self.interfaces.push(interface); - self.type_map - .insert(name.item.0, Type::Interface(InterfaceID(index))); - Ok(InterfaceID(index)) - } - - pub fn add_object(&mut self, object: Object) -> DiagnosticsResult { - if self.type_map.contains_key(&object.name.item.0) { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateType(object.name.item.0), - object.name.location, - )]); - } - let index: u32 = self.objects.len().try_into().unwrap(); - let name = object.name; - self.objects.push(object); - self.type_map - .insert(name.item.0, Type::Object(ObjectID(index))); - Ok(ObjectID(index)) - } - - pub fn add_scalar(&mut self, scalar: Scalar) -> DiagnosticsResult { - if self.type_map.contains_key(&scalar.name.item.0) { - return 
todo_add_location(SchemaError::DuplicateType(scalar.name.item.0)); - } - let index: u32 = self.scalars.len().try_into().unwrap(); - let name = scalar.name.item; - self.scalars.push(scalar); - self.type_map.insert(name.0, Type::Scalar(ScalarID(index))); - Ok(ScalarID(index)) - } - - pub fn add_union(&mut self, union: Union) -> DiagnosticsResult { - if self.type_map.contains_key(&union.name.item) { - return todo_add_location(SchemaError::DuplicateType(union.name.item)); - } - let index: u32 = self.unions.len().try_into().unwrap(); - let name = union.name.item; - self.unions.push(union); - self.type_map.insert(name, Type::Union(UnionID(index))); - Ok(UnionID(index)) - } - - pub fn add_field_to_interface( - &mut self, - interface_id: InterfaceID, - field_id: FieldID, - ) -> DiagnosticsResult { - let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); - interface.fields.push(field_id); - Ok(interface_id) - } - - pub fn add_field_to_object( - &mut self, - obj_id: ObjectID, - field_id: FieldID, - ) -> DiagnosticsResult { - let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); - object.fields.push(field_id); - Ok(obj_id) - } - - pub fn add_interface_to_object( - &mut self, - obj_id: ObjectID, - interface_id: InterfaceID, - ) -> DiagnosticsResult { - let object = self.objects.get_mut(obj_id.as_usize()).unwrap(); - object.interfaces.push(interface_id); - Ok(obj_id) - } - - pub fn add_parent_interface_to_interface( - &mut self, - interface_id: InterfaceID, - parent_interface_id: InterfaceID, - ) -> DiagnosticsResult { - let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); - interface.interfaces.push(parent_interface_id); - Ok(interface_id) - } - - pub fn add_implementing_object_to_interface( - &mut self, - interface_id: InterfaceID, - object_id: ObjectID, - ) -> DiagnosticsResult { - let interface = self.interfaces.get_mut(interface_id.as_usize()).unwrap(); - interface.implementing_objects.push(object_id); - Ok(interface_id) - 
} - - pub fn add_member_to_union( - &mut self, - union_id: UnionID, - object_id: ObjectID, - ) -> DiagnosticsResult { - let union = self.unions.get_mut(union_id.as_usize()).unwrap(); - union.members.push(object_id); - Ok(union_id) - } - - /// Sets argument definitions for a given input object. - /// Any existing argument definitions will be erased. - pub fn set_input_object_args( - &mut self, - input_object_id: InputObjectID, - fields: ArgumentDefinitions, - ) -> DiagnosticsResult { - let input_object = self - .input_objects - .get_mut(input_object_id.as_usize()) - .unwrap(); - input_object.fields = fields; - Ok(input_object_id) - } - - /// Sets argument definitions for a given field. - /// Any existing argument definitions on the field will be erased. - pub fn set_field_args( - &mut self, - field_id: FieldID, - args: ArgumentDefinitions, - ) -> DiagnosticsResult { - let field = self.fields.get_mut(field_id.as_usize()).unwrap(); - field.arguments = args; - Ok(field_id) - } - - /// Replaces the definition of interface type, but keeps the same id. - /// Existing references to the old type now reference the replacement. - pub fn replace_interface( - &mut self, - id: InterfaceID, - interface: Interface, - ) -> DiagnosticsResult<()> { - if id.as_usize() >= self.interfaces.len() { - return todo_add_location(SchemaError::UnknownTypeID( - id.as_usize(), - String::from("Interface"), - )); - } - self.type_map - .remove(&self.get_type_name(Type::Interface(id))); - self.type_map - .insert(interface.name.item.0, Type::Interface(id)); - self.interfaces[id.as_usize()] = interface; - Ok(()) - } - - /// Replaces the definition of object type, but keeps the same id. - /// Existing references to the old type now reference the replacement. 
- pub fn replace_object(&mut self, id: ObjectID, object: Object) -> DiagnosticsResult<()> { - if id.as_usize() >= self.objects.len() { - return todo_add_location(SchemaError::UnknownTypeID( - id.as_usize(), - String::from("Object"), - )); - } - self.type_map.remove(&self.get_type_name(Type::Object(id))); - self.type_map.insert(object.name.item.0, Type::Object(id)); - self.objects[id.as_usize()] = object; - Ok(()) - } - - /// Replaces the definition of enum type, but keeps the same id. - /// Existing references to the old type now reference the replacement. - pub fn replace_enum(&mut self, id: EnumID, enum_: Enum) -> DiagnosticsResult<()> { - if id.as_usize() >= self.enums.len() { - return todo_add_location(SchemaError::UnknownTypeID( - id.as_usize(), - String::from("Enum"), - )); - } - self.type_map.remove(&self.get_type_name(Type::Enum(id))); - self.type_map.insert(enum_.name.item.0, Type::Enum(id)); - self.enums[id.as_usize()] = enum_; - Ok(()) - } - - /// Replaces the definition of input object type, but keeps the same id. - /// Existing references to the old type now reference the replacement. - pub fn replace_input_object( - &mut self, - id: InputObjectID, - input_object: InputObject, - ) -> DiagnosticsResult<()> { - if id.as_usize() >= self.enums.len() { - return todo_add_location(SchemaError::UnknownTypeID( - id.as_usize(), - String::from("Input Object"), - )); - } - self.type_map - .remove(&self.get_type_name(Type::InputObject(id))); - self.type_map - .insert(input_object.name.item.0, Type::InputObject(id)); - self.input_objects[id.as_usize()] = input_object; - Ok(()) - } - - /// Replaces the definition of union type, but keeps the same id. - /// Existing references to the old type now reference the replacement. 
- pub fn replace_union(&mut self, id: UnionID, union: Union) -> DiagnosticsResult<()> { - if id.as_usize() >= self.enums.len() { - return todo_add_location(SchemaError::UnknownTypeID( - id.as_usize(), - String::from("Union"), - )); - } - self.type_map.remove(&self.get_type_name(Type::Union(id))); - self.type_map.insert(union.name.item, Type::Union(id)); - self.unions[id.as_usize()] = union; - Ok(()) - } - - /// Replaces the definition of field, but keeps the same id. - /// Existing references to the old field now reference the replacement. - pub fn replace_field(&mut self, id: FieldID, field: Field) -> DiagnosticsResult<()> { - let id = id.as_usize(); - if id >= self.fields.len() { - return Err(vec![Diagnostic::error( - SchemaError::UnknownTypeID(id, String::from("Field")), - field.name.location, - )]); - } - self.fields[id] = field; - Ok(()) - } - - /// Creates an uninitialized, invalid schema which can then be added to using the add_* - /// methods. Note that we still bake in some assumptions about the clientid and typename - /// fields, but in practice this is not an issue. 
- pub fn create_uninitialized() -> InMemorySchema { - InMemorySchema { - query_type: None, - mutation_type: None, - subscription_type: None, - type_map: HashMap::new(), - clientid_field: FieldID(0), - strongid_field: FieldID(0), - typename_field: FieldID(0), - fetch_token_field: FieldID(0), - is_fulfilled_field: FieldID(0), - clientid_field_name: "__id".intern(), - strongid_field_name: "strong_id__".intern(), - typename_field_name: "__typename".intern(), - fetch_token_field_name: "__token".intern(), - is_fulfilled_field_name: "is_fulfilled__".intern(), - string_type: None, - id_type: None, - unchecked_argument_type_sentinel: None, - directives: HashMap::new(), - enums: Vec::new(), - fields: Vec::new(), - input_objects: Vec::new(), - interfaces: Vec::new(), - objects: Vec::new(), - scalars: Vec::new(), - unions: Vec::new(), - } - } - - pub fn build( - schema_documents: &[SchemaDocument], - client_schema_documents: &[SchemaDocument], - ) -> DiagnosticsResult { - let schema_definitions: Vec<&TypeSystemDefinition> = schema_documents - .iter() - .flat_map(|document| &document.definitions) - .collect(); - - let client_definitions: Vec<&TypeSystemDefinition> = client_schema_documents - .iter() - .flat_map(|document| &document.definitions) - .collect(); - - // Step 1: build the type_map from type names to type keys - let mut type_map = - HashMap::with_capacity(schema_definitions.len() + client_definitions.len()); - let mut next_object_id = 0; - let mut next_interface_id = 0; - let mut next_union_id = 0; - let mut next_input_object_id = 0; - let mut next_enum_id = 0; - let mut next_scalar_id = 0; - let mut field_count = 0; - let mut directive_count = 0; - - for definition in schema_definitions.iter().chain(&client_definitions) { - match definition { - TypeSystemDefinition::SchemaDefinition { .. } => {} - TypeSystemDefinition::DirectiveDefinition { .. } => { - directive_count += 1; - } - TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { - name, - fields, - .. 
- }) => { - type_map.insert(name.value, Type::Object(ObjectID(next_object_id))); - field_count += len_of_option_list(fields); - next_object_id += 1; - } - TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { - name, - fields, - .. - }) => { - type_map.insert(name.value, Type::Interface(InterfaceID(next_interface_id))); - field_count += len_of_option_list(fields); - next_interface_id += 1; - } - TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { name, .. }) => { - type_map.insert(name.value, Type::Union(UnionID(next_union_id))); - next_union_id += 1; - } - TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { - name, - .. - }) => { - type_map.insert( - name.value, - Type::InputObject(InputObjectID(next_input_object_id)), - ); - next_input_object_id += 1; - } - TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { name, .. }) => { - type_map.insert(name.value, Type::Enum(EnumID(next_enum_id))); - next_enum_id += 1; - } - TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { - name, .. - }) => { - type_map.insert(name.value, Type::Scalar(ScalarID(next_scalar_id))); - next_scalar_id += 1; - } - TypeSystemDefinition::ObjectTypeExtension { .. } => {} - TypeSystemDefinition::InterfaceTypeExtension { .. } => {} - TypeSystemDefinition::SchemaExtension { .. } => todo!("SchemaExtension"), - TypeSystemDefinition::EnumTypeExtension { .. } => todo!("EnumTypeExtension"), - TypeSystemDefinition::UnionTypeExtension { .. } => todo!("UnionTypeExtension"), - TypeSystemDefinition::InputObjectTypeExtension { .. } => { - todo!("InputObjectTypeExtension") - } - TypeSystemDefinition::ScalarTypeExtension { .. 
} => todo!("ScalarTypeExtension"), - } - } - - // Step 2: define operation types, directives, and types - let string_type = type_map - .get(&"String".intern()) - .expect("Missing String type") - .get_scalar_id() - .expect("Expected ID to be a Scalar"); - let id_type = type_map - .get(&"ID".intern()) - .expect("Missing ID type") - .get_scalar_id() - .expect("Expected ID to be a Scalar"); - - let unchecked_argument_type_sentinel = Some(TypeReference::Named( - *type_map - .get(&"Boolean".intern()) - .expect("Missing Boolean type"), - )); - - let mut schema = InMemorySchema { - query_type: None, - mutation_type: None, - subscription_type: None, - type_map, - clientid_field: FieldID(0), // dummy value, overwritten later - strongid_field: FieldID(0), // dummy value, overwritten later - typename_field: FieldID(0), // dummy value, overwritten later - fetch_token_field: FieldID(0), // dummy value, overwritten later - is_fulfilled_field: FieldID(0), // dummy value, overwritten later - clientid_field_name: "__id".intern(), - strongid_field_name: "strong_id__".intern(), - typename_field_name: "__typename".intern(), - fetch_token_field_name: "__token".intern(), - is_fulfilled_field_name: "is_fulfilled__".intern(), - string_type: Some(string_type), - id_type: Some(id_type), - unchecked_argument_type_sentinel, - directives: HashMap::with_capacity(directive_count), - enums: Vec::with_capacity(next_enum_id.try_into().unwrap()), - fields: Vec::with_capacity(field_count), - input_objects: Vec::with_capacity(next_input_object_id.try_into().unwrap()), - interfaces: Vec::with_capacity(next_interface_id.try_into().unwrap()), - objects: Vec::with_capacity(next_object_id.try_into().unwrap()), - scalars: Vec::with_capacity(next_scalar_id.try_into().unwrap()), - unions: Vec::with_capacity(next_union_id.try_into().unwrap()), - }; - - for document in schema_documents { - for definition in &document.definitions { - schema.add_definition(definition, &document.location.source_location(), false)?; 
- } - } - - for document in client_schema_documents { - for definition in &document.definitions { - schema.add_definition(definition, &document.location.source_location(), true)?; - } - } - - for document in schema_documents.iter().chain(client_schema_documents) { - for definition in &document.definitions { - if let TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { - name, - interfaces, - .. - }) = definition - { - let object_id = match schema.type_map.get(&name.value) { - Some(Type::Object(id)) => id, - _ => unreachable!("Must be an Object type"), - }; - for interface in interfaces { - let type_ = schema.type_map.get(&interface.value).unwrap(); - match type_ { - Type::Interface(id) => { - let interface = schema.interfaces.get_mut(id.as_usize()).unwrap(); - interface.implementing_objects.push(*object_id) - } - _ => unreachable!("Must be an interface"), - } - } - } - - if let TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { - name, - interfaces, - .. 
- }) = definition - { - let child_interface_id = match schema.type_map.get(&name.value) { - Some(Type::Interface(id)) => id, - _ => unreachable!("Must be an Interface type"), - }; - for interface in interfaces { - let type_ = schema.type_map.get(&interface.value).unwrap(); - match type_ { - Type::Interface(id) => { - let interface = schema.interfaces.get_mut(id.as_usize()).unwrap(); - interface.implementing_interfaces.push(*child_interface_id) - } - _ => unreachable!("Must be an interface"), - } - } - } - } - } - schema.load_defaults(); - - Ok(schema) - } - - fn load_defaults(&mut self) { - self.load_default_root_types(); - self.load_default_typename_field(); - self.load_default_fetch_token_field(); - self.load_default_clientid_field(); - self.load_default_strongid_field(); - self.load_default_is_fulfilled_field(); - } - - // In case the schema doesn't define a query, mutation or subscription - // type, but there is a Query, Mutation, or Subscription object type - // defined, default to those. - // This is not standard GraphQL behavior, and we might want to remove - // this at some point. 
- fn load_default_root_types(&mut self) { - if self.query_type.is_none() { - if let Some(Type::Object(id)) = self.type_map.get(&"Query".intern()) { - self.query_type = Some(*id); - } - } - if self.mutation_type.is_none() { - if let Some(Type::Object(id)) = self.type_map.get(&"Mutation".intern()) { - self.mutation_type = Some(*id); - } - } - if self.subscription_type.is_none() { - if let Some(Type::Object(id)) = self.type_map.get(&"Subscription".intern()) { - self.subscription_type = Some(*id); - } - } - } - - fn load_default_typename_field(&mut self) { - let string_type = *self - .type_map - .get(&"String".intern()) - .expect("Missing String type"); - let typename_field_id = self.fields.len(); - self.typename_field = FieldID(typename_field_id.try_into().unwrap()); - self.fields.push(Field { - name: WithLocation::generated(self.typename_field_name), - is_extension: false, - arguments: ArgumentDefinitions::new(Default::default()), - type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), - directives: Vec::new(), - parent_type: None, - description: Some(*TYPENAME_DESCRIPTION), - }); - } - - fn load_default_fetch_token_field(&mut self) { - let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); - let fetch_token_field_id = self.fields.len(); - self.fetch_token_field = FieldID(fetch_token_field_id.try_into().unwrap()); - self.fields.push(Field { - name: WithLocation::generated(self.fetch_token_field_name), - is_extension: false, - arguments: ArgumentDefinitions::new(Default::default()), - type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), - directives: Vec::new(), - parent_type: None, - description: None, - }); - } - - fn load_default_clientid_field(&mut self) { - let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); - let clientid_field_id = self.fields.len(); - self.clientid_field = FieldID(clientid_field_id.try_into().unwrap()); - self.fields.push(Field { - name: 
WithLocation::generated(self.clientid_field_name), - is_extension: true, - arguments: ArgumentDefinitions::new(Default::default()), - type_: TypeReference::NonNull(Box::new(TypeReference::Named(id_type))), - directives: Vec::new(), - parent_type: None, - description: Some(*CLIENT_ID_DESCRIPTION), - }); - } - - fn load_default_strongid_field(&mut self) { - let id_type = *self.type_map.get(&"ID".intern()).expect("Missing ID type"); - let strongid_field_id = self.fields.len(); - self.strongid_field = FieldID(strongid_field_id.try_into().unwrap()); - self.fields.push(Field { - name: WithLocation::generated(self.strongid_field_name), - is_extension: true, - arguments: ArgumentDefinitions::new(Default::default()), - type_: TypeReference::Named(id_type), - directives: Vec::new(), - parent_type: None, - description: None, - }); - } - - fn load_default_is_fulfilled_field(&mut self) { - let string_type = *self - .type_map - .get(&"String".intern()) - .expect("Missing String type"); - let is_fulfilled_field_id = self.fields.len(); - self.is_fulfilled_field = FieldID(is_fulfilled_field_id.try_into().unwrap()); - self.fields.push(Field { - name: WithLocation::generated(self.is_fulfilled_field_name), - is_extension: true, - arguments: ArgumentDefinitions::new(vec![Argument { - name: ArgumentName("name".intern()), - type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), - default_value: None, - description: None, - directives: Default::default(), - }]), - type_: TypeReference::NonNull(Box::new(TypeReference::Named(string_type))), - directives: Vec::new(), - parent_type: None, - description: None, - }); - } - - /// Add additional object extensions to the schema after its initial - /// creation. 
- pub fn add_object_type_extension( - &mut self, - object_extension: ObjectTypeExtension, - location_key: SourceLocationKey, - ) -> DiagnosticsResult<()> { - self.add_definition( - &TypeSystemDefinition::ObjectTypeExtension(object_extension), - &location_key, - true, - ) - } - - /// Add additional interface extensions to the schema after its initial - /// creation. - pub fn add_interface_type_extension( - &mut self, - interface_extension: InterfaceTypeExtension, - location_key: SourceLocationKey, - ) -> DiagnosticsResult<()> { - self.add_definition( - &TypeSystemDefinition::InterfaceTypeExtension(interface_extension), - &location_key, - true, - ) - } - - /// Add additional client-only (extension) scalar - pub fn add_extension_scalar( - &mut self, - scalar: ScalarTypeDefinition, - location_key: SourceLocationKey, - ) -> DiagnosticsResult<()> { - let scalar_name = scalar.name.name_with_location(location_key); - - if self.type_map.contains_key(&scalar_name.item) { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateType(scalar_name.item), - scalar_name.location, - )]); - } - - let scalar_id = Type::Scalar(ScalarID(self.scalars.len() as u32)); - self.type_map.insert(scalar_name.item, scalar_id); - self.add_definition( - &TypeSystemDefinition::ScalarTypeDefinition(scalar), - &location_key, - true, - )?; - - Ok(()) - } - - /// Add additional client-only (extension) object - pub fn add_extension_object( - &mut self, - object: ObjectTypeDefinition, - location_key: SourceLocationKey, - ) -> DiagnosticsResult<()> { - let object_name = object.name.name_with_location(location_key); - - if self.type_map.contains_key(&object_name.item) { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateType(object_name.item), - object_name.location, - )]); - } - - let object_id = Type::Object(ObjectID(self.objects.len() as u32)); - self.type_map.insert(object_name.item, object_id); - self.add_definition( - &TypeSystemDefinition::ObjectTypeDefinition(object), - &location_key, 
- true, - )?; - - Ok(()) - } - - fn add_definition( - &mut self, - definition: &TypeSystemDefinition, - location_key: &SourceLocationKey, - is_extension: bool, - ) -> DiagnosticsResult<()> { - match definition { - TypeSystemDefinition::SchemaDefinition(SchemaDefinition { - operation_types, - directives: _directives, - }) => { - for OperationTypeDefinition { operation, type_ } in &operation_types.items { - let operation_id = self.build_object_id(type_.value)?; - match operation { - OperationType::Query => { - if let Some(prev_query_type) = self.query_type { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateOperationDefinition( - *operation, - type_.value, - expect_object_type_name(&self.type_map, prev_query_type), - ), - Location::new(*location_key, type_.span), - )]); - } else { - self.query_type = Some(operation_id); - } - } - OperationType::Mutation => { - if let Some(prev_mutation_type) = self.mutation_type { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateOperationDefinition( - *operation, - type_.value, - expect_object_type_name(&self.type_map, prev_mutation_type), - ), - Location::new(*location_key, type_.span), - )]); - } else { - self.mutation_type = Some(operation_id); - } - } - OperationType::Subscription => { - if let Some(prev_subscription_type) = self.subscription_type { - return Err(vec![Diagnostic::error( - SchemaError::DuplicateOperationDefinition( - *operation, - type_.value, - expect_object_type_name( - &self.type_map, - prev_subscription_type, - ), - ), - Location::new(*location_key, type_.span), - )]); - } else { - self.subscription_type = Some(operation_id); - } - } - } - } - } - TypeSystemDefinition::DirectiveDefinition(DirectiveDefinition { - name, - arguments, - repeatable, - locations, - description, - }) => { - if self.directives.contains_key(&DirectiveName(name.value)) { - let str_name = name.value.lookup(); - if str_name != "skip" && str_name != "include" { - // TODO(T63941319) @skip and @include directives are 
duplicated in our schema - return Err(vec![Diagnostic::error( - SchemaError::DuplicateDirectiveDefinition(name.value), - Location::new(*location_key, name.span), - )]); - } - } - let arguments = self.build_arguments(arguments)?; - self.directives.insert( - DirectiveName(name.value), - Directive { - name: DirectiveName(name.value), - arguments, - locations: locations.clone(), - repeatable: *repeatable, - is_extension, - description: description.as_ref().map(|node| node.value), - }, - ); - } - TypeSystemDefinition::ObjectTypeDefinition(ObjectTypeDefinition { - name, - interfaces, - fields, - directives, - }) => { - let parent_id = Type::Object(ObjectID(self.objects.len() as u32)); - let fields = if is_extension { - self.build_extend_fields( - fields, - &mut HashMap::with_capacity(len_of_option_list(fields)), - *location_key, - Some(parent_id), - )? - } else { - self.build_fields(fields, *location_key, Some(parent_id))? - }; - let interfaces = interfaces - .iter() - .map(|name| self.build_interface_id(name, location_key)) - .collect::>>()?; - let directives = self.build_directive_values(directives); - self.objects.push(Object { - name: WithLocation::new( - Location::new(*location_key, name.span), - ObjectName(name.value), - ), - fields, - is_extension, - interfaces, - directives, - description: None, - }); - } - TypeSystemDefinition::InterfaceTypeDefinition(InterfaceTypeDefinition { - name, - interfaces, - directives, - fields, - }) => { - let parent_id = Type::Interface(InterfaceID(self.interfaces.len() as u32)); - let fields = if is_extension { - self.build_extend_fields( - fields, - &mut HashMap::with_capacity(len_of_option_list(fields)), - *location_key, - Some(parent_id), - )? - } else { - self.build_fields(fields, *location_key, Some(parent_id))? 
- }; - let interfaces = interfaces - .iter() - .map(|name| self.build_interface_id(name, location_key)) - .collect::>>()?; - let directives = self.build_directive_values(directives); - self.interfaces.push(Interface { - name: WithLocation::new( - Location::new(*location_key, name.span), - InterfaceName(name.value), - ), - implementing_interfaces: vec![], - implementing_objects: vec![], - is_extension, - fields, - directives, - interfaces, - description: None, - }); - } - TypeSystemDefinition::UnionTypeDefinition(UnionTypeDefinition { - name, - directives, - members, - }) => { - let members = members - .iter() - .map(|name| self.build_object_id(name.value)) - .collect::>>()?; - let directives = self.build_directive_values(directives); - self.unions.push(Union { - name: WithLocation::new(Location::new(*location_key, name.span), name.value), - is_extension, - members, - directives, - description: None, - }); - } - TypeSystemDefinition::InputObjectTypeDefinition(InputObjectTypeDefinition { - name, - fields, - directives, - }) => { - let fields = self.build_arguments(fields)?; - let directives = self.build_directive_values(directives); - self.input_objects.push(InputObject { - name: WithLocation::new( - Location::new(*location_key, name.span), - InputObjectName(name.value), - ), - - fields, - directives, - description: None, - }); - } - TypeSystemDefinition::EnumTypeDefinition(EnumTypeDefinition { - name, - directives, - values, - }) => { - let directives = self.build_directive_values(directives); - let values = if let Some(values) = values { - values - .items - .iter() - .map(|enum_def| EnumValue { - value: enum_def.name.value, - directives: self.build_directive_values(&enum_def.directives), - }) - .collect() - } else { - Vec::new() - }; - self.enums.push(Enum { - name: WithLocation::new( - Location::new(*location_key, name.span), - EnumName(name.value), - ), - is_extension, - values, - directives, - description: None, - }); - } - 
TypeSystemDefinition::ScalarTypeDefinition(ScalarTypeDefinition { - name, - directives, - }) => { - let directives = self.build_directive_values(directives); - self.scalars.push(Scalar { - name: WithLocation::new( - Location::new(*location_key, name.span), - ScalarName(name.value), - ), - is_extension, - directives, - description: None, - }) - } - TypeSystemDefinition::ObjectTypeExtension(ObjectTypeExtension { - name, - interfaces, - fields, - directives, - }) => match self.type_map.get(&name.value).cloned() { - Some(Type::Object(id)) => { - let index = id.as_usize(); - let obj = self.objects.get(index).ok_or_else(|| { - vec![Diagnostic::error( - SchemaError::ExtendUndefinedType(name.value), - Location::new(*location_key, name.span), - )] - })?; - - let field_ids = &obj.fields; - let mut existing_fields = - HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); - for field_id in field_ids { - let field_name = self.fields[field_id.as_usize()].name; - existing_fields.insert(field_name.item, field_name.location); - } - let client_fields = self.build_extend_fields( - fields, - &mut existing_fields, - *location_key, - Some(Type::Object(id)), - )?; - - self.objects[index].fields.extend(client_fields); - - let built_interfaces = interfaces - .iter() - .map(|name| self.build_interface_id(name, location_key)) - .collect::>>()?; - extend_without_duplicates( - &mut self.objects[index].interfaces, - built_interfaces, - ); - - let built_directives = self.build_directive_values(directives); - extend_without_duplicates( - &mut self.objects[index].directives, - built_directives, - ); - } - _ => { - return Err(vec![Diagnostic::error( - SchemaError::ExtendUndefinedType(name.value), - Location::new(*location_key, name.span), - )]); - } - }, - TypeSystemDefinition::InterfaceTypeExtension(InterfaceTypeExtension { - name, - fields, - directives, - .. 
- }) => match self.type_map.get(&name.value).cloned() { - Some(Type::Interface(id)) => { - let index = id.as_usize(); - let interface = self.interfaces.get(index).ok_or_else(|| { - vec![Diagnostic::error( - SchemaError::ExtendUndefinedType(name.value), - Location::new(*location_key, name.span), - )] - })?; - let field_ids = &interface.fields; - let mut existing_fields = - HashMap::with_capacity(field_ids.len() + len_of_option_list(fields)); - for field_id in field_ids { - let field_name = self.fields[field_id.as_usize()].name; - existing_fields.insert(field_name.item, field_name.location); - } - let client_fields = self.build_extend_fields( - fields, - &mut existing_fields, - *location_key, - Some(Type::Interface(id)), - )?; - self.interfaces[index].fields.extend(client_fields); - - let built_directives = self.build_directive_values(directives); - extend_without_duplicates( - &mut self.interfaces[index].directives, - built_directives, - ); - } - _ => { - return Err(vec![Diagnostic::error( - SchemaError::ExtendUndefinedType(name.value), - Location::new(*location_key, name.span), - )]); - } - }, - TypeSystemDefinition::SchemaExtension { .. } => todo!("SchemaExtension"), - TypeSystemDefinition::EnumTypeExtension { .. } => todo!("EnumTypeExtension"), - TypeSystemDefinition::UnionTypeExtension { .. } => todo!("UnionTypeExtension"), - TypeSystemDefinition::InputObjectTypeExtension { .. } => { - todo!("InputObjectTypeExtension") - } - TypeSystemDefinition::ScalarTypeExtension { .. 
} => todo!("ScalarTypeExtension"), - } - Ok(()) - } - - fn build_object_id(&mut self, name: StringKey) -> DiagnosticsResult { - match self.type_map.get(&name) { - Some(Type::Object(id)) => Ok(*id), - Some(non_object_type) => { - todo_add_location(SchemaError::ExpectedObjectReference(name, *non_object_type)) - } - None => todo_add_location(SchemaError::UndefinedType(name)), - } - } - - fn build_interface_id( - &mut self, - name: &Identifier, - location_key: &SourceLocationKey, - ) -> DiagnosticsResult { - match self.type_map.get(&name.value) { - Some(Type::Interface(id)) => Ok(*id), - Some(non_interface_type) => Err(vec![Diagnostic::error( - SchemaError::ExpectedInterfaceReference(name.value, *non_interface_type), - Location::new(*location_key, name.span), - )]), - None => Err(vec![Diagnostic::error( - SchemaError::UndefinedType(name.value), - Location::new(*location_key, name.span), - )]), - } - } - - fn build_field(&mut self, field: Field) -> FieldID { - let field_index = self.fields.len().try_into().unwrap(); - self.fields.push(field); - FieldID(field_index) - } - - fn build_fields( - &mut self, - field_defs: &Option>, - field_location_key: SourceLocationKey, - parent_type: Option, - ) -> DiagnosticsResult> { - if let Some(field_defs) = field_defs { - field_defs - .items - .iter() - .map(|field_def| { - let arguments = self.build_arguments(&field_def.arguments)?; - let type_ = self.build_type_reference(&field_def.type_, field_location_key)?; - let directives = self.build_directive_values(&field_def.directives); - let description = field_def.description.as_ref().map(|desc| desc.value); - Ok(self.build_field(Field { - name: WithLocation::new( - Location::new(field_location_key, field_def.name.span), - field_def.name.value, - ), - is_extension: false, - arguments, - type_, - directives, - parent_type, - description, - })) - }) - .collect() - } else { - Ok(Vec::new()) - } - } - - fn build_extend_fields( - &mut self, - field_defs: &Option>, - existing_fields: &mut 
HashMap, - source_location_key: SourceLocationKey, - parent_type: Option, - ) -> DiagnosticsResult> { - if let Some(field_defs) = field_defs { - let mut field_ids: Vec = Vec::with_capacity(field_defs.items.len()); - for field_def in &field_defs.items { - let field_name = field_def.name.value; - let field_location = Location::new(source_location_key, field_def.name.span); - if let Some(prev_location) = existing_fields.insert(field_name, field_location) { - return Err(vec![ - Diagnostic::error(SchemaError::DuplicateField(field_name), field_location) - .annotate("previously defined here", prev_location), - ]); - } - let arguments = self.build_arguments(&field_def.arguments)?; - let directives = self.build_directive_values(&field_def.directives); - let type_ = self.build_type_reference(&field_def.type_, source_location_key)?; - let description = field_def.description.as_ref().map(|desc| desc.value); - field_ids.push(self.build_field(Field { - name: WithLocation::new(field_location, field_name), - is_extension: true, - arguments, - type_, - directives, - parent_type, - description, - })); - } - Ok(field_ids) - } else { - Ok(Vec::new()) - } - } - - fn build_arguments( - &mut self, - arg_defs: &Option>, - ) -> DiagnosticsResult { - if let Some(arg_defs) = arg_defs { - let arg_defs: DiagnosticsResult> = arg_defs - .items - .iter() - .map(|arg_def| { - Ok(Argument { - name: ArgumentName(arg_def.name.value), - type_: self.build_input_object_reference(&arg_def.type_)?, - default_value: arg_def.default_value.clone(), - description: None, - directives: self.build_directive_values(&arg_def.directives), - }) - }) - .collect(); - Ok(ArgumentDefinitions(arg_defs?)) - } else { - Ok(ArgumentDefinitions(Vec::new())) - } - } - - fn build_input_object_reference( - &mut self, - ast_type: &TypeAnnotation, - ) -> DiagnosticsResult> { - Ok(match ast_type { - TypeAnnotation::Named(named_type) => { - let type_ = self.type_map.get(&named_type.name.value).ok_or_else(|| { - 
vec![Diagnostic::error( - SchemaError::UndefinedType(named_type.name.value), - Location::new(SourceLocationKey::generated(), named_type.name.span), - )] - })?; - if !(type_.is_enum() || type_.is_scalar() || type_.is_input_object()) { - return Err(vec![Diagnostic::error( - SchemaError::ExpectedInputType(named_type.name.value), - Location::new(SourceLocationKey::generated(), named_type.name.span), - )]); - } - - TypeReference::Named(*type_) - } - TypeAnnotation::NonNull(of_type) => { - TypeReference::NonNull(Box::new(self.build_input_object_reference(&of_type.type_)?)) - } - TypeAnnotation::List(of_type) => { - TypeReference::List(Box::new(self.build_input_object_reference(&of_type.type_)?)) - } - }) - } - - fn build_type_reference( - &mut self, - ast_type: &TypeAnnotation, - source_location: SourceLocationKey, - ) -> DiagnosticsResult> { - Ok(match ast_type { - TypeAnnotation::Named(named_type) => TypeReference::Named( - *self.type_map.get(&named_type.name.value).ok_or_else(|| { - vec![Diagnostic::error( - SchemaError::UndefinedType(named_type.name.value), - Location::new(source_location, named_type.name.span), - )] - })?, - ), - TypeAnnotation::NonNull(of_type) => TypeReference::NonNull(Box::new( - self.build_type_reference(&of_type.type_, source_location)?, - )), - TypeAnnotation::List(of_type) => TypeReference::List(Box::new( - self.build_type_reference(&of_type.type_, source_location)?, - )), - }) - } - - fn build_directive_values(&mut self, directives: &[ConstantDirective]) -> Vec { - directives - .iter() - .map(|directive| { - let arguments = if let Some(arguments) = &directive.arguments { - arguments - .items - .iter() - .map(|argument| ArgumentValue { - name: ArgumentName(argument.name.value), - value: argument.value.clone(), - }) - .collect() - } else { - Vec::new() - }; - DirectiveValue { - name: DirectiveName(directive.name.value), - arguments, - } - }) - .collect() - } -} - -/// Extends the `target` with `extensions` ignoring items that are already in 
-/// `target`. -fn extend_without_duplicates( - target: &mut Vec, - extensions: impl IntoIterator, -) { - for extension in extensions { - if !target.contains(&extension) { - target.push(extension); - } - } -} - -fn len_of_option_list(option_list: &Option>) -> usize { - option_list.as_ref().map_or(0, |list| list.items.len()) -} - -fn expect_object_type_name(type_map: &TypeMap, object_id: ObjectID) -> StringKey { - *type_map - .iter() - .find(|(_, type_)| match type_ { - Type::Object(id_) => id_ == &object_id, - _ => false, - }) - .expect("Missing object in type_map") - .0 -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_extend_without_duplicates() { - let mut target = vec![10, 11]; - extend_without_duplicates(&mut target, vec![1, 10, 100]); - assert_eq!(target, vec![10, 11, 1, 100]); - } -} diff --git a/compiler/crates/schema/src/lib.rs b/compiler/crates/schema/src/lib.rs index 5bbef3e43564e..ae8c64c7cc1c5 100644 --- a/compiler/crates/schema/src/lib.rs +++ b/compiler/crates/schema/src/lib.rs @@ -19,7 +19,10 @@ mod graphql_schema; mod in_memory; mod schema; pub mod suggestion_list; +use std::borrow::Cow; +use common::sync::IntoParallelIterator; +use common::sync::ParallelIterator; use common::DiagnosticsResult; use common::SourceLocationKey; pub use definitions::Argument; @@ -54,6 +57,7 @@ pub use graphql_syntax::DirectiveLocation; use graphql_syntax::SchemaDocument; pub use graphql_syntax::TypeSystemDefinition; pub use in_memory::InMemorySchema; +use rayon::iter::IntoParallelRefIterator; pub use crate::schema::SDLSchema; @@ -65,16 +69,32 @@ pub fn build_schema(sdl: &str) -> DiagnosticsResult { build_schema_with_extensions::<_, &str>(&[(sdl, SourceLocationKey::generated())], &[]) } -pub fn build_schema_with_extensions, U: AsRef>( +pub struct SchemaDocuments { + pub server: Vec, + pub extensions: Vec, +} + +pub fn build_schema_with_extensions< + T: AsRef + std::marker::Sync, + U: AsRef + std::marker::Sync, +>( server_sdls: &[(T, 
SourceLocationKey)], extension_sdls: &[(U, SourceLocationKey)], ) -> DiagnosticsResult { - let mut server_documents = vec![builtins()?]; + let SchemaDocuments { server, extensions } = + parse_schema_with_extensions(server_sdls, extension_sdls)?; + SDLSchema::build(&server, &extensions) +} - let server_schema_document = match server_sdls { - [(sdl, source_location)] => { - graphql_syntax::parse_schema_document(sdl.as_ref(), *source_location)? - } +pub fn parse_schema_with_extensions< + T: AsRef + std::marker::Sync, + U: AsRef + std::marker::Sync, +>( + server_sdls: &[(T, SourceLocationKey)], + extension_sdls: &[(U, SourceLocationKey)], +) -> DiagnosticsResult { + let merged_server_sdls = match server_sdls { + [(sdl, location)] => vec![(Cow::Borrowed(sdl.as_ref()), *location)], _ => { // When the schema is split across multiple files, the individual // files may not be syntactically complete, so we join them together @@ -82,26 +102,57 @@ pub fn build_schema_with_extensions, U: AsRef>( // Note that this requires us to use a generates source location key which // means click to definition for schema files will not work. - let mut combined_sdl: String = String::new(); - for (sdl, _) in server_sdls { - combined_sdl.push_str(sdl.as_ref()); - combined_sdl.push('\n'); + let mut chunks = vec![]; + let mut buffer = String::new(); + for (sdl, source_location) in server_sdls { + // Accumulate the document until it ends with a `}` to form + // a valid schema document + if ends_with_right_curly_brace(sdl) { + if buffer.is_empty() { + chunks.push((Cow::Borrowed(sdl.as_ref()), *source_location)); + } else { + buffer.push_str(sdl.as_ref()); + chunks.push(( + Cow::Owned(std::mem::take(&mut buffer)), + SourceLocationKey::Generated, + )); + } + } else { + buffer.push_str(sdl.as_ref()); + buffer.push('\n'); + } } - graphql_syntax::parse_schema_document(&combined_sdl, SourceLocationKey::Generated)? 
+ assert!(buffer.is_empty()); + chunks } }; - - server_documents.push(server_schema_document); - - let mut client_schema_documents = Vec::new(); - for (extension_sdl, location_key) in extension_sdls { - client_schema_documents.push(graphql_syntax::parse_schema_document( - extension_sdl.as_ref(), - *location_key, - )?); - } - - SDLSchema::build(&server_documents, &client_schema_documents) + let result = rayon::join( + || { + merged_server_sdls + .into_par_iter() + .map(|(sdl, source_location)| { + graphql_syntax::parse_schema_document(sdl.as_ref(), source_location) + }) + .collect::>>() + }, + || { + extension_sdls + .par_iter() + .map(|(extension_sdl, location_key)| { + graphql_syntax::parse_schema_document(extension_sdl.as_ref(), *location_key) + }) + .collect::>>() + }, + ); + + let mut server_documents: Vec = vec![builtins()?]; + server_documents.extend(result.0?); + let client_schema_documents = result.1?; + + Ok(SchemaDocuments { + server: server_documents, + extensions: client_schema_documents, + }) } pub fn build_schema_with_flat_buffer(bytes: Vec) -> SDLSchema { @@ -115,3 +166,15 @@ pub fn build_schema_from_flat_buffer(bytes: &[u8]) -> DiagnosticsResult DiagnosticsResult { graphql_syntax::parse_schema_document(BUILTINS, SourceLocationKey::generated()) } + +fn ends_with_right_curly_brace>(text: T) -> bool { + for char in text.as_ref().chars().rev() { + if char == '}' { + return true; + } + if !char.is_whitespace() { + return false; + } + } + false +} diff --git a/compiler/crates/schema/src/schema.rs b/compiler/crates/schema/src/schema.rs index a45add2af00fa..f1c01325718d6 100644 --- a/compiler/crates/schema/src/schema.rs +++ b/compiler/crates/schema/src/schema.rs @@ -10,6 +10,7 @@ use common::DirectiveName; use common::SourceLocationKey; use graphql_syntax::*; use intern::string_key::StringKey; +use rayon::iter::ParallelIterator; use crate::definitions::Directive; use crate::definitions::*; @@ -345,6 +346,13 @@ impl SDLSchema { } } + pub fn 
get_type_map_par_iter(&self) -> impl ParallelIterator { + match self { + SDLSchema::FlatBuffer(_schema) => todo!(), + SDLSchema::InMemory(schema) => schema.get_type_map_par_iter(), + } + } + pub fn get_directives(&self) -> impl Iterator { match self { SDLSchema::FlatBuffer(_schema) => todo!(), diff --git a/compiler/crates/schema/src/suggestion_list.rs b/compiler/crates/schema/src/suggestion_list.rs index 10d466f5cc4e0..685c26a519925 100644 --- a/compiler/crates/schema/src/suggestion_list.rs +++ b/compiler/crates/schema/src/suggestion_list.rs @@ -232,7 +232,7 @@ impl<'schema> GraphQLSuggestions<'schema> { .input_object(input_id) .fields .iter() - .map(|arg| arg.name.0) + .map(|arg| arg.name.item.0) .collect(), _ => vec![], }; diff --git a/compiler/crates/schema/tests/build_schema.rs b/compiler/crates/schema/tests/build_schema.rs new file mode 100644 index 0000000000000..09a5c17cd5596 --- /dev/null +++ b/compiler/crates/schema/tests/build_schema.rs @@ -0,0 +1,127 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +use std::collections::BTreeMap; + +use common::Diagnostic; +use common::SourceLocationKey; +use common::TextSource; +use fixture_tests::Fixture; +use graphql_cli::DiagnosticPrinter; +use schema::build_schema_from_flat_buffer; +use schema::build_schema_with_extensions; +use schema::serialize_as_flatbuffer; +use schema::SDLSchema; +use schema::Schema; +use schema::Type; + +const SCHEMA_SEPARATOR: &str = "%extensions%"; + +pub async fn transform_fixture(fixture: &Fixture<'_>) -> Result { + let parts: Vec<_> = fixture.content.split(SCHEMA_SEPARATOR).collect(); + let result = match parts.as_slice() { + [base] => build_schema_with_extensions::<_, &str>( + &[(base, SourceLocationKey::standalone(fixture.file_name))], + &[], + ), + [base, extensions] => { + // prepend a comment so the correct line + column number is reported for client extension + // (since we source base and client schemas from one file) + let nchars_base = base.chars().count() + SCHEMA_SEPARATOR.chars().count(); + assert!(nchars_base > 0); + let prepended_extension = format!("{}\n{}", "#".repeat(nchars_base - 1), extensions); + build_schema_with_extensions( + &[(base, SourceLocationKey::standalone(fixture.file_name))], + &[( + prepended_extension, + SourceLocationKey::standalone(fixture.file_name), + )], + ) + } + _ => panic!("Expected a single extension block"), + }; + + result + .map(print_schema_and_flat_buffer_schema) + .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) +} + +fn print_schema_and_flat_buffer_schema(schema: SDLSchema) -> String { + let schema = schema.unwrap_in_memory_impl(); + let bytes = serialize_as_flatbuffer(&schema); + let fb_schema = build_schema_from_flat_buffer(&bytes).unwrap(); + let mut objects = Vec::new(); + let mut interfaces = Vec::new(); + let mut unions = Vec::new(); + let mut scalars = Vec::new(); + let mut enums = Vec::new(); + let mut input_objects = Vec::new(); + let mut fields = Vec::new(); + let mut directives = Vec::new(); 
+ + // Hydrate types + for (key, _value) in schema.get_type_map().collect::>() { + let type_ = fb_schema.get_type(*key).unwrap(); + // Hyderate fields + match type_ { + Type::Object(id) => { + let object = fb_schema.object(id); + for field_id in object.fields.clone() { + fields.push(fb_schema.field(field_id)); + } + objects.push(object); + } + Type::Interface(id) => { + let interface = fb_schema.interface(id); + for field_id in interface.fields.clone() { + fields.push(fb_schema.field(field_id)); + } + interfaces.push(interface); + } + Type::Union(id) => unions.push(fb_schema.union(id)), + Type::Scalar(id) => scalars.push(fb_schema.scalar(id)), + Type::Enum(id) => enums.push(fb_schema.enum_(id)), + Type::InputObject(id) => input_objects.push(fb_schema.input_object(id)), + }; + } + // Hydrate directives + let mut ordered_directives = schema.get_directives().collect::>(); + ordered_directives.sort_by_key(|directive| directive.name); + for directive in ordered_directives { + directives.push(fb_schema.get_directive(directive.name.item).unwrap()); + } + let fb_schema_snapshot = format!( + r#"FB Schema {{ +directives: {:#?} +enums: {:#?} +fields: {:#?} +input_objects: {:#?} +interfaces: {:#?} +objects: {:#?} +scalars: {:#?} +unions: {:#?} +}}"#, + directives, enums, fields, input_objects, interfaces, objects, scalars, unions, + ); + format!( + "Text Schema:{}\n\nFlatBuffer Schema:{}", + schema.snapshot_print(), + fb_schema_snapshot + ) +} + +// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer +fn diagnostics_to_sorted_string(source: &str, diagnostics: &[Diagnostic]) -> String { + let printer = + DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); + let mut printed = diagnostics + .iter() + .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) + .collect::>(); + printed.sort(); + printed.join("\n\n") +} diff --git 
a/compiler/crates/schema/tests/build_schema/fixtures/directive-on-arg-def.expected b/compiler/crates/schema/tests/build_schema/fixtures/directive-on-arg-def.expected index a9300a7ce7014..821a2abbd92ed 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/directive-on-arg-def.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/directive-on-arg-def.expected @@ -21,14 +21,20 @@ Text Schema:Schema { subscription_type: None directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :255:262, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :263:265, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -47,16 +53,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "required", - ), + name: WithLocation { + location: directive-on-arg-def.graphql:62:70, + item: DirectiveName( + "required", + ), + }, arguments: [ Argument { - name: ArgumentName( - "action", - ), + name: WithLocation { + location: directive-on-arg-def.graphql:71:77, + item: ArgumentName( + "action", + ), + }, type_: NonNull( Named( Enum(0), @@ -80,16 +93,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :334:338, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :339:341, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -108,11 +128,15 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "static", - ), + name: WithLocation { + location: directive-on-arg-def.graphql:130:136, + item: 
DirectiveName( + "static", + ), + }, arguments: [], locations: [ ArgumentDefinition, @@ -120,6 +144,7 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] type_map: { @@ -134,7 +159,7 @@ Text Schema:Schema { enums: [ Enum { name: WithLocation { - location: :5:24, + location: directive-on-arg-def.graphql:5:24, item: EnumName( "RequiredFieldAction", ), @@ -156,12 +181,13 @@ Text Schema:Schema { ], directives: [], description: None, + hack_source: None, }, ] fields: [ Field { name: WithLocation { - location: :176:179, + location: directive-on-arg-def.graphql:176:179, item: "foo", }, is_extension: false, @@ -174,6 +200,7 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -192,6 +219,7 @@ Text Schema:Schema { description: Some( "This object's GraphQL type. Provided by GraphQL type name introspection.", ), + hack_source: None, }, Field { name: WithLocation { @@ -208,6 +236,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -226,6 +255,7 @@ Text Schema:Schema { description: Some( "Relay's cache key for this object.", ), + hack_source: None, }, Field { name: WithLocation { @@ -240,6 +270,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -249,9 +280,12 @@ Text Schema:Schema { is_extension: true, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -270,6 +304,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, ] input_objects: [] @@ -277,7 +312,7 @@ Text Schema:Schema { objects: [ Object { name: WithLocation { - location: :166:171, + location: directive-on-arg-def.graphql:166:171, item: ObjectName( "Query", ), @@ 
-289,6 +324,7 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -302,6 +338,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -313,6 +350,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -324,6 +362,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -335,6 +374,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -346,6 +386,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] @@ -354,14 +395,20 @@ Text Schema:Schema { FlatBuffer Schema:FB Schema { directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(0), @@ -380,16 +427,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "required", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "required", + ), + }, arguments: [ Argument { - name: ArgumentName( - "action", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "action", + ), + }, type_: NonNull( Named( Enum(0), @@ -406,16 +460,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: 
ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(0), @@ -434,11 +495,15 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "static", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "static", + ), + }, arguments: [], locations: [ ArgumentDefinition, @@ -446,6 +511,7 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] enums: [ @@ -473,6 +539,7 @@ enums: [ ], directives: [], description: None, + hack_source: None, }, ] fields: [ @@ -491,6 +558,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, ] input_objects: [] @@ -510,6 +578,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -523,6 +592,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -534,6 +604,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -545,6 +616,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -556,6 +628,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -567,6 +640,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] diff --git a/compiler/crates/schema/tests/build_schema/fixtures/directives-for-external-types.expected b/compiler/crates/schema/tests/build_schema/fixtures/directives-for-external-types.expected index 85acd1375f868..ecd489c8f07d5 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/directives-for-external-types.expected +++ 
b/compiler/crates/schema/tests/build_schema/fixtures/directives-for-external-types.expected @@ -53,14 +53,20 @@ Text Schema:Schema { subscription_type: None directives: [ Directive { - name: DirectiveName( - "extern_type", - ), + name: WithLocation { + location: directives-for-external-types.graphql:179:190, + item: DirectiveName( + "extern_type", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: directives-for-external-types.graphql:191:197, + item: ArgumentName( + "schema", + ), + }, type_: Named( Scalar(2), ), @@ -69,9 +75,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: directives-for-external-types.graphql:207:211, + item: ArgumentName( + "name", + ), + }, type_: Named( Scalar(2), ), @@ -86,16 +95,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "fetchable", - ), + name: WithLocation { + location: directives-for-external-types.graphql:245:254, + item: DirectiveName( + "fetchable", + ), + }, arguments: [ Argument { - name: ArgumentName( - "field_name", - ), + name: WithLocation { + location: directives-for-external-types.graphql:255:265, + item: ArgumentName( + "field_name", + ), + }, type_: Named( Scalar(2), ), @@ -110,16 +126,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :255:262, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :263:265, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -138,16 +161,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "ref_type", - ), + 
name: WithLocation { + location: directives-for-external-types.graphql:109:117, + item: DirectiveName( + "ref_type", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: directives-for-external-types.graphql:118:124, + item: ArgumentName( + "schema", + ), + }, type_: Named( Scalar(2), ), @@ -156,9 +186,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: directives-for-external-types.graphql:134:138, + item: ArgumentName( + "name", + ), + }, type_: Named( Scalar(2), ), @@ -173,16 +206,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :334:338, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :339:341, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -201,16 +241,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "source", - ), + name: WithLocation { + location: directives-for-external-types.graphql:11:17, + item: DirectiveName( + "source", + ), + }, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: directives-for-external-types.graphql:21:25, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -232,6 +279,7 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] type_map: { @@ -252,15 +300,18 @@ Text Schema:Schema { fields: [ Field { name: WithLocation { - location: :352:363, + location: directives-for-external-types.graphql:352:363, item: "fetch__User", }, is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + 
location: directives-for-external-types.graphql:364:366, + item: ArgumentName( + "id", + ), + }, type_: Named( Scalar(5), ), @@ -277,18 +328,22 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :380:395, + location: directives-for-external-types.graphql:380:395, item: "fetch__XIGStory", }, is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + location: directives-for-external-types.graphql:396:398, + item: ArgumentName( + "id", + ), + }, type_: Named( Scalar(5), ), @@ -305,10 +360,11 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :484:488, + location: directives-for-external-types.graphql:484:488, item: "name", }, is_extension: false, @@ -321,10 +377,11 @@ Text Schema:Schema { Interface(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :563:565, + location: directives-for-external-types.graphql:563:565, item: "id", }, is_extension: false, @@ -337,10 +394,11 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :572:577, + location: directives-for-external-types.graphql:572:577, item: "story", }, is_extension: false, @@ -389,18 +447,22 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :636:659, + location: directives-for-external-types.graphql:636:659, item: "default_mailing_address", }, is_extension: false, arguments: [ Argument { - name: ArgumentName( - "country", - ), + name: WithLocation { + location: directives-for-external-types.graphql:660:667, + item: ArgumentName( + "country", + ), + }, type_: Named( InputObject(0), ), @@ -417,10 +479,11 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :721:723, + location: 
directives-for-external-types.graphql:721:723, item: "id", }, is_extension: false, @@ -433,10 +496,11 @@ Text Schema:Schema { Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :730:740, + location: directives-for-external-types.graphql:730:740, item: "is_default", }, is_extension: false, @@ -449,10 +513,11 @@ Text Schema:Schema { Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :752:757, + location: directives-for-external-types.graphql:752:757, item: "label", }, is_extension: false, @@ -465,10 +530,11 @@ Text Schema:Schema { Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :962:964, + location: directives-for-external-types.graphql:962:964, item: "id", }, is_extension: false, @@ -481,10 +547,11 @@ Text Schema:Schema { Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :971:975, + location: directives-for-external-types.graphql:971:975, item: "name", }, is_extension: false, @@ -497,6 +564,7 @@ Text Schema:Schema { Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -515,6 +583,7 @@ Text Schema:Schema { description: Some( "This object's GraphQL type. 
Provided by GraphQL type name introspection.", ), + hack_source: None, }, Field { name: WithLocation { @@ -531,6 +600,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -549,6 +619,7 @@ Text Schema:Schema { description: Some( "Relay's cache key for this object.", ), + hack_source: None, }, Field { name: WithLocation { @@ -563,6 +634,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -572,9 +644,12 @@ Text Schema:Schema { is_extension: true, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -593,21 +668,25 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, ] input_objects: [ InputObject { name: WithLocation { - location: :775:782, + location: directives-for-external-types.graphql:775:782, item: InputObjectName( "Country", ), }, fields: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: directives-for-external-types.graphql:833:837, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -656,12 +735,13 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, ] interfaces: [ Interface { name: WithLocation { - location: :427:435, + location: directives-for-external-types.graphql:427:435, item: InterfaceName( "XIGHuman", ), @@ -713,12 +793,13 @@ Text Schema:Schema { ], interfaces: [], description: None, + hack_source: None, }, ] objects: [ Object { name: WithLocation { - location: :342:347, + location: directives-for-external-types.graphql:342:347, item: ObjectName( "Query", ), @@ -731,10 +812,11 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :505:509, + 
location: directives-for-external-types.graphql:505:509, item: ObjectName( "User", ), @@ -772,10 +854,11 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :702:716, + location: directives-for-external-types.graphql:702:716, item: ObjectName( "MailingAddress", ), @@ -789,10 +872,11 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :872:880, + location: directives-for-external-types.graphql:872:880, item: ObjectName( "XIGStory", ), @@ -862,6 +946,7 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, ] scalars: [ @@ -875,6 +960,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -886,6 +972,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -897,6 +984,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -908,6 +996,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -919,10 +1008,11 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { - location: :293:295, + location: directives-for-external-types.graphql:293:295, item: ScalarName( "ID", ), @@ -966,13 +1056,16 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, ] unions: [ Union { name: WithLocation { - location: :993:1000, - item: "Address", + location: directives-for-external-types.graphql:993:1000, + item: UnionName( + "Address", + ), }, is_extension: false, members: [ @@ -1017,6 +1110,7 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, ] } @@ -1024,14 +1118,20 @@ Text 
Schema:Schema { FlatBuffer Schema:FB Schema { directives: [ Directive { - name: DirectiveName( - "extern_type", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "extern_type", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "schema", + ), + }, type_: Named( Scalar(2), ), @@ -1040,9 +1140,12 @@ directives: [ directives: [], }, Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: Named( Scalar(2), ), @@ -1057,16 +1160,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "fetchable", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "fetchable", + ), + }, arguments: [ Argument { - name: ArgumentName( - "field_name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "field_name", + ), + }, type_: Named( Scalar(2), ), @@ -1081,16 +1191,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(1), @@ -1109,16 +1226,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "ref_type", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "ref_type", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "schema", + ), + }, type_: Named( Scalar(2), ), @@ -1127,9 +1251,12 @@ directives: [ directives: [], }, Argument { - name: 
ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: Named( Scalar(2), ), @@ -1144,16 +1271,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(1), @@ -1172,16 +1306,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "source", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "source", + ), + }, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -1203,6 +1344,7 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] enums: [] @@ -1222,6 +1364,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1238,6 +1381,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1254,6 +1398,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1263,9 +1408,12 @@ fields: [ is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "id", + ), + }, type_: Named( Scalar(0), ), @@ -1282,6 +1430,7 @@ fields: [ Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1291,9 +1440,12 @@ fields: [ is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + location: :0:0, + item: 
ArgumentName( + "id", + ), + }, type_: Named( Scalar(0), ), @@ -1310,6 +1462,7 @@ fields: [ Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1326,6 +1479,7 @@ fields: [ Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1378,6 +1532,7 @@ fields: [ Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1387,9 +1542,12 @@ fields: [ is_extension: false, arguments: [ Argument { - name: ArgumentName( - "country", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "country", + ), + }, type_: Named( InputObject(0), ), @@ -1406,6 +1564,7 @@ fields: [ Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1422,6 +1581,7 @@ fields: [ Interface(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1438,6 +1598,7 @@ fields: [ Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1454,6 +1615,7 @@ fields: [ Object(3), ), description: None, + hack_source: None, }, ] input_objects: [ @@ -1466,9 +1628,12 @@ input_objects: [ }, fields: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -1517,6 +1682,7 @@ input_objects: [ }, ], description: None, + hack_source: None, }, ] interfaces: [ @@ -1574,6 +1740,7 @@ interfaces: [ ], interfaces: [], description: None, + hack_source: None, }, ] objects: [ @@ -1593,6 +1760,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1609,6 +1777,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1650,6 +1819,7 @@ objects: [ }, ], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1723,6 +1893,7 @@ objects: [ }, ], description: None, + 
hack_source: None, }, ] scalars: [ @@ -1736,6 +1907,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1747,6 +1919,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1794,6 +1967,7 @@ scalars: [ }, ], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1805,6 +1979,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1816,13 +1991,16 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [ Union { name: WithLocation { location: :0:0, - item: "Address", + item: UnionName( + "Address", + ), }, is_extension: false, members: [ @@ -1867,6 +2045,7 @@ unions: [ }, ], description: None, + hack_source: None, }, ] } diff --git a/compiler/crates/schema/tests/build_schema/fixtures/extend-interface-before-define.expected b/compiler/crates/schema/tests/build_schema/fixtures/extend-interface-before-define.expected index 57ecf406e569f..d0310026bb3b8 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/extend-interface-before-define.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/extend-interface-before-define.expected @@ -19,7 +19,7 @@ type MyType { ==================================== ERROR ==================================== ✖︎ Cannot extend type 'MyType', the type is not defined on the server schema. 
- :11:13 + extend-interface-before-define.graphql:11:13 10 │ 11 │ extend type MyType { │ ^^^^^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/extend-object-before-define.expected b/compiler/crates/schema/tests/build_schema/fixtures/extend-object-before-define.expected index 222722c7c018c..7ac69e3a76689 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/extend-object-before-define.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/extend-object-before-define.expected @@ -22,7 +22,7 @@ type MyType { ==================================== ERROR ==================================== ✖︎ Cannot extend type 'MyType', the type is not defined on the server schema. - :14:13 + extend-object-before-define.graphql:14:13 13 │ # -- https://spec.graphql.org/June2018/#InterfaceTypeExtension 14 │ extend type MyType { │ ^^^^^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/field-descriptions.expected b/compiler/crates/schema/tests/build_schema/fixtures/field-descriptions.expected index 76eadf503504f..44b81440dcab5 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/field-descriptions.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/field-descriptions.expected @@ -43,14 +43,20 @@ Text Schema:Schema { subscription_type: None directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :255:262, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :263:265, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -69,16 +75,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :334:338, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + 
location: :339:341, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -97,6 +110,7 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] type_map: { @@ -112,7 +126,7 @@ Text Schema:Schema { fields: [ Field { name: WithLocation { - location: :42:45, + location: field-descriptions.graphql:42:45, item: "foo", }, is_extension: false, @@ -125,10 +139,11 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :101:105, + location: field-descriptions.graphql:101:105, item: "line", }, is_extension: false, @@ -143,10 +158,11 @@ Text Schema:Schema { description: Some( "Single line field description", ), + hack_source: None, }, Field { name: WithLocation { - location: :154:159, + location: field-descriptions.graphql:154:159, item: "block", }, is_extension: false, @@ -161,10 +177,11 @@ Text Schema:Schema { description: Some( "Block field description", ), + hack_source: None, }, Field { name: WithLocation { - location: :271:286, + location: field-descriptions.graphql:271:286, item: "multiline_block", }, is_extension: false, @@ -179,10 +196,11 @@ Text Schema:Schema { description: Some( "Multiline block field description which is so long\nthat it spans onto a second line.", ), + hack_source: None, }, Field { name: WithLocation { - location: :361:374, + location: field-descriptions.graphql:361:374, item: "extended_line", }, is_extension: true, @@ -197,10 +215,11 @@ Text Schema:Schema { description: Some( "Single line extended field description", ), + hack_source: None, }, Field { name: WithLocation { - location: :423:437, + location: field-descriptions.graphql:423:437, item: "extended_block", }, is_extension: true, @@ -215,10 +234,11 @@ Text Schema:Schema { description: Some( "Block field description", ), + hack_source: None, }, Field { name: WithLocation { - location: :549:573, + location: field-descriptions.graphql:549:573, item: 
"extended_multiline_block", }, is_extension: true, @@ -233,6 +253,7 @@ Text Schema:Schema { description: Some( "Multiline block field description which is so long\nthat it spans onto a second line.", ), + hack_source: None, }, Field { name: WithLocation { @@ -251,6 +272,7 @@ Text Schema:Schema { description: Some( "This object's GraphQL type. Provided by GraphQL type name introspection.", ), + hack_source: None, }, Field { name: WithLocation { @@ -267,6 +289,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -285,6 +308,7 @@ Text Schema:Schema { description: Some( "Relay's cache key for this object.", ), + hack_source: None, }, Field { name: WithLocation { @@ -299,6 +323,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -308,9 +333,12 @@ Text Schema:Schema { is_extension: true, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -329,6 +357,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, ] input_objects: [] @@ -336,7 +365,7 @@ Text Schema:Schema { objects: [ Object { name: WithLocation { - location: :32:37, + location: field-descriptions.graphql:32:37, item: ObjectName( "Query", ), @@ -348,10 +377,11 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :59:62, + location: field-descriptions.graphql:59:62, item: ObjectName( "Foo", ), @@ -368,6 +398,7 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -381,6 +412,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -392,6 +424,7 @@ 
Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -403,6 +436,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -414,6 +448,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -425,6 +460,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] @@ -433,14 +469,20 @@ Text Schema:Schema { FlatBuffer Schema:FB Schema { directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(0), @@ -459,16 +501,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(0), @@ -487,6 +536,7 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] enums: [] @@ -506,6 +556,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -522,6 +573,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -538,6 +590,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -554,6 +607,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -570,6 
+624,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -586,6 +641,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -602,6 +658,7 @@ fields: [ Object(1), ), description: None, + hack_source: None, }, ] input_objects: [] @@ -626,6 +683,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -641,6 +699,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -654,6 +713,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -665,6 +725,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -676,6 +737,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -687,6 +749,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -698,6 +761,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] diff --git a/compiler/crates/schema/tests/build_schema/fixtures/interface-implements-interface.expected b/compiler/crates/schema/tests/build_schema/fixtures/interface-implements-interface.expected index 274fda0214714..85a4ea59a0722 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/interface-implements-interface.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/interface-implements-interface.expected @@ -35,14 +35,20 @@ Text Schema:Schema { subscription_type: None directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :255:262, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: 
WithLocation { + location: :263:265, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -61,16 +67,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :334:338, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :339:341, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -89,6 +102,7 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] type_map: { @@ -107,7 +121,7 @@ Text Schema:Schema { fields: [ Field { name: WithLocation { - location: :42:46, + location: interface-implements-interface.graphql:42:46, item: "node", }, is_extension: false, @@ -120,10 +134,11 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :79:81, + location: interface-implements-interface.graphql:79:81, item: "id", }, is_extension: false, @@ -138,10 +153,11 @@ Text Schema:Schema { Interface(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :129:133, + location: interface-implements-interface.graphql:129:133, item: "name", }, is_extension: false, @@ -156,10 +172,11 @@ Text Schema:Schema { Interface(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :185:187, + location: interface-implements-interface.graphql:185:187, item: "id", }, is_extension: false, @@ -174,10 +191,11 @@ Text Schema:Schema { Interface(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :195:199, + location: interface-implements-interface.graphql:195:199, item: "name", }, is_extension: false, @@ -192,10 +210,11 @@ Text Schema:Schema { Interface(2), ), description: None, + hack_source: None, }, Field { name: 
WithLocation { - location: :211:214, + location: interface-implements-interface.graphql:211:214, item: "url", }, is_extension: false, @@ -208,10 +227,11 @@ Text Schema:Schema { Interface(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :273:275, + location: interface-implements-interface.graphql:273:275, item: "id", }, is_extension: false, @@ -226,10 +246,11 @@ Text Schema:Schema { Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :283:286, + location: interface-implements-interface.graphql:283:286, item: "url", }, is_extension: false, @@ -242,10 +263,11 @@ Text Schema:Schema { Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :297:306, + location: interface-implements-interface.graphql:297:306, item: "thumbnail", }, is_extension: false, @@ -258,6 +280,7 @@ Text Schema:Schema { Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -276,6 +299,7 @@ Text Schema:Schema { description: Some( "This object's GraphQL type. 
Provided by GraphQL type name introspection.", ), + hack_source: None, }, Field { name: WithLocation { @@ -292,6 +316,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -310,6 +335,7 @@ Text Schema:Schema { description: Some( "Relay's cache key for this object.", ), + hack_source: None, }, Field { name: WithLocation { @@ -324,6 +350,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -333,9 +360,12 @@ Text Schema:Schema { is_extension: true, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(2), @@ -354,13 +384,14 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, ] input_objects: [] interfaces: [ Interface { name: WithLocation { - location: :66:74, + location: interface-implements-interface.graphql:66:74, item: InterfaceName( "BaseNode", ), @@ -376,10 +407,11 @@ Text Schema:Schema { directives: [], interfaces: [], description: None, + hack_source: None, }, Interface { name: WithLocation { - location: :100:104, + location: interface-implements-interface.graphql:100:104, item: InterfaceName( "Node", ), @@ -398,10 +430,11 @@ Text Schema:Schema { InterfaceID(0), ], description: None, + hack_source: None, }, Interface { name: WithLocation { - location: :156:164, + location: interface-implements-interface.graphql:156:164, item: InterfaceName( "Resource", ), @@ -421,10 +454,11 @@ Text Schema:Schema { InterfaceID(1), ], description: None, + hack_source: None, }, Interface { name: WithLocation { - location: :236:241, + location: interface-implements-interface.graphql:236:241, item: InterfaceName( "Image", ), @@ -443,12 +477,13 @@ Text Schema:Schema { InterfaceID(1), ], description: None, + hack_source: None, }, ] objects: [ Object { 
name: WithLocation { - location: :32:37, + location: interface-implements-interface.graphql:32:37, item: ObjectName( "Query", ), @@ -460,6 +495,7 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -473,6 +509,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -484,6 +521,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -495,6 +533,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -506,6 +545,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -517,6 +557,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] @@ -525,14 +566,20 @@ Text Schema:Schema { FlatBuffer Schema:FB Schema { directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(2), @@ -551,16 +598,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(2), @@ -579,6 +633,7 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] enums: [] @@ -600,6 +655,7 @@ fields: [ Interface(0), ), 
description: None, + hack_source: None, }, Field { name: WithLocation { @@ -618,6 +674,7 @@ fields: [ Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -634,6 +691,7 @@ fields: [ Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -650,6 +708,7 @@ fields: [ Interface(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -668,6 +727,7 @@ fields: [ Interface(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -684,6 +744,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -702,6 +763,7 @@ fields: [ Interface(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -720,6 +782,7 @@ fields: [ Interface(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -736,6 +799,7 @@ fields: [ Interface(2), ), description: None, + hack_source: None, }, ] input_objects: [] @@ -758,6 +822,7 @@ interfaces: [ directives: [], interfaces: [], description: None, + hack_source: None, }, Interface { name: WithLocation { @@ -780,6 +845,7 @@ interfaces: [ InterfaceID(1), ], description: None, + hack_source: None, }, Interface { name: WithLocation { @@ -802,6 +868,7 @@ interfaces: [ InterfaceID(0), ], description: None, + hack_source: None, }, Interface { name: WithLocation { @@ -825,6 +892,7 @@ interfaces: [ InterfaceID(1), ], description: None, + hack_source: None, }, ] objects: [ @@ -842,6 +910,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -855,6 +924,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -866,6 +936,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -877,6 +948,7 @@ scalars: [ is_extension: false, directives: [], 
description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -888,6 +960,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -899,6 +972,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [] diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-directive.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-directive.expected index eacc0feddba25..e173c62337aa8 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-directive.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-directive.expected @@ -6,7 +6,7 @@ directive @fetchable(field_name: String) on OBJECT ==================================== ERROR ==================================== ✖︎ Duplicate directive definition 'fetchable'. - :4:12 + invalid-duplicate-directive.graphql:4:12 3 │ directive @fetchable(field_name: String) on OBJECT 4 │ directive @fetchable(field_name: String) on OBJECT │ ^^^^^^^^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-query-operation.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-query-operation.expected index ffcea77cf0e3f..59d45c515dd5e 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-query-operation.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-query-operation.expected @@ -14,9 +14,9 @@ type Query2 { s: String } ==================================== ERROR ==================================== -✖︎ Duplicate Query type definition, got 'Query2' and 'Query1'. +✖︎ Duplicate query type definition, got 'Query2' and 'Query1'. 
- :5:10 + invalid-duplicate-query-operation.graphql:5:10 4 │ query: Query1 5 │ query: Query2 │ ^^^^^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.expected new file mode 100644 index 0000000000000..1ca0d4c0faec1 --- /dev/null +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.expected @@ -0,0 +1,26 @@ +==================================== INPUT ==================================== +# expected-to-throw + +type Foo { + name: String +} + +interface Foo { + name: String +} +==================================== ERROR ==================================== +✖︎ Duplicate definition for type 'Foo'. + + invalid-duplicate-type-name.graphql:7:11 + 6 │ + 7 │ interface Foo { + │ ^^^ + 8 │ name: String + + ℹ︎ `Foo` was previously defined here: + + invalid-duplicate-type-name.graphql:3:6 + 2 │ + 3 │ type Foo { + │ ^^^ + 4 │ name: String diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.graphql b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.graphql new file mode 100644 index 0000000000000..a3dc04e7d999f --- /dev/null +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-duplicate-type-name.graphql @@ -0,0 +1,9 @@ +# expected-to-throw + +type Foo { + name: String +} + +interface Foo { + name: String +} diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-extension-implements-noninterface.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-extension-implements-noninterface.expected index 311b8cc51a329..0b9a18bee057f 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-extension-implements-noninterface.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-extension-implements-noninterface.expected @@ -17,10 +17,18 @@ extend type User implements Foo { client: 
String } ==================================== ERROR ==================================== -✖︎ Expected an interface type for name 'Foo', got 'Object(1)'. +✖︎ Expected an interface type for name 'Foo', got an object. - :14:29 + invalid-extension-implements-noninterface.graphql:14:29 13 │ } 14 │ extend type User implements Foo { │ ^^^ 15 │ nickname: String + + ℹ︎ the other type is defined here + + invalid-extension-implements-noninterface.graphql:7:6 + 6 │ + 7 │ type Foo { + │ ^^^ + 8 │ url: String diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-implements-non-interface.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-implements-non-interface.expected index d79c51119fed3..c8fff263f1d58 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-implements-non-interface.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-implements-non-interface.expected @@ -9,10 +9,18 @@ type Page implements User { id: ID } ==================================== ERROR ==================================== -✖︎ Expected an interface type for name 'User', got 'Object(0)'. +✖︎ Expected an interface type for name 'User', got an object. 
- :7:22 + invalid-implements-non-interface.graphql:7:22 6 │ 7 │ type Page implements User { │ ^^^^ 8 │ id: ID + + ℹ︎ the other type is defined here + + invalid-implements-non-interface.graphql:3:6 + 2 │ + 3 │ type User { + │ ^^^^ + 4 │ id: ID diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-interface-implements-noninterface.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-interface-implements-noninterface.expected index b84ab5a5c3fbc..fd10601d24022 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-interface-implements-noninterface.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-interface-implements-noninterface.expected @@ -9,10 +9,18 @@ interface Page implements User { id: ID } ==================================== ERROR ==================================== -✖︎ Expected an interface type for name 'User', got 'Object(0)'. +✖︎ Expected an interface type for name 'User', got an object. - :7:27 + invalid-interface-implements-noninterface.graphql:7:27 6 │ 7 │ interface Page implements User { │ ^^^^ 8 │ id: ID + + ℹ︎ the other type is defined here + + invalid-interface-implements-noninterface.graphql:3:6 + 2 │ + 3 │ type User { + │ ^^^^ + 4 │ id: ID diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected index a50c6abbb2d23..1149e8c0dfa4c 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected @@ -21,7 +21,7 @@ extend type User { ℹ︎ previously defined here - :4:3 + invalid-object-extension-duplicated-server-field.graphql:4:3 3 │ type User { 4 │ name: String │ ^^^^ diff --git 
a/compiler/crates/schema/tests/build_schema/fixtures/invalid-sdl.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-sdl.expected index 54ee84ebf9041..d21de04a5acf2 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-sdl.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-sdl.expected @@ -13,7 +13,7 @@ type Page implements User @source("") { ==================================== ERROR ==================================== ✖︎ Expected a non-variable identifier (e.g. 'x' or 'Foo') - :9:35 + invalid-sdl.graphql:9:35 8 │ 9 │ type Page implements User @source("") { │ ^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/invalid-type-reference.expected b/compiler/crates/schema/tests/build_schema/fixtures/invalid-type-reference.expected index 530b19980660f..6aa9649cc451e 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/invalid-type-reference.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/invalid-type-reference.expected @@ -7,7 +7,7 @@ type User { ==================================== ERROR ==================================== ✖︎ Reference to undefined type 'Email'. - :4:12 + invalid-type-reference.graphql:4:12 3 │ type User { 4 │ emails: [Email!]! 
│ ^^^^^ diff --git a/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.expected b/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.expected index ad529c0b9f26e..a0a5d473c0ef2 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.expected +++ b/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.expected @@ -57,6 +57,15 @@ extend type User { nickname: String client: ClientType } + +interface HasName { + name: String +} + +extend interface Node implements HasName { + name: String +} +extend type User implements HasName ==================================== OUTPUT =================================== Text Schema:Schema { query_type: Some( @@ -66,14 +75,20 @@ Text Schema:Schema { subscription_type: None directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :255:262, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :263:265, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -92,16 +107,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :334:338, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :339:341, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(3), @@ -120,16 +142,23 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "source", - ), + name: WithLocation { + location: kitchen-sink.graphql:11:17, + item: DirectiveName( + "source", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: kitchen-sink.graphql:18:24, + item: ArgumentName( + "schema", + ), 
+ }, type_: NonNull( Named( Scalar(8), @@ -140,9 +169,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: kitchen-sink.graphql:35:39, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(8), @@ -164,6 +196,7 @@ Text Schema:Schema { repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] type_map: { @@ -171,6 +204,7 @@ Text Schema:Schema { "Boolean": Scalar(3), "ClientType": Object(3), "Float": Scalar(6), + "HasName": Interface(1), "ID": Scalar(4), "Int": Scalar(7), "Location": InputObject(0), @@ -185,7 +219,7 @@ Text Schema:Schema { enums: [ Enum { name: WithLocation { - location: :340:348, + location: kitchen-sink.graphql:340:348, item: EnumName( "PageType", ), @@ -229,20 +263,24 @@ Text Schema:Schema { ], directives: [], description: None, + hack_source: None, }, ] fields: [ Field { name: WithLocation { - location: :154:158, + location: kitchen-sink.graphql:154:158, item: "node", }, is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + location: kitchen-sink.graphql:159:161, + item: ArgumentName( + "id", + ), + }, type_: NonNull( Named( Scalar(4), @@ -261,18 +299,22 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :176:181, + location: kitchen-sink.graphql:176:181, item: "nodes", }, is_extension: false, arguments: [ Argument { - name: ArgumentName( - "ids", - ), + name: WithLocation { + location: kitchen-sink.graphql:182:185, + item: ArgumentName( + "ids", + ), + }, type_: NonNull( List( NonNull( @@ -301,10 +343,11 @@ Text Schema:Schema { Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :227:229, + location: kitchen-sink.graphql:227:229, item: "id", }, is_extension: false, @@ -319,10 +362,11 @@ Text Schema:Schema { Interface(0), ), description: None, + hack_source: None, 
}, Field { name: WithLocation { - location: :268:270, + location: kitchen-sink.graphql:268:270, item: "id", }, is_extension: false, @@ -337,10 +381,11 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :278:282, + location: kitchen-sink.graphql:278:282, item: "name", }, is_extension: false, @@ -353,10 +398,11 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :308:310, + location: kitchen-sink.graphql:308:310, item: "id", }, is_extension: false, @@ -369,10 +415,11 @@ Text Schema:Schema { Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { - location: :317:321, + location: kitchen-sink.graphql:317:321, item: "type", }, is_extension: false, @@ -385,6 +432,7 @@ Text Schema:Schema { Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -401,6 +449,7 @@ Text Schema:Schema { Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -417,6 +466,7 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -433,6 +483,41 @@ Text Schema:Schema { Object(1), ), description: None, + hack_source: None, + }, + Field { + name: WithLocation { + location: kitchen-sink.graphql:835:839, + item: "name", + }, + is_extension: true, + arguments: [], + type_: Named( + Scalar(8), + ), + directives: [], + parent_type: Some( + Interface(1), + ), + description: None, + hack_source: None, + }, + Field { + name: WithLocation { + location: kitchen-sink.graphql:896:900, + item: "name", + }, + is_extension: true, + arguments: [], + type_: Named( + Scalar(8), + ), + directives: [], + parent_type: Some( + Interface(0), + ), + description: None, + hack_source: None, }, Field { name: WithLocation { @@ -451,6 +536,7 @@ Text Schema:Schema { description: Some( "This object's GraphQL type. 
Provided by GraphQL type name introspection.", ), + hack_source: None, }, Field { name: WithLocation { @@ -467,6 +553,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -485,6 +572,7 @@ Text Schema:Schema { description: Some( "Relay's cache key for this object.", ), + hack_source: None, }, Field { name: WithLocation { @@ -499,6 +587,7 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, Field { name: WithLocation { @@ -508,9 +597,12 @@ Text Schema:Schema { is_extension: true, arguments: [ Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(8), @@ -529,21 +621,25 @@ Text Schema:Schema { directives: [], parent_type: None, description: None, + hack_source: None, }, ] input_objects: [ InputObject { name: WithLocation { - location: :510:518, + location: kitchen-sink.graphql:510:518, item: InputObjectName( "Location", ), }, fields: [ Argument { - name: ArgumentName( - "lat", - ), + name: WithLocation { + location: kitchen-sink.graphql:569:572, + item: ArgumentName( + "lat", + ), + }, type_: NonNull( Named( Scalar(6), @@ -554,9 +650,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "lon", - ), + name: WithLocation { + location: kitchen-sink.graphql:583:586, + item: ArgumentName( + "lon", + ), + }, type_: NonNull( Named( Scalar(6), @@ -567,9 +666,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "city", - ), + name: WithLocation { + location: kitchen-sink.graphql:597:601, + item: ArgumentName( + "city", + ), + }, type_: Named( Scalar(8), ), @@ -578,9 +680,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "zip", - ), + name: WithLocation { + location: kitchen-sink.graphql:612:615, + item: ArgumentName( + "zip", + ), + }, type_: Named( 
Scalar(7), ), @@ -589,9 +694,12 @@ Text Schema:Schema { directives: [], }, Argument { - name: ArgumentName( - "previous_cities", - ), + name: WithLocation { + location: kitchen-sink.graphql:623:638, + item: ArgumentName( + "previous_cities", + ), + }, type_: List( NonNull( Named( @@ -642,12 +750,13 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, ] interfaces: [ Interface { name: WithLocation { - location: :218:222, + location: kitchen-sink.graphql:218:222, item: InterfaceName( "Node", ), @@ -659,16 +768,38 @@ Text Schema:Schema { ], fields: [ FieldID(2), + FieldID(11), + ], + directives: [], + interfaces: [ + InterfaceID(1), + ], + description: None, + hack_source: None, + }, + Interface { + name: WithLocation { + location: kitchen-sink.graphql:823:830, + item: InterfaceName( + "HasName", + ), + }, + is_extension: true, + implementing_interfaces: [], + implementing_objects: [], + fields: [ + FieldID(10), ], directives: [], interfaces: [], description: None, + hack_source: None, }, ] objects: [ Object { name: WithLocation { - location: :144:149, + location: kitchen-sink.graphql:144:149, item: ObjectName( "Query", ), @@ -681,10 +812,11 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :243:247, + location: kitchen-sink.graphql:243:247, item: ObjectName( "User", ), @@ -698,13 +830,15 @@ Text Schema:Schema { ], interfaces: [ InterfaceID(0), + InterfaceID(1), ], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { - location: :299:303, + location: kitchen-sink.graphql:299:303, item: ObjectName( "Page", ), @@ -717,6 +851,7 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -732,6 +867,7 @@ Text Schema:Schema { interfaces: [], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -745,6 +881,7 @@ Text Schema:Schema { 
is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -756,6 +893,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -767,6 +905,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -778,6 +917,7 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -789,10 +929,11 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { - location: :459:462, + location: kitchen-sink.graphql:459:462, item: ScalarName( "URL", ), @@ -836,10 +977,11 @@ Text Schema:Schema { }, ], description: None, + hack_source: None, }, Scalar { name: WithLocation { - location: :660:665, + location: kitchen-sink.graphql:660:665, item: ScalarName( "Float", ), @@ -847,10 +989,11 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { - location: :674:677, + location: kitchen-sink.graphql:674:677, item: ScalarName( "Int", ), @@ -858,10 +1001,11 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { - location: :686:692, + location: kitchen-sink.graphql:686:692, item: ScalarName( "String", ), @@ -869,13 +1013,16 @@ Text Schema:Schema { is_extension: false, directives: [], description: None, + hack_source: None, }, ] unions: [ Union { name: WithLocation { - location: :431:436, - item: "Actor", + location: kitchen-sink.graphql:431:436, + item: UnionName( + "Actor", + ), }, is_extension: false, members: [ @@ -884,6 +1031,7 @@ Text Schema:Schema { ], directives: [], description: None, + hack_source: None, }, ] } @@ -891,14 +1039,20 @@ Text Schema:Schema { FlatBuffer 
Schema:FB Schema { directives: [ Directive { - name: DirectiveName( - "include", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "include", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(2), @@ -917,16 +1071,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "skip", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "skip", + ), + }, arguments: [ Argument { - name: ArgumentName( - "if", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "if", + ), + }, type_: NonNull( Named( Scalar(2), @@ -945,16 +1106,23 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, Directive { - name: DirectiveName( - "source", - ), + name: WithLocation { + location: :0:0, + item: DirectiveName( + "source", + ), + }, arguments: [ Argument { - name: ArgumentName( - "schema", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "schema", + ), + }, type_: NonNull( Named( Scalar(1), @@ -965,9 +1133,12 @@ directives: [ directives: [], }, Argument { - name: ArgumentName( - "name", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "name", + ), + }, type_: NonNull( Named( Scalar(1), @@ -989,6 +1160,7 @@ directives: [ repeatable: false, is_extension: false, description: None, + hack_source: None, }, ] enums: [ @@ -1038,6 +1210,7 @@ enums: [ ], directives: [], description: None, + hack_source: None, }, ] fields: [ @@ -1056,6 +1229,24 @@ fields: [ Object(1), ), description: None, + hack_source: None, + }, + Field { + name: WithLocation { + location: :0:0, + item: "name", + }, + is_extension: true, + arguments: [], + type_: Named( + Scalar(1), + ), + directives: [], + parent_type: Some( + Interface(1), + ), + description: None, + 
hack_source: None, }, Field { name: WithLocation { @@ -1074,6 +1265,24 @@ fields: [ Interface(0), ), description: None, + hack_source: None, + }, + Field { + name: WithLocation { + location: :0:0, + item: "name", + }, + is_extension: true, + arguments: [], + type_: Named( + Scalar(1), + ), + directives: [], + parent_type: Some( + Interface(0), + ), + description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1090,6 +1299,7 @@ fields: [ Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1106,6 +1316,7 @@ fields: [ Object(2), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1115,9 +1326,12 @@ fields: [ is_extension: false, arguments: [ Argument { - name: ArgumentName( - "id", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "id", + ), + }, type_: NonNull( Named( Scalar(0), @@ -1136,6 +1350,7 @@ fields: [ Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1145,9 +1360,12 @@ fields: [ is_extension: false, arguments: [ Argument { - name: ArgumentName( - "ids", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "ids", + ), + }, type_: NonNull( List( NonNull( @@ -1176,6 +1394,7 @@ fields: [ Object(3), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1194,6 +1413,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1210,6 +1430,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1226,6 +1447,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, Field { name: WithLocation { @@ -1242,6 +1464,7 @@ fields: [ Object(0), ), description: None, + hack_source: None, }, ] input_objects: [ @@ -1254,9 +1477,12 @@ input_objects: [ }, fields: [ Argument { - name: ArgumentName( - "lat", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "lat", + ), + }, 
type_: NonNull( Named( Scalar(3), @@ -1267,9 +1493,12 @@ input_objects: [ directives: [], }, Argument { - name: ArgumentName( - "lon", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "lon", + ), + }, type_: NonNull( Named( Scalar(3), @@ -1280,9 +1509,12 @@ input_objects: [ directives: [], }, Argument { - name: ArgumentName( - "city", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "city", + ), + }, type_: Named( Scalar(1), ), @@ -1291,9 +1523,12 @@ input_objects: [ directives: [], }, Argument { - name: ArgumentName( - "zip", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "zip", + ), + }, type_: Named( Scalar(4), ), @@ -1302,9 +1537,12 @@ input_objects: [ directives: [], }, Argument { - name: ArgumentName( - "previous_cities", - ), + name: WithLocation { + location: :0:0, + item: ArgumentName( + "previous_cities", + ), + }, type_: List( NonNull( Named( @@ -1355,9 +1593,28 @@ input_objects: [ }, ], description: None, + hack_source: None, }, ] interfaces: [ + Interface { + name: WithLocation { + location: :0:0, + item: InterfaceName( + "HasName", + ), + }, + is_extension: true, + implementing_interfaces: [], + implementing_objects: [], + fields: [ + FieldID(7), + ], + directives: [], + interfaces: [], + description: None, + hack_source: None, + }, Interface { name: WithLocation { location: :0:0, @@ -1372,10 +1629,14 @@ interfaces: [ ], fields: [ FieldID(5), + FieldID(6), ], directives: [], - interfaces: [], + interfaces: [ + InterfaceID(1), + ], description: None, + hack_source: None, }, ] objects: [ @@ -1393,6 +1654,7 @@ objects: [ interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1403,12 +1665,13 @@ objects: [ }, is_extension: false, fields: [ - FieldID(6), - FieldID(7), + FieldID(8), + FieldID(9), ], interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1419,12 +1682,13 @@ objects: [ }, 
is_extension: false, fields: [ - FieldID(8), - FieldID(9), + FieldID(10), + FieldID(11), ], interfaces: [], directives: [], description: None, + hack_source: None, }, Object { name: WithLocation { @@ -1442,9 +1706,11 @@ objects: [ ], interfaces: [ InterfaceID(0), + InterfaceID(1), ], directives: [], description: None, + hack_source: None, }, ] scalars: [ @@ -1458,6 +1724,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1469,6 +1736,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1480,6 +1748,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1491,6 +1760,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1502,6 +1772,7 @@ scalars: [ is_extension: false, directives: [], description: None, + hack_source: None, }, Scalar { name: WithLocation { @@ -1549,13 +1820,16 @@ scalars: [ }, ], description: None, + hack_source: None, }, ] unions: [ Union { name: WithLocation { location: :0:0, - item: "Actor", + item: UnionName( + "Actor", + ), }, is_extension: false, members: [ @@ -1564,6 +1838,7 @@ unions: [ ], directives: [], description: None, + hack_source: None, }, ] } diff --git a/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.graphql b/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.graphql index 0d045d71e68a0..a6314a4f06554 100644 --- a/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.graphql +++ b/compiler/crates/schema/tests/build_schema/fixtures/kitchen-sink.graphql @@ -56,3 +56,12 @@ extend type User { nickname: String client: ClientType } + +interface HasName { + name: String +} + +extend interface Node implements HasName { + name: String +} +extend type User implements HasName diff --git 
a/compiler/crates/schema/tests/build_schema/mod.rs b/compiler/crates/schema/tests/build_schema/mod.rs deleted file mode 100644 index e7f6a19d21802..0000000000000 --- a/compiler/crates/schema/tests/build_schema/mod.rs +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - -use std::collections::BTreeMap; - -use common::Diagnostic; -use common::SourceLocationKey; -use common::TextSource; -use fixture_tests::Fixture; -use graphql_cli::DiagnosticPrinter; -use schema::build_schema_from_flat_buffer; -use schema::build_schema_with_extensions; -use schema::serialize_as_flatbuffer; -use schema::SDLSchema; -use schema::Schema; -use schema::Type; - -const SCHEMA_SEPARATOR: &str = "%extensions%"; - -pub fn transform_fixture(fixture: &Fixture<'_>) -> Result { - let parts: Vec<_> = fixture.content.split(SCHEMA_SEPARATOR).collect(); - let result = match parts.as_slice() { - [base] => { - build_schema_with_extensions::<_, &str>(&[(base, SourceLocationKey::generated())], &[]) - } - [base, extensions] => { - // prepend a comment so the correct line + column number is reported for client extension - // (since we source base and client schemas from one file) - let nchars_base = base.chars().count() + SCHEMA_SEPARATOR.chars().count(); - assert!(nchars_base > 0); - let prepended_extension = format!("{}\n{}", "#".repeat(nchars_base - 1), extensions); - build_schema_with_extensions( - &[(base, SourceLocationKey::generated())], - &[( - prepended_extension, - SourceLocationKey::standalone(fixture.file_name), - )], - ) - } - _ => panic!("Expected a single extension block"), - }; - - result - .map(print_schema_and_flat_buffer_schema) - .map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics)) -} - -fn print_schema_and_flat_buffer_schema(schema: SDLSchema) -> String { - let schema = 
schema.unwrap_in_memory_impl(); - let bytes = serialize_as_flatbuffer(&schema); - let fb_schema = build_schema_from_flat_buffer(&bytes).unwrap(); - let mut objects = Vec::new(); - let mut interfaces = Vec::new(); - let mut unions = Vec::new(); - let mut scalars = Vec::new(); - let mut enums = Vec::new(); - let mut input_objects = Vec::new(); - let mut fields = Vec::new(); - let mut directives = Vec::new(); - - // Hydrate types - for (key, _value) in schema.get_type_map().collect::>() { - let type_ = fb_schema.get_type(*key).unwrap(); - // Hyderate fields - match type_ { - Type::Object(id) => { - let object = fb_schema.object(id); - for field_id in object.fields.clone() { - fields.push(fb_schema.field(field_id)); - } - objects.push(object); - } - Type::Interface(id) => { - let interface = fb_schema.interface(id); - for field_id in interface.fields.clone() { - fields.push(fb_schema.field(field_id)); - } - interfaces.push(interface); - } - Type::Union(id) => unions.push(fb_schema.union(id)), - Type::Scalar(id) => scalars.push(fb_schema.scalar(id)), - Type::Enum(id) => enums.push(fb_schema.enum_(id)), - Type::InputObject(id) => input_objects.push(fb_schema.input_object(id)), - }; - } - // Hydrate directives - let mut ordered_directives = schema.get_directives().collect::>(); - ordered_directives.sort_by_key(|directive| directive.name); - for directive in ordered_directives { - directives.push(fb_schema.get_directive(directive.name).unwrap()); - } - let fb_schema_snapshot = format!( - r#"FB Schema {{ -directives: {:#?} -enums: {:#?} -fields: {:#?} -input_objects: {:#?} -interfaces: {:#?} -objects: {:#?} -scalars: {:#?} -unions: {:#?} -}}"#, - directives, enums, fields, input_objects, interfaces, objects, scalars, unions, - ); - format!( - "Text Schema:{}\n\nFlatBuffer Schema:{}", - schema.snapshot_print(), - fb_schema_snapshot - ) -} - -// NOTE: copied from graphql-test-helpers to avoid cyclic dependency breaking Rust Analyzer -fn diagnostics_to_sorted_string(source: 
&str, diagnostics: &[Diagnostic]) -> String { - let printer = - DiagnosticPrinter::new(|_| Some(TextSource::from_whole_document(source.to_string()))); - let mut printed = diagnostics - .iter() - .map(|diagnostic| printer.diagnostic_to_string(diagnostic)) - .collect::>(); - printed.sort(); - printed.join("\n\n") -} diff --git a/compiler/crates/schema/tests/build_schema_test.rs b/compiler/crates/schema/tests/build_schema_test.rs index 050054cc28613..5ee8dc93b4e83 100644 --- a/compiler/crates/schema/tests/build_schema_test.rs +++ b/compiler/crates/schema/tests/build_schema_test.rs @@ -4,7 +4,7 @@ * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * - * @generated SignedSource<<8634e6fbc71207f6f4bb9079d4bea17a>> + * @generated SignedSource<<0a030b4a01ee463bfd4bd071a0aa1930>> */ mod build_schema; @@ -12,135 +12,142 @@ mod build_schema; use build_schema::transform_fixture; use fixture_tests::test_fixture; -#[test] -fn directive_on_arg_def() { +#[tokio::test] +async fn directive_on_arg_def() { let input = include_str!("build_schema/fixtures/directive-on-arg-def.graphql"); let expected = include_str!("build_schema/fixtures/directive-on-arg-def.expected"); - test_fixture(transform_fixture, "directive-on-arg-def.graphql", "build_schema/fixtures/directive-on-arg-def.expected", input, expected); + test_fixture(transform_fixture, file!(), "directive-on-arg-def.graphql", "build_schema/fixtures/directive-on-arg-def.expected", input, expected).await; } -#[test] -fn directives_for_external_types() { +#[tokio::test] +async fn directives_for_external_types() { let input = include_str!("build_schema/fixtures/directives-for-external-types.graphql"); let expected = include_str!("build_schema/fixtures/directives-for-external-types.expected"); - test_fixture(transform_fixture, "directives-for-external-types.graphql", "build_schema/fixtures/directives-for-external-types.expected", input, expected); + 
test_fixture(transform_fixture, file!(), "directives-for-external-types.graphql", "build_schema/fixtures/directives-for-external-types.expected", input, expected).await; } -#[test] -fn extend_interface_before_define() { +#[tokio::test] +async fn extend_interface_before_define() { let input = include_str!("build_schema/fixtures/extend-interface-before-define.graphql"); let expected = include_str!("build_schema/fixtures/extend-interface-before-define.expected"); - test_fixture(transform_fixture, "extend-interface-before-define.graphql", "build_schema/fixtures/extend-interface-before-define.expected", input, expected); + test_fixture(transform_fixture, file!(), "extend-interface-before-define.graphql", "build_schema/fixtures/extend-interface-before-define.expected", input, expected).await; } -#[test] -fn extend_object_before_define() { +#[tokio::test] +async fn extend_object_before_define() { let input = include_str!("build_schema/fixtures/extend-object-before-define.graphql"); let expected = include_str!("build_schema/fixtures/extend-object-before-define.expected"); - test_fixture(transform_fixture, "extend-object-before-define.graphql", "build_schema/fixtures/extend-object-before-define.expected", input, expected); + test_fixture(transform_fixture, file!(), "extend-object-before-define.graphql", "build_schema/fixtures/extend-object-before-define.expected", input, expected).await; } -#[test] -fn field_descriptions() { +#[tokio::test] +async fn field_descriptions() { let input = include_str!("build_schema/fixtures/field-descriptions.graphql"); let expected = include_str!("build_schema/fixtures/field-descriptions.expected"); - test_fixture(transform_fixture, "field-descriptions.graphql", "build_schema/fixtures/field-descriptions.expected", input, expected); + test_fixture(transform_fixture, file!(), "field-descriptions.graphql", "build_schema/fixtures/field-descriptions.expected", input, expected).await; } -#[test] -fn interface_implements_interface() { +#[tokio::test] 
+async fn interface_implements_interface() { let input = include_str!("build_schema/fixtures/interface-implements-interface.graphql"); let expected = include_str!("build_schema/fixtures/interface-implements-interface.expected"); - test_fixture(transform_fixture, "interface-implements-interface.graphql", "build_schema/fixtures/interface-implements-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "interface-implements-interface.graphql", "build_schema/fixtures/interface-implements-interface.expected", input, expected).await; } -#[test] -fn invalid_duplicate_directive() { +#[tokio::test] +async fn invalid_duplicate_directive() { let input = include_str!("build_schema/fixtures/invalid-duplicate-directive.graphql"); let expected = include_str!("build_schema/fixtures/invalid-duplicate-directive.expected"); - test_fixture(transform_fixture, "invalid-duplicate-directive.graphql", "build_schema/fixtures/invalid-duplicate-directive.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-duplicate-directive.graphql", "build_schema/fixtures/invalid-duplicate-directive.expected", input, expected).await; } -#[test] -fn invalid_duplicate_query_operation() { +#[tokio::test] +async fn invalid_duplicate_query_operation() { let input = include_str!("build_schema/fixtures/invalid-duplicate-query-operation.graphql"); let expected = include_str!("build_schema/fixtures/invalid-duplicate-query-operation.expected"); - test_fixture(transform_fixture, "invalid-duplicate-query-operation.graphql", "build_schema/fixtures/invalid-duplicate-query-operation.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-duplicate-query-operation.graphql", "build_schema/fixtures/invalid-duplicate-query-operation.expected", input, expected).await; } -#[test] -fn invalid_extension_implements_noninterface() { +#[tokio::test] +async fn invalid_duplicate_type_name() { + let input = 
include_str!("build_schema/fixtures/invalid-duplicate-type-name.graphql"); + let expected = include_str!("build_schema/fixtures/invalid-duplicate-type-name.expected"); + test_fixture(transform_fixture, file!(), "invalid-duplicate-type-name.graphql", "build_schema/fixtures/invalid-duplicate-type-name.expected", input, expected).await; +} + +#[tokio::test] +async fn invalid_extension_implements_noninterface() { let input = include_str!("build_schema/fixtures/invalid-extension-implements-noninterface.graphql"); let expected = include_str!("build_schema/fixtures/invalid-extension-implements-noninterface.expected"); - test_fixture(transform_fixture, "invalid-extension-implements-noninterface.graphql", "build_schema/fixtures/invalid-extension-implements-noninterface.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-extension-implements-noninterface.graphql", "build_schema/fixtures/invalid-extension-implements-noninterface.expected", input, expected).await; } -#[test] -fn invalid_implements_non_interface() { +#[tokio::test] +async fn invalid_implements_non_interface() { let input = include_str!("build_schema/fixtures/invalid-implements-non-interface.graphql"); let expected = include_str!("build_schema/fixtures/invalid-implements-non-interface.expected"); - test_fixture(transform_fixture, "invalid-implements-non-interface.graphql", "build_schema/fixtures/invalid-implements-non-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-implements-non-interface.graphql", "build_schema/fixtures/invalid-implements-non-interface.expected", input, expected).await; } -#[test] -fn invalid_input_type() { +#[tokio::test] +async fn invalid_input_type() { let input = include_str!("build_schema/fixtures/invalid-input-type.graphql"); let expected = include_str!("build_schema/fixtures/invalid-input-type.expected"); - test_fixture(transform_fixture, "invalid-input-type.graphql", 
"build_schema/fixtures/invalid-input-type.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-input-type.graphql", "build_schema/fixtures/invalid-input-type.expected", input, expected).await; } -#[test] -fn invalid_input_type_interface() { +#[tokio::test] +async fn invalid_input_type_interface() { let input = include_str!("build_schema/fixtures/invalid-input-type-interface.graphql"); let expected = include_str!("build_schema/fixtures/invalid-input-type-interface.expected"); - test_fixture(transform_fixture, "invalid-input-type-interface.graphql", "build_schema/fixtures/invalid-input-type-interface.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-input-type-interface.graphql", "build_schema/fixtures/invalid-input-type-interface.expected", input, expected).await; } -#[test] -fn invalid_input_type_union() { +#[tokio::test] +async fn invalid_input_type_union() { let input = include_str!("build_schema/fixtures/invalid-input-type-union.graphql"); let expected = include_str!("build_schema/fixtures/invalid-input-type-union.expected"); - test_fixture(transform_fixture, "invalid-input-type-union.graphql", "build_schema/fixtures/invalid-input-type-union.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-input-type-union.graphql", "build_schema/fixtures/invalid-input-type-union.expected", input, expected).await; } -#[test] -fn invalid_interface_implements_noninterface() { +#[tokio::test] +async fn invalid_interface_implements_noninterface() { let input = include_str!("build_schema/fixtures/invalid-interface-implements-noninterface.graphql"); let expected = include_str!("build_schema/fixtures/invalid-interface-implements-noninterface.expected"); - test_fixture(transform_fixture, "invalid-interface-implements-noninterface.graphql", "build_schema/fixtures/invalid-interface-implements-noninterface.expected", input, expected); + test_fixture(transform_fixture, file!(), 
"invalid-interface-implements-noninterface.graphql", "build_schema/fixtures/invalid-interface-implements-noninterface.expected", input, expected).await; } -#[test] -fn invalid_object_extension_duplicated_server_field() { +#[tokio::test] +async fn invalid_object_extension_duplicated_server_field() { let input = include_str!("build_schema/fixtures/invalid-object-extension-duplicated-server-field.graphql"); let expected = include_str!("build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected"); - test_fixture(transform_fixture, "invalid-object-extension-duplicated-server-field.graphql", "build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-object-extension-duplicated-server-field.graphql", "build_schema/fixtures/invalid-object-extension-duplicated-server-field.expected", input, expected).await; } -#[test] -fn invalid_object_extension_local_duplicated_fields() { +#[tokio::test] +async fn invalid_object_extension_local_duplicated_fields() { let input = include_str!("build_schema/fixtures/invalid-object-extension-local-duplicated-fields.graphql"); let expected = include_str!("build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected"); - test_fixture(transform_fixture, "invalid-object-extension-local-duplicated-fields.graphql", "build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-object-extension-local-duplicated-fields.graphql", "build_schema/fixtures/invalid-object-extension-local-duplicated-fields.expected", input, expected).await; } -#[test] -fn invalid_sdl() { +#[tokio::test] +async fn invalid_sdl() { let input = include_str!("build_schema/fixtures/invalid-sdl.graphql"); let expected = include_str!("build_schema/fixtures/invalid-sdl.expected"); - test_fixture(transform_fixture, "invalid-sdl.graphql", 
"build_schema/fixtures/invalid-sdl.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-sdl.graphql", "build_schema/fixtures/invalid-sdl.expected", input, expected).await; } -#[test] -fn invalid_type_reference() { +#[tokio::test] +async fn invalid_type_reference() { let input = include_str!("build_schema/fixtures/invalid-type-reference.graphql"); let expected = include_str!("build_schema/fixtures/invalid-type-reference.expected"); - test_fixture(transform_fixture, "invalid-type-reference.graphql", "build_schema/fixtures/invalid-type-reference.expected", input, expected); + test_fixture(transform_fixture, file!(), "invalid-type-reference.graphql", "build_schema/fixtures/invalid-type-reference.expected", input, expected).await; } -#[test] -fn kitchen_sink() { +#[tokio::test] +async fn kitchen_sink() { let input = include_str!("build_schema/fixtures/kitchen-sink.graphql"); let expected = include_str!("build_schema/fixtures/kitchen-sink.expected"); - test_fixture(transform_fixture, "kitchen-sink.graphql", "build_schema/fixtures/kitchen-sink.expected", input, expected); + test_fixture(transform_fixture, file!(), "kitchen-sink.graphql", "build_schema/fixtures/kitchen-sink.expected", input, expected).await; } diff --git a/compiler/crates/signedsource/Cargo.toml b/compiler/crates/signedsource/Cargo.toml index a764d128dde0d..4d68654fea935 100644 --- a/compiler/crates/signedsource/Cargo.toml +++ b/compiler/crates/signedsource/Cargo.toml @@ -1,13 +1,15 @@ # @generated by autocargo from //relay/oss/crates/signedsource:signedsource + [package] name = "signedsource" version = "0.0.0" authors = ["Facebook"] edition = "2021" +repository = "https://github.com/facebook/relay" license = "MIT" [dependencies] hex = "0.4.3" lazy_static = "1.4" md-5 = "0.10" -regex = "1.6.0" +regex = "1.9.2" diff --git a/compiler/fixture_dirs.txt b/compiler/fixture_dirs.txt index 0dde34c226124..a269aaeb5227c 100644 --- a/compiler/fixture_dirs.txt +++ 
b/compiler/fixture_dirs.txt @@ -7,6 +7,7 @@ crates/graphql-ir-validations/tests/validate_selection_conflict crates/graphql-ir/tests/parse_with_extensions crates/graphql-ir/tests/parse_with_provider crates/graphql-ir/tests/parse +crates/graphql-syntax/tests/advance_schema_document crates/graphql-syntax/tests/parse_document crates/graphql-syntax/tests/parse_document_with_features crates/graphql-syntax/tests/parse_executable_document_with_error_recovery @@ -18,33 +19,40 @@ crates/graphql-text-printer/tests/operation_printer crates/graphql-text-printer/tests/print_ast crates/graphql-text-printer/tests/print crates/relay-codegen/tests/aliased_fragments +crates/relay-codegen/tests/catch_directive_codegen crates/relay-codegen/tests/client_edges crates/relay-codegen/tests/client_extensions crates/relay-codegen/tests/client_extensions_abstract_types crates/relay-codegen/tests/connections crates/relay-codegen/tests/deduped_json_codegen crates/relay-codegen/tests/defer_stream +crates/relay-codegen/tests/throw_on_field_error_directive_codegen crates/relay-codegen/tests/json_codegen -crates/relay-codegen/tests/react_flight_codegen crates/relay-codegen/tests/relay_actor_change crates/relay-codegen/tests/request_metadata crates/relay-codegen/tests/skip_printing_nulls crates/relay-codegen/tests/required_directive_codegen crates/relay-compiler/tests/compile_relay_artifacts crates/relay-compiler/tests/compile_relay_artifacts_with_custom_id +crates/relay-compiler/tests/relay_compiler_integration crates/relay-docblock/tests/parse crates/relay-docblock/tests/to_schema crates/relay-lsp/tests/find_field_usages crates/relay-lsp/tests/hover +crates/relay-schema-generation/tests/docblock +crates/relay-schema-generation/tests/extract crates/relay-transforms/tests/apply_fragment_arguments crates/relay-transforms/tests/assignable_directive crates/relay-transforms/tests/assignable_fragment_spread +crates/relay-transforms/tests/catch_directive crates/relay-transforms/tests/client_edges 
crates/relay-transforms/tests/client_extensions crates/relay-transforms/tests/client_extensions_abstract_types crates/relay-transforms/tests/declarative_connection crates/relay-transforms/tests/defer_stream crates/relay-transforms/tests/disallow_non_node_id_fields +crates/relay-transforms/tests/disallow_readtime_features_in_mutations +crates/relay-transforms/tests/disallow_required_on_non_null_field crates/relay-transforms/tests/disallow_reserved_aliases crates/relay-transforms/tests/disallow_typename_on_root crates/relay-transforms/tests/flatten @@ -62,8 +70,8 @@ crates/relay-transforms/tests/match_transform_local crates/relay-transforms/tests/provided_variable_fragment_transform crates/relay-transforms/tests/refetchable_fragment crates/relay-transforms/tests/relay_actor_change -crates/relay-transforms/tests/relay_client_component crates/relay-transforms/tests/relay_resolvers +crates/relay-transforms/tests/relay_resolvers_abstract_types crates/relay-transforms/tests/relay_test_operation crates/relay-transforms/tests/required_directive crates/relay-transforms/tests/skip_client_extensions diff --git a/compiler/rustfmt.toml b/compiler/rustfmt.toml index b7258ed0a8d84..336df502642ed 100644 --- a/compiler/rustfmt.toml +++ b/compiler/rustfmt.toml @@ -1,6 +1,7 @@ # Get help on options with `rustfmt --help=config` # Please keep these in alphabetical order. edition = "2021" +format_code_in_doc_comments = true group_imports = "StdExternalCrate" imports_granularity = "Item" merge_derives = false diff --git a/flow-typed/npm/jest_v23.x.x.js b/flow-typed/npm/jest_v23.x.x.js deleted file mode 100644 index 3e202de7fe14e..0000000000000 --- a/flow-typed/npm/jest_v23.x.x.js +++ /dev/null @@ -1,1192 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @format - * @oncall relay - */ - -/* eslint-disable no-unused-vars */ - -'use strict'; - -// Modified from https://raw.githubusercontent.com/flow-typed/flow-typed/master/definitions/npm/jest_v23.x.x/flow_v0.39.x-v0.103.x/jest_v23.x.x.js -// List of modifications: -// - fix some [] -> Array lint warnings -// - make it.each/describe.each take $ReadOnlyArray instead of Array -// - added definition for `isolateModules` -// -// TODO(T35016336) remove the .each modifications if flow-typed adopts them - -type JestMockFn, TReturn> = {| - (...args: TArguments): TReturn, - /** - * An object for introspecting mock calls - */ - mock: { - /** - * An array that represents all calls that have been made into this mock - * function. Each call is represented by an array of arguments that were - * passed during the call. - */ - calls: Array, - /** - * An array that contains all the object instances that have been - * instantiated from this mock function. - */ - instances: Array, - /** - * An array that contains all the object results that have been - * returned by this mock function call - */ - results: Array<{isThrow: boolean, value: TReturn, ...}>, - ... - }, - /** - * Resets all information stored in the mockFn.mock.calls and - * mockFn.mock.instances arrays. Often this is useful when you want to clean - * up a mock's usage data between two assertions. - */ - mockClear(): void, - /** - * Resets all information stored in the mock. This is useful when you want to - * completely restore a mock back to its initial state. - */ - mockReset(): void, - /** - * Removes the mock and restores the initial implementation. This is useful - * when you want to mock functions in certain test cases and restore the - * original implementation in others. Beware that mockFn.mockRestore only - * works when mock was created with jest.spyOn. Thus you have to take care of - * restoration yourself when manually assigning jest.fn(). 
- */ - mockRestore(): void, - /** - * Accepts a function that should be used as the implementation of the mock. - * The mock itself will still record all calls that go into and instances - * that come from itself -- the only difference is that the implementation - * will also be executed when the mock is called. - */ - mockImplementation( - fn: (...args: TArguments) => TReturn, - ): JestMockFn, - /** - * Accepts a function that will be used as an implementation of the mock for - * one call to the mocked function. Can be chained so that multiple function - * calls produce different results. - */ - mockImplementationOnce( - fn: (...args: TArguments) => TReturn, - ): JestMockFn, - /** - * Accepts a string to use in test result output in place of "jest.fn()" to - * indicate which mock function is being referenced. - */ - mockName(name: string): JestMockFn, - /** - * Just a simple sugar function for returning `this` - */ - mockReturnThis(): void, - /** - * Accepts a value that will be returned whenever the mock function is called. - */ - mockReturnValue(value: TReturn): JestMockFn, - /** - * Sugar for only returning a value once inside your mock - */ - mockReturnValueOnce(value: TReturn): JestMockFn, - /** - * Sugar for jest.fn().mockImplementation(() => Promise.resolve(value)) - */ - mockResolvedValue(value: TReturn): JestMockFn>, - /** - * Sugar for jest.fn().mockImplementationOnce(() => Promise.resolve(value)) - */ - mockResolvedValueOnce( - value: TReturn, - ): JestMockFn>, - /** - * Sugar for jest.fn().mockImplementation(() => Promise.reject(value)) - */ - mockRejectedValue(value: TReturn): JestMockFn>, - /** - * Sugar for jest.fn().mockImplementationOnce(() => Promise.reject(value)) - */ - mockRejectedValueOnce(value: TReturn): JestMockFn>, -|}; - -type JestAsymmetricEqualityType = { - /** - * A custom Jasmine equality tester - */ - asymmetricMatch(value: mixed): boolean, - ... 
-}; - -type JestCallsType = { - allArgs(): mixed, - all(): mixed, - any(): boolean, - count(): number, - first(): mixed, - mostRecent(): mixed, - reset(): void, - ... -}; - -type JestClockType = { - install(): void, - mockDate(date: Date): void, - tick(milliseconds?: number): void, - uninstall(): void, - ... -}; - -type JestMatcherResult = { - message?: string | (() => string), - pass: boolean, - ... -}; - -type JestMatcher = ( - actual: any, - expected: any, -) => JestMatcherResult | Promise; - -type JestPromiseType = { - /** - * Use rejects to unwrap the reason of a rejected promise so any other - * matcher can be chained. If the promise is fulfilled the assertion fails. - */ - rejects: JestExpectType, - /** - * Use resolves to unwrap the value of a fulfilled promise so any other - * matcher can be chained. If the promise is rejected the assertion fails. - */ - resolves: JestExpectType, - ... -}; - -/** - * Jest allows functions and classes to be used as test names in test() and - * describe() - */ -type JestTestName = string | Function; - -/** - * Plugin: jest-styled-components - */ - -type JestStyledComponentsMatcherValue = - | string - | JestAsymmetricEqualityType - | RegExp - | typeof undefined; - -type JestStyledComponentsMatcherOptions = { - media?: string, - modifier?: string, - supports?: string, - ... -}; - -type JestStyledComponentsMatchersType = { - toHaveStyleRule( - property: string, - value: JestStyledComponentsMatcherValue, - options?: JestStyledComponentsMatcherOptions, - ): void, - ... 
-}; - -/** - * Plugin: jest-enzyme - */ -type EnzymeMatchersType = { - toBeChecked(): void, - toBeDisabled(): void, - toBeEmpty(): void, - toBeEmptyRender(): void, - toBePresent(): void, - toContainReact(element: React$Element): void, - toExist(): void, - toHaveClassName(className: string): void, - toHaveHTML(html: string): void, - toHaveProp: ((propKey: string, propValue?: any) => void) & - ((props: Object) => void), - toHaveRef(refName: string): void, - toHaveState: ((stateKey: string, stateValue?: any) => void) & - ((state: Object) => void), - toHaveStyle: ((styleKey: string, styleValue?: any) => void) & - ((style: Object) => void), - toHaveTagName(tagName: string): void, - toHaveText(text: string): void, - toIncludeText(text: string): void, - toHaveValue(value: any): void, - toMatchElement(element: React$Element): void, - toMatchSelector(selector: string): void, - ... -}; - -// DOM testing library extensions https://github.com/kentcdodds/dom-testing-library#custom-jest-matchers -type DomTestingLibraryType = { - toBeInTheDOM(): void, - toHaveTextContent(content: string): void, - toHaveAttribute(name: string, expectedValue?: string): void, - ... 
-}; - -// Jest JQuery Matchers: https://github.com/unindented/custom-jquery-matchers -type JestJQueryMatchersType = { - toExist(): void, - toHaveLength(len: number): void, - toHaveId(id: string): void, - toHaveClass(className: string): void, - toHaveTag(tag: string): void, - toHaveAttr(key: string, val?: any): void, - toHaveProp(key: string, val?: any): void, - toHaveText(text: string | RegExp): void, - toHaveData(key: string, val?: any): void, - toHaveValue(val: any): void, - toHaveCss(css: {[key: string]: any, ...}): void, - toBeChecked(): void, - toBeDisabled(): void, - toBeEmpty(): void, - toBeHidden(): void, - toBeSelected(): void, - toBeVisible(): void, - toBeFocused(): void, - toBeInDom(): void, - toBeMatchedBy(sel: string): void, - toHaveDescendant(sel: string): void, - toHaveDescendantWithText(sel: string, text: string | RegExp): void, - ... -}; - -// Jest Extended Matchers: https://github.com/jest-community/jest-extended -type JestExtendedMatchersType = { - /** - * Note: Currently unimplemented - * Passing assertion - * - * @param {String} message - */ - // pass(message: string): void; - - /** - * Note: Currently unimplemented - * Failing assertion - * - * @param {String} message - */ - // fail(message: string): void; - - /** - * Use .toBeEmpty when checking if a String '', Array [] or Object {} is empty. - */ - toBeEmpty(): void, - - /** - * Use .toBeOneOf when checking if a value is a member of a given Array. - * @param {Array.<*>} members - */ - toBeOneOf(members: Array): void, - - /** - * Use `.toBeNil` when checking a value is `null` or `undefined`. - */ - toBeNil(): void, - - /** - * Use `.toSatisfy` when you want to use a custom matcher by supplying a predicate function that returns a `Boolean`. - * @param {Function} predicate - */ - toSatisfy(predicate: (n: any) => boolean): void, - - /** - * Use `.toBeArray` when checking if a value is an `Array`. 
- */ - toBeArray(): void, - - /** - * Use `.toBeArrayOfSize` when checking if a value is an `Array` of size x. - * @param {Number} x - */ - toBeArrayOfSize(x: number): void, - - /** - * Use `.toIncludeAllMembers` when checking if an `Array` contains all of the same members of a given set. - * @param {Array.<*>} members - */ - toIncludeAllMembers(members: Array): void, - - /** - * Use `.toIncludeAnyMembers` when checking if an `Array` contains any of the members of a given set. - * @param {Array.<*>} members - */ - toIncludeAnyMembers(members: Array): void, - - /** - * Use `.toSatisfyAll` when you want to use a custom matcher by supplying a predicate function that returns a `Boolean` for all values in an array. - * @param {Function} predicate - */ - toSatisfyAll(predicate: (n: any) => boolean): void, - - /** - * Use `.toBeBoolean` when checking if a value is a `Boolean`. - */ - toBeBoolean(): void, - - /** - * Use `.toBeTrue` when checking a value is equal (===) to `true`. - */ - toBeTrue(): void, - - /** - * Use `.toBeFalse` when checking a value is equal (===) to `false`. - */ - toBeFalse(): void, - - /** - * Use .toBeDate when checking if a value is a Date. - */ - toBeDate(): void, - - /** - * Use `.toBeFunction` when checking if a value is a `Function`. - */ - toBeFunction(): void, - - /** - * Use `.toHaveBeenCalledBefore` when checking if a `Mock` was called before another `Mock`. - * - * Note: Required Jest version >22 - * Note: Your mock functions will have to be asynchronous to cause the timestamps inside of Jest to occur in a differentJS event loop, otherwise the mock timestamps will all be the same - * - * @param {Mock} mock - */ - toHaveBeenCalledBefore(mock: JestMockFn): void, - - /** - * Use `.toBeNumber` when checking if a value is a `Number`. - */ - toBeNumber(): void, - - /** - * Use `.toBeNaN` when checking a value is `NaN`. - */ - toBeNaN(): void, - - /** - * Use `.toBeFinite` when checking if a value is a `Number`, not `NaN` or `Infinity`. 
- */ - toBeFinite(): void, - - /** - * Use `.toBePositive` when checking if a value is a positive `Number`. - */ - toBePositive(): void, - - /** - * Use `.toBeNegative` when checking if a value is a negative `Number`. - */ - toBeNegative(): void, - - /** - * Use `.toBeEven` when checking if a value is an even `Number`. - */ - toBeEven(): void, - - /** - * Use `.toBeOdd` when checking if a value is an odd `Number`. - */ - toBeOdd(): void, - - /** - * Use `.toBeWithin` when checking if a number is in between the given bounds of: start (inclusive) and end (exclusive). - * - * @param {Number} start - * @param {Number} end - */ - toBeWithin(start: number, end: number): void, - - /** - * Use `.toBeObject` when checking if a value is an `Object`. - */ - toBeObject(): void, - - /** - * Use `.toContainKey` when checking if an object contains the provided key. - * - * @param {String} key - */ - toContainKey(key: string): void, - - /** - * Use `.toContainKeys` when checking if an object has all of the provided keys. - * - * @param {Array.} keys - */ - toContainKeys(keys: Array): void, - - /** - * Use `.toContainAllKeys` when checking if an object only contains all of the provided keys. - * - * @param {Array.} keys - */ - toContainAllKeys(keys: Array): void, - - /** - * Use `.toContainAnyKeys` when checking if an object contains at least one of the provided keys. - * - * @param {Array.} keys - */ - toContainAnyKeys(keys: Array): void, - - /** - * Use `.toContainValue` when checking if an object contains the provided value. - * - * @param {*} value - */ - toContainValue(value: any): void, - - /** - * Use `.toContainValues` when checking if an object contains all of the provided values. - * - * @param {Array.<*>} values - */ - toContainValues(values: Array): void, - - /** - * Use `.toContainAllValues` when checking if an object only contains all of the provided values. 
- * - * @param {Array.<*>} values - */ - toContainAllValues(values: Array): void, - - /** - * Use `.toContainAnyValues` when checking if an object contains at least one of the provided values. - * - * @param {Array.<*>} values - */ - toContainAnyValues(values: Array): void, - - /** - * Use `.toContainEntry` when checking if an object contains the provided entry. - * - * @param {Array.} entry - */ - toContainEntry(entry: [string, string]): void, - - /** - * Use `.toContainEntries` when checking if an object contains all of the provided entries. - * - * @param {Array.>} entries - */ - toContainEntries(entries: Array<[string, string]>): void, - - /** - * Use `.toContainAllEntries` when checking if an object only contains all of the provided entries. - * - * @param {Array.>} entries - */ - toContainAllEntries(entries: Array<[string, string]>): void, - - /** - * Use `.toContainAnyEntries` when checking if an object contains at least one of the provided entries. - * - * @param {Array.>} entries - */ - toContainAnyEntries(entries: Array<[string, string]>): void, - - /** - * Use `.toBeExtensible` when checking if an object is extensible. - */ - toBeExtensible(): void, - - /** - * Use `.toBeFrozen` when checking if an object is frozen. - */ - toBeFrozen(): void, - - /** - * Use `.toBeSealed` when checking if an object is sealed. - */ - toBeSealed(): void, - - /** - * Use `.toBeString` when checking if a value is a `String`. - */ - toBeString(): void, - - /** - * Use `.toEqualCaseInsensitive` when checking if a string is equal (===) to another ignoring the casing of both strings. - * - * @param {String} string - */ - toEqualCaseInsensitive(string: string): void, - - /** - * Use `.toStartWith` when checking if a `String` starts with a given `String` prefix. - * - * @param {String} prefix - */ - toStartWith(prefix: string): void, - - /** - * Use `.toEndWith` when checking if a `String` ends with a given `String` suffix. 
- * - * @param {String} suffix - */ - toEndWith(suffix: string): void, - - /** - * Use `.toInclude` when checking if a `String` includes the given `String` substring. - * - * @param {String} substring - */ - toInclude(substring: string): void, - - /** - * Use `.toIncludeRepeated` when checking if a `String` includes the given `String` substring the correct number of times. - * - * @param {String} substring - * @param {Number} times - */ - toIncludeRepeated(substring: string, times: number): void, - - /** - * Use `.toIncludeMultiple` when checking if a `String` includes all of the given substrings. - * - * @param {Array.} substring - */ - toIncludeMultiple(substring: Array): void, - ... -}; - -interface JestExpectType { - not: JestExpectType & - EnzymeMatchersType & - DomTestingLibraryType & - JestJQueryMatchersType & - JestStyledComponentsMatchersType & - JestExtendedMatchersType; - /** - * If you have a mock function, you can use .lastCalledWith to test what - * arguments it was last called with. - */ - lastCalledWith(...args: Array): void; - /** - * toBe just checks that a value is what you expect. It uses === to check - * strict equality. - */ - toBe(value: any): void; - /** - * Use .toBeCalledWith to ensure that a mock function was called with - * specific arguments. - */ - toBeCalledWith(...args: Array): void; - /** - * Using exact equality with floating point numbers is a bad idea. Rounding - * means that intuitive things fail. - */ - toBeCloseTo(num: number, delta: any): void; - /** - * Use .toBeDefined to check that a variable is not undefined. - */ - toBeDefined(): void; - /** - * Use .toBeFalsy when you don't care what a value is, you just want to - * ensure a value is false in a boolean context. - */ - toBeFalsy(): void; - /** - * To compare floating point numbers, you can use toBeGreaterThan. - */ - toBeGreaterThan(number: number): void; - /** - * To compare floating point numbers, you can use toBeGreaterThanOrEqual. 
- */ - toBeGreaterThanOrEqual(number: number): void; - /** - * To compare floating point numbers, you can use toBeLessThan. - */ - toBeLessThan(number: number): void; - /** - * To compare floating point numbers, you can use toBeLessThanOrEqual. - */ - toBeLessThanOrEqual(number: number): void; - /** - * Use .toBeInstanceOf(Class) to check that an object is an instance of a - * class. - */ - toBeInstanceOf(cls: Class<*>): void; - /** - * .toBeNull() is the same as .toBe(null) but the error messages are a bit - * nicer. - */ - toBeNull(): void; - /** - * Use .toBeTruthy when you don't care what a value is, you just want to - * ensure a value is true in a boolean context. - */ - toBeTruthy(): void; - /** - * Use .toBeUndefined to check that a variable is undefined. - */ - toBeUndefined(): void; - /** - * Use .toContain when you want to check that an item is in a list. For - * testing the items in the list, this uses ===, a strict equality check. - */ - toContain(item: any): void; - /** - * Use .toContainEqual when you want to check that an item is in a list. For - * testing the items in the list, this matcher recursively checks the - * equality of all fields, rather than checking for object identity. - */ - toContainEqual(item: any): void; - /** - * Use .toEqual when you want to check that two objects have the same value. - * This matcher recursively checks the equality of all fields, rather than - * checking for object identity. - */ - toEqual(value: any): void; - /** - * Use .toHaveBeenCalled to ensure that a mock function got called. - */ - toHaveBeenCalled(): void; - toBeCalled(): void; - /** - * Use .toHaveBeenCalledTimes to ensure that a mock function got called exact - * number of times. 
- */ - toHaveBeenCalledTimes(number: number): void; - toBeCalledTimes(number: number): void; - /** - * - */ - toHaveBeenNthCalledWith(nthCall: number, ...args: Array): void; - nthCalledWith(nthCall: number, ...args: Array): void; - /** - * - */ - toHaveReturned(): void; - toReturn(): void; - /** - * - */ - toHaveReturnedTimes(number: number): void; - toReturnTimes(number: number): void; - /** - * - */ - toHaveReturnedWith(value: any): void; - toReturnWith(value: any): void; - /** - * - */ - toHaveLastReturnedWith(value: any): void; - lastReturnedWith(value: any): void; - /** - * - */ - toHaveNthReturnedWith(nthCall: number, value: any): void; - nthReturnedWith(nthCall: number, value: any): void; - /** - * Use .toHaveBeenCalledWith to ensure that a mock function was called with - * specific arguments. - */ - toHaveBeenCalledWith(...args: Array): void; - /** - * Use .toHaveBeenLastCalledWith to ensure that a mock function was last called - * with specific arguments. - */ - toHaveBeenLastCalledWith(...args: Array): void; - /** - * Check that an object has a .length property and it is set to a certain - * numeric value. - */ - toHaveLength(number: number): void; - /** - * - */ - toHaveProperty(propPath: string, value?: any): void; - /** - * Use .toMatch to check that a string matches a regular expression or string. - */ - toMatch(regexpOrString: RegExp | string): void; - /** - * Use .toMatchObject to check that a javascript object matches a subset of the properties of an object. - */ - toMatchObject(object: Object | Array): void; - /** - * Use .toStrictEqual to check that a javascript object matches a subset of the properties of an object. - */ - toStrictEqual(value: any): void; - /** - * This ensures that an Object matches the most recent snapshot. - */ - toMatchSnapshot( - propertyMatchers?: {[key: string]: JestAsymmetricEqualityType, ...}, - name?: string, - ): void; - /** - * This ensures that an Object matches the most recent snapshot. 
- */ - toMatchSnapshot(name: string): void; - - toMatchInlineSnapshot(snapshot?: string): void; - toMatchInlineSnapshot( - propertyMatchers?: {[key: string]: JestAsymmetricEqualityType, ...}, - snapshot?: string, - ): void; - /** - * Use .toThrow to test that a function throws when it is called. - * If you want to test that a specific error gets thrown, you can provide an - * argument to toThrow. The argument can be a string for the error message, - * a class for the error, or a regex that should match the error. - * - * Alias: .toThrowError - */ - toThrow(message?: string | Error | Class | RegExp): void; - toThrowError(message?: string | Error | Class | RegExp): void; - /** - * Use .toThrowErrorMatchingSnapshot to test that a function throws a error - * matching the most recent snapshot when it is called. - */ - toThrowErrorMatchingSnapshot(): void; - toThrowErrorMatchingInlineSnapshot(snapshot?: string): void; -} - -type JestObjectType = { - /** - * Disables automatic mocking in the module loader. - * - * After this method is called, all `require()`s will return the real - * versions of each module (rather than a mocked version). - */ - disableAutomock(): JestObjectType, - /** - * An un-hoisted version of disableAutomock - */ - autoMockOff(): JestObjectType, - /** - * Enables automatic mocking in the module loader. - */ - enableAutomock(): JestObjectType, - /** - * An un-hoisted version of enableAutomock - */ - autoMockOn(): JestObjectType, - /** - * Clears the mock.calls and mock.instances properties of all mocks. - * Equivalent to calling .mockClear() on every mocked function. - */ - clearAllMocks(): JestObjectType, - /** - * Resets the state of all mocks. Equivalent to calling .mockReset() on every - * mocked function. - */ - resetAllMocks(): JestObjectType, - /** - * Restores all mocks back to their original value. - */ - restoreAllMocks(): JestObjectType, - /** - * Removes any pending timers from the timer system. 
- */ - clearAllTimers(): void, - /** - * The same as `mock` but not moved to the top of the expectation by - * babel-jest. - */ - doMock(moduleName: string, moduleFactory?: any): JestObjectType, - /** - * The same as `unmock` but not moved to the top of the expectation by - * babel-jest. - */ - dontMock(moduleName: string): JestObjectType, - /** - * Returns a new, unused mock function. Optionally takes a mock - * implementation. - */ - fn, TReturn>( - implementation?: (...args: TArguments) => TReturn, - ): JestMockFn, - /** - * Determines if the given function is a mocked function. - */ - isMockFunction(fn: Function): boolean, - /** - * Given the name of a module, use the automatic mocking system to generate a - * mocked version of the module for you. - */ - genMockFromModule(moduleName: string): any, - /** - * Mocks a module with an auto-mocked version when it is being required. - * - * The second argument can be used to specify an explicit module factory that - * is being run instead of using Jest's automocking feature. - * - * The third argument can be used to create virtual mocks -- mocks of modules - * that don't exist anywhere in the system. - */ - mock( - moduleName: string, - moduleFactory?: any, - options?: Object, - ): JestObjectType, - /** - * Returns the actual module instead of a mock, bypassing all checks on - * whether the module should receive a mock implementation or not. - */ - requireActual(m: $Flow$ModuleRef | string): T, - /** - * Returns a mock module instead of the actual module, bypassing all checks - * on whether the module should be required normally or not. - */ - requireMock(moduleName: string): any, - /** - * Resets the module registry - the cache of all required modules. This is - * useful to isolate modules where local state might conflict between tests. - */ - resetModules(): JestObjectType, - /** - * Exhausts the micro-task queue (usually interfaced in node via - * process.nextTick). 
- */ - runAllTicks(): void, - /** - * Exhausts the macro-task queue (i.e., all tasks queued by setTimeout(), - * setInterval(), and setImmediate()). - */ - runAllTimers(): void, - /** - * Exhausts all tasks queued by setImmediate(). - */ - runAllImmediates(): void, - /** - * Executes only the macro task queue (i.e. all tasks queued by setTimeout() - * or setInterval() and setImmediate()). - */ - advanceTimersByTime(msToRun: number): void, - /** - * Executes only the macro task queue (i.e. all tasks queued by setTimeout() - * or setInterval() and setImmediate()). - * - * Renamed to `advanceTimersByTime`. - */ - runTimersToTime(msToRun: number): void, - /** - * Executes only the macro-tasks that are currently pending (i.e., only the - * tasks that have been queued by setTimeout() or setInterval() up to this - * point) - */ - runOnlyPendingTimers(): void, - /** - * Explicitly supplies the mock object that the module system should return - * for the specified module. Note: It is recommended to use jest.mock() - * instead. - */ - setMock(moduleName: string, moduleExports: any): JestObjectType, - /** - * Indicates that the module system should never return a mocked version of - * the specified module from require() (e.g. that it should always return the - * real module). - */ - unmock(moduleName: string): JestObjectType, - /** - * Instructs Jest to use fake versions of the standard timer functions - * (setTimeout, setInterval, clearTimeout, clearInterval, nextTick, - * setImmediate and clearImmediate). - */ - useFakeTimers(type?: 'legacy' | 'modern'): JestObjectType, - /** - * Instructs Jest to use the real versions of the standard timer functions. - */ - useRealTimers(): JestObjectType, - /** - * Creates a mock function similar to jest.fn but also tracks calls to - * object[methodName]. 
- */ - spyOn( - object: Object, - methodName: string, - accessType?: 'get' | 'set', - ): JestMockFn, - /** - * Set the default timeout interval for tests and before/after hooks in milliseconds. - * Note: The default timeout interval is 5 seconds if this method is not called. - */ - setTimeout(timeout: number): JestObjectType, - ... -}; - -type JestSpyType = { - calls: JestCallsType, - ... -}; - -/** Runs this function after every test inside this context */ -declare function afterEach( - fn: (done: () => void) => ?Promise, - timeout?: number, -): void; -/** Runs this function before every test inside this context */ -declare function beforeEach( - fn: (done: () => void) => ?Promise, - timeout?: number, -): void; -/** Runs this function after all tests have finished inside this context */ -declare function afterAll( - fn: (done: () => void) => ?Promise, - timeout?: number, -): void; -/** Runs this function before any tests have started inside this context */ -declare function beforeAll( - fn: (done: () => void) => ?Promise, - timeout?: number, -): void; - -/** A context for grouping tests together */ -declare var describe: { - /** - * Creates a block that groups together several related tests in one "test suite" - */ - (name: JestTestName, fn: () => void): void, - - /** - * Only run this describe block - */ - only(name: JestTestName, fn: () => void): void, - - /** - * Skip running this describe block - */ - skip(name: JestTestName, fn: () => void): void, - - /** - * each runs this test against array of argument arrays per each run - * - * @param {table} table of Test - */ - each( - table: $ReadOnlyArray | mixed>, - ): ( - name: JestTestName, - fn?: (...args: Array) => ?Promise, - ) => void, - ... -}; - -/** An individual test unit */ -declare var it: { - /** - * An individual test unit - * - * @param {JestTestName} Name of Test - * @param {Function} Test - * @param {number} Timeout for the test, in milliseconds. 
- */ - ( - name: JestTestName, - fn?: (done: () => void) => ?Promise, - timeout?: number, - ): void, - /** - * each runs this test against array of argument arrays per each run - * - * @param {table} table of Test - */ - each( - table: $ReadOnlyArray | mixed>, - ): ( - name: JestTestName, - fn?: (...args: Array) => ?Promise, - ) => void, - /** - * Only run this test - * - * @param {JestTestName} Name of Test - * @param {Function} Test - * @param {number} Timeout for the test, in milliseconds. - */ - only( - name: JestTestName, - fn?: (done: () => void) => ?Promise, - timeout?: number, - ): { - each( - table: $ReadOnlyArray | mixed>, - ): ( - name: JestTestName, - fn?: (...args: Array) => ?Promise, - ) => void, - ... - }, - /** - * Skip running this test - * - * @param {JestTestName} Name of Test - * @param {Function} Test - * @param {number} Timeout for the test, in milliseconds. - */ - skip( - name: JestTestName, - fn?: (done: () => void) => ?Promise, - timeout?: number, - ): void, - /** - * Run the test concurrently - * - * @param {JestTestName} Name of Test - * @param {Function} Test - * @param {number} Timeout for the test, in milliseconds. - */ - concurrent( - name: JestTestName, - fn?: (done: () => void) => ?Promise, - timeout?: number, - ): void, - ... 
-}; -declare function fit( - name: JestTestName, - fn: (done: () => void) => ?Promise, - timeout?: number, -): void; -/** An individual test unit */ -declare var test: typeof it; -/** A disabled group of tests */ -declare var xdescribe: typeof describe; -/** A focused group of tests */ -declare var fdescribe: typeof describe; -/** A disabled individual test */ -declare var xit: typeof it; -/** A disabled individual test */ -declare var xtest: typeof it; - -type JestPrettyFormatColors = { - comment: {close: string, open: string, ...}, - content: {close: string, open: string, ...}, - prop: {close: string, open: string, ...}, - tag: {close: string, open: string, ...}, - value: {close: string, open: string, ...}, - ... -}; - -type JestPrettyFormatIndent = string => string; -type JestPrettyFormatPrint = any => string; - -type JestPrettyFormatOptions = {| - callToJSON: boolean, - edgeSpacing: string, - escapeRegex: boolean, - highlight: boolean, - indent: number, - maxDepth: number, - min: boolean, - plugins: JestPrettyFormatPlugins, - printFunctionName: boolean, - spacing: string, - theme: {| - comment: string, - content: string, - prop: string, - tag: string, - value: string, - |}, -|}; - -type JestPrettyFormatPlugin = { - print: ( - val: any, - serialize: JestPrettyFormatPrint, - indent: JestPrettyFormatIndent, - opts: JestPrettyFormatOptions, - colors: JestPrettyFormatColors, - ) => string, - test: any => boolean, - ... 
-}; - -type JestPrettyFormatPlugins = Array; - -/** The expect function is used every time you want to test a value */ -declare var expect: { - /** The object that you want to make assertions against */ - ( - value: any, - ): JestExpectType & - JestPromiseType & - EnzymeMatchersType & - DomTestingLibraryType & - JestJQueryMatchersType & - JestStyledComponentsMatchersType & - JestExtendedMatchersType, - - /** Add additional Jasmine matchers to Jest's roster */ - extend(matchers: {[name: string]: JestMatcher, ...}): void, - /** Add a module that formats application-specific data structures. */ - addSnapshotSerializer(pluginModule: JestPrettyFormatPlugin): void, - assertions(expectedAssertions: number): void, - hasAssertions(): void, - any(value: mixed): JestAsymmetricEqualityType, - anything(): any, - arrayContaining(value: $ReadOnlyArray): Array, - objectContaining(value: Object): Object, - /** Matches any received string that contains the exact expected string. */ - stringContaining(value: string): string, - stringMatching(value: string | RegExp): string, - not: { - arrayContaining: (value: $ReadOnlyArray) => Array, - objectContaining: (value: {...}) => Object, - stringContaining: (value: string) => string, - stringMatching: (value: string | RegExp) => string, - ... - }, - ... -}; - -// TODO handle return type -// http://jasmine.github.io/2.4/introduction.html#section-Spies -declare function spyOn(value: mixed, method: string): Object; - -/** Holds all functions related to manipulating test runner */ -declare var jest: JestObjectType; - -/** - * https://jasmine.github.io/2.4/custom_reporter.html - */ -type JasmineReporter = { - jasmineStarted?: (suiteInfo: mixed) => void, - suiteStarted?: (result: mixed) => void, - specStarted?: (result: mixed) => void, - specDone?: (result: mixed) => void, - suiteDone?: (result: mixed) => void, - ... 
-}; - -/** - * The global Jasmine object, this is generally not exposed as the public API, - * using features inside here could break in later versions of Jest. - */ -declare var jasmine: { - DEFAULT_TIMEOUT_INTERVAL: number, - any(value: mixed): JestAsymmetricEqualityType, - anything(): any, - arrayContaining(value: $ReadOnlyArray): Array, - clock(): JestClockType, - createSpy(name: string): JestSpyType, - createSpyObj( - baseName: string, - methodNames: Array, - ): {[methodName: string]: JestSpyType, ...}, - getEnv(): {addReporter: (jasmineReporter: JasmineReporter) => void, ...}, - objectContaining(value: Object): Object, - stringMatching(value: string): string, - ... -}; diff --git a/flow-typed/npm/jest_v29.x.x.js b/flow-typed/npm/jest_v29.x.x.js new file mode 100644 index 0000000000000..da59fea96cf77 --- /dev/null +++ b/flow-typed/npm/jest_v29.x.x.js @@ -0,0 +1,1282 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + * @format + * @oncall relay + */ + +// Copied from https://github.com/facebook/react-native/blob/93ce46113cf216f831448b1343caefb59f46e313/packages/react-native/flow/jest.js + +// Modified from https://raw.githubusercontent.com/flow-typed/flow-typed/master/definitions/npm/jest_v29.x.x/flow_v0.134.x-/jest_v29.x.x.js +// Modifications are explained inline by comments beginning with `// MODIFIED`. + +// MODIFIED: Added ESLint suppression comment - no-unused-vars doesn't understand declaration files +/* eslint-disable no-unused-vars */ + +type JestMockFn, TReturn> = { + (...args: TArguments): TReturn, + /** + * An object for introspecting mock calls + */ + mock: { + /** + * An array that represents all calls that have been made into this mock + * function. Each call is represented by an array of arguments that were + * passed during the call. 
+ */ + calls: Array, + /** + * An array containing the call arguments of the last call that was made + * to this mock function. If the function was not called, it will return + * undefined. + */ + lastCall: TArguments, + /** + * An array that contains all the object instances that have been + * instantiated from this mock function. + */ + instances: Array, + /** + * An array that contains all the object results that have been + * returned by this mock function call + */ + results: Array<{ + isThrow: boolean, + value: TReturn, + ... + }>, + ... + }, + /** + * Resets all information stored in the mockFn.mock.calls and + * mockFn.mock.instances arrays. Often this is useful when you want to clean + * up a mock's usage data between two assertions. + */ + mockClear(): void, + /** + * Resets all information stored in the mock. This is useful when you want to + * completely restore a mock back to its initial state. + */ + mockReset(): void, + /** + * Removes the mock and restores the initial implementation. This is useful + * when you want to mock functions in certain test cases and restore the + * original implementation in others. Beware that mockFn.mockRestore only + * works when mock was created with jest.spyOn. Thus you have to take care of + * restoration yourself when manually assigning jest.fn(). + */ + mockRestore(): void, + /** + * Accepts a function that should be used as the implementation of the mock. + * The mock itself will still record all calls that go into and instances + * that come from itself -- the only difference is that the implementation + * will also be executed when the mock is called. + */ + mockImplementation( + fn: (...args: TArguments) => TReturn, + ): JestMockFn, + /** + * Accepts a function that will be used as an implementation of the mock for + * one call to the mocked function. Can be chained so that multiple function + * calls produce different results. 
+ */ + mockImplementationOnce( + fn: (...args: TArguments) => TReturn, + ): JestMockFn, + /** + * Accepts a string to use in test result output in place of "jest.fn()" to + * indicate which mock function is being referenced. + */ + mockName(name: string): JestMockFn, + /** + * Just a simple sugar function for returning `this` + */ + mockReturnThis(): void, + /** + * Accepts a value that will be returned whenever the mock function is called. + */ + mockReturnValue(value: TReturn): JestMockFn, + /** + * Sugar for only returning a value once inside your mock + */ + mockReturnValueOnce(value: TReturn): JestMockFn, + /** + * Sugar for jest.fn().mockImplementation(() => Promise.resolve(value)) + */ + mockResolvedValue(value: TReturn): JestMockFn>, + /** + * Sugar for jest.fn().mockImplementationOnce(() => Promise.resolve(value)) + */ + mockResolvedValueOnce( + value: TReturn, + ): JestMockFn>, + /** + * Sugar for jest.fn().mockImplementation(() => Promise.reject(value)) + */ + mockRejectedValue(value: TReturn): JestMockFn>, + /** + * Sugar for jest.fn().mockImplementationOnce(() => Promise.reject(value)) + */ + mockRejectedValueOnce(value: TReturn): JestMockFn>, + ... +}; + +type JestAsymmetricEqualityType = { + /** + * A custom Jasmine equality tester + */ + asymmetricMatch(value: mixed): boolean, + ... +}; + +type JestCallsType = { + allArgs(): mixed, + all(): mixed, + any(): boolean, + count(): number, + first(): mixed, + mostRecent(): mixed, + reset(): void, + ... +}; + +type JestClockType = { + install(): void, + mockDate(date: Date): void, + tick(milliseconds?: number): void, + uninstall(): void, + ... +}; + +type JestMatcherResult = { + message?: string | (() => string), + pass: boolean, + ... +}; + +type JestMatcher = ( + received: any, + ...actual: Array +) => JestMatcherResult | Promise; + +type JestPromiseType = { + /** + * Use rejects to unwrap the reason of a rejected promise so any other + * matcher can be chained. 
If the promise is fulfilled the assertion fails. + */ + rejects: JestExpectType, + /** + * Use resolves to unwrap the value of a fulfilled promise so any other + * matcher can be chained. If the promise is rejected the assertion fails. + */ + resolves: JestExpectType, + ... +}; + +/** + * Jest allows functions and classes to be used as test names in test() and + * describe() + */ +type JestTestName = string | Function; + +type FakeableAPI = + | 'Date' + | 'hrtime' + | 'nextTick' + | 'performance' + | 'queueMicrotask' + | 'requestAnimationFrame' + | 'cancelAnimationFrame' + | 'requestIdleCallback' + | 'cancelIdleCallback' + | 'setImmediate' + | 'clearImmediate' + | 'setInterval' + | 'clearInterval' + | 'setTimeout' + | 'clearTimeout'; + +type FakeTimersConfig = { + advanceTimers?: boolean | number, + doNotFake?: Array, + now?: number | Date, + timerLimit?: number, + legacyFakeTimers?: boolean, + ... +}; + +/** + * Plugin: jest-styled-components + */ + +type JestStyledComponentsMatcherValue = + | string + | JestAsymmetricEqualityType + | RegExp + | typeof undefined; + +type JestStyledComponentsMatcherOptions = { + media?: string, + modifier?: string, + supports?: string, + ... +}; + +type JestStyledComponentsMatchersType = { + toHaveStyleRule( + property: string, + value: JestStyledComponentsMatcherValue, + options?: JestStyledComponentsMatcherOptions, + ): void, + ... 
+}; + +/** + * Plugin: jest-enzyme + */ +type EnzymeMatchersType = { + // 5.x + toBeEmpty(): void, + toBePresent(): void, + // 6.x + toBeChecked(): void, + toBeDisabled(): void, + toBeEmptyRender(): void, + toContainMatchingElement(selector: string): void, + toContainMatchingElements(n: number, selector: string): void, + toContainExactlyOneMatchingElement(selector: string): void, + toContainReact(element: React$Element): void, + toExist(): void, + toHaveClassName(className: string): void, + toHaveHTML(html: string): void, + toHaveProp: ((propKey: string, propValue?: any) => void) & + ((props: {...}) => void), + toHaveRef(refName: string): void, + toHaveState: ((stateKey: string, stateValue?: any) => void) & + ((state: {...}) => void), + toHaveStyle: ((styleKey: string, styleValue?: any) => void) & + ((style: {...}) => void), + toHaveTagName(tagName: string): void, + toHaveText(text: string): void, + toHaveValue(value: any): void, + toIncludeText(text: string): void, + toMatchElement( + element: React$Element, + options?: {|ignoreProps?: boolean, verbose?: boolean|}, + ): void, + toMatchSelector(selector: string): void, + // 7.x + toHaveDisplayName(name: string): void, + ... 
+}; + +// DOM testing library extensions (jest-dom) +// https://github.com/testing-library/jest-dom +type DomTestingLibraryType = { + /** + * @deprecated + */ + toBeInTheDOM(container?: HTMLElement): void, + + // 4.x + toBeInTheDocument(): void, + toBeVisible(): void, + toBeEmpty(): void, + toBeDisabled(): void, + toBeEnabled(): void, + toBeInvalid(): void, + toBeRequired(): void, + toBeValid(): void, + toContainElement(element: HTMLElement | null): void, + toContainHTML(htmlText: string): void, + toHaveAttribute(attr: string, value?: any): void, + toHaveClass(...classNames: string[]): void, + toHaveFocus(): void, + toHaveFormValues(expectedValues: {[name: string]: any, ...}): void, + toHaveStyle(css: string | {[name: string]: any, ...}): void, + toHaveTextContent( + text: string | RegExp, + options?: {|normalizeWhitespace: boolean|}, + ): void, + toHaveValue(value?: string | string[] | number): void, + + // 5.x + toHaveDisplayValue(value: string | string[]): void, + toBeChecked(): void, + toBeEmptyDOMElement(): void, + toBePartiallyChecked(): void, + toHaveDescription(text: string | RegExp): void, + ... +}; + +// Jest JQuery Matchers: https://github.com/unindented/custom-jquery-matchers +type JestJQueryMatchersType = { + toExist(): void, + toHaveLength(len: number): void, + toHaveId(id: string): void, + toHaveClass(className: string): void, + toHaveTag(tag: string): void, + toHaveAttr(key: string, val?: any): void, + toHaveProp(key: string, val?: any): void, + toHaveText(text: string | RegExp): void, + toHaveData(key: string, val?: any): void, + toHaveValue(val: any): void, + toHaveCss(css: {[key: string]: any, ...}): void, + toBeChecked(): void, + toBeDisabled(): void, + toBeEmpty(): void, + toBeHidden(): void, + toBeSelected(): void, + toBeVisible(): void, + toBeFocused(): void, + toBeInDom(): void, + toBeMatchedBy(sel: string): void, + toHaveDescendant(sel: string): void, + toHaveDescendantWithText(sel: string, text: string | RegExp): void, + ... 
+}; + +// Jest Extended Matchers: https://github.com/jest-community/jest-extended +type JestExtendedMatchersType = { + /** + * Note: Currently unimplemented + * Passing assertion + * + * @param {String} message + */ + // pass(message: string): void; + + /** + * Note: Currently unimplemented + * Failing assertion + * + * @param {String} message + */ + // fail(message: string): void; + + /** + * Use .toBeEmpty when checking if a String '', Array [] or Object {} is empty. + */ + toBeEmpty(): void, + /** + * Use .toBeOneOf when checking if a value is a member of a given Array. + * @param {Array.<*>} members + */ + toBeOneOf(members: any[]): void, + /** + * Use `.toBeNil` when checking a value is `null` or `undefined`. + */ + toBeNil(): void, + /** + * Use `.toSatisfy` when you want to use a custom matcher by supplying a predicate function that returns a `Boolean`. + * @param {Function} predicate + */ + toSatisfy(predicate: (n: any) => boolean): void, + /** + * Use `.toBeArray` when checking if a value is an `Array`. + */ + toBeArray(): void, + /** + * Use `.toBeArrayOfSize` when checking if a value is an `Array` of size x. + * @param {Number} x + */ + toBeArrayOfSize(x: number): void, + /** + * Use `.toIncludeAllMembers` when checking if an `Array` contains all of the same members of a given set. + * @param {Array.<*>} members + */ + toIncludeAllMembers(members: any[]): void, + /** + * Use `.toIncludeAnyMembers` when checking if an `Array` contains any of the members of a given set. + * @param {Array.<*>} members + */ + toIncludeAnyMembers(members: any[]): void, + /** + * Use `.toSatisfyAll` when you want to use a custom matcher by supplying a predicate function that returns a `Boolean` for all values in an array. + * @param {Function} predicate + */ + toSatisfyAll(predicate: (n: any) => boolean): void, + /** + * Use `.toBeBoolean` when checking if a value is a `Boolean`. 
+ */ + toBeBoolean(): void, + /** + * Use `.toBeTrue` when checking a value is equal (===) to `true`. + */ + toBeTrue(): void, + /** + * Use `.toBeFalse` when checking a value is equal (===) to `false`. + */ + toBeFalse(): void, + /** + * Use .toBeDate when checking if a value is a Date. + */ + toBeDate(): void, + /** + * Use `.toBeFunction` when checking if a value is a `Function`. + */ + toBeFunction(): void, + /** + * Use `.toHaveBeenCalledBefore` when checking if a `Mock` was called before another `Mock`. + * + * Note: Required Jest version >22 + * Note: Your mock functions will have to be asynchronous to cause the timestamps inside of Jest to occur in a differentJS event loop, otherwise the mock timestamps will all be the same + * + * @param {Mock} mock + */ + toHaveBeenCalledBefore(mock: JestMockFn): void, + /** + * Use `.toBeNumber` when checking if a value is a `Number`. + */ + toBeNumber(): void, + /** + * Use `.toBeNaN` when checking a value is `NaN`. + */ + toBeNaN(): void, + /** + * Use `.toBeFinite` when checking if a value is a `Number`, not `NaN` or `Infinity`. + */ + toBeFinite(): void, + /** + * Use `.toBePositive` when checking if a value is a positive `Number`. + */ + toBePositive(): void, + /** + * Use `.toBeNegative` when checking if a value is a negative `Number`. + */ + toBeNegative(): void, + /** + * Use `.toBeEven` when checking if a value is an even `Number`. + */ + toBeEven(): void, + /** + * Use `.toBeOdd` when checking if a value is an odd `Number`. + */ + toBeOdd(): void, + /** + * Use `.toBeWithin` when checking if a number is in between the given bounds of: start (inclusive) and end (exclusive). + * + * @param {Number} start + * @param {Number} end + */ + toBeWithin(start: number, end: number): void, + /** + * Use `.toBeObject` when checking if a value is an `Object`. + */ + toBeObject(): void, + /** + * Use `.toContainKey` when checking if an object contains the provided key. 
+ * + * @param {String} key + */ + toContainKey(key: string): void, + /** + * Use `.toContainKeys` when checking if an object has all of the provided keys. + * + * @param {Array.} keys + */ + toContainKeys(keys: string[]): void, + /** + * Use `.toContainAllKeys` when checking if an object only contains all of the provided keys. + * + * @param {Array.} keys + */ + toContainAllKeys(keys: string[]): void, + /** + * Use `.toContainAnyKeys` when checking if an object contains at least one of the provided keys. + * + * @param {Array.} keys + */ + toContainAnyKeys(keys: string[]): void, + /** + * Use `.toContainValue` when checking if an object contains the provided value. + * + * @param {*} value + */ + toContainValue(value: any): void, + /** + * Use `.toContainValues` when checking if an object contains all of the provided values. + * + * @param {Array.<*>} values + */ + toContainValues(values: any[]): void, + /** + * Use `.toContainAllValues` when checking if an object only contains all of the provided values. + * + * @param {Array.<*>} values + */ + toContainAllValues(values: any[]): void, + /** + * Use `.toContainAnyValues` when checking if an object contains at least one of the provided values. + * + * @param {Array.<*>} values + */ + toContainAnyValues(values: any[]): void, + /** + * Use `.toContainEntry` when checking if an object contains the provided entry. + * + * @param {Array.} entry + */ + toContainEntry(entry: [string, string]): void, + /** + * Use `.toContainEntries` when checking if an object contains all of the provided entries. + * + * @param {Array.>} entries + */ + toContainEntries(entries: [string, string][]): void, + /** + * Use `.toContainAllEntries` when checking if an object only contains all of the provided entries. + * + * @param {Array.>} entries + */ + toContainAllEntries(entries: [string, string][]): void, + /** + * Use `.toContainAnyEntries` when checking if an object contains at least one of the provided entries. 
+ * + * @param {Array.>} entries + */ + toContainAnyEntries(entries: [string, string][]): void, + /** + * Use `.toBeExtensible` when checking if an object is extensible. + */ + toBeExtensible(): void, + /** + * Use `.toBeFrozen` when checking if an object is frozen. + */ + toBeFrozen(): void, + /** + * Use `.toBeSealed` when checking if an object is sealed. + */ + toBeSealed(): void, + /** + * Use `.toBeString` when checking if a value is a `String`. + */ + toBeString(): void, + /** + * Use `.toEqualCaseInsensitive` when checking if a string is equal (===) to another ignoring the casing of both strings. + * + * @param {String} string + */ + toEqualCaseInsensitive(string: string): void, + /** + * Use `.toStartWith` when checking if a `String` starts with a given `String` prefix. + * + * @param {String} prefix + */ + toStartWith(prefix: string): void, + /** + * Use `.toEndWith` when checking if a `String` ends with a given `String` suffix. + * + * @param {String} suffix + */ + toEndWith(suffix: string): void, + /** + * Use `.toInclude` when checking if a `String` includes the given `String` substring. + * + * @param {String} substring + */ + toInclude(substring: string): void, + /** + * Use `.toIncludeRepeated` when checking if a `String` includes the given `String` substring the correct number of times. + * + * @param {String} substring + * @param {Number} times + */ + toIncludeRepeated(substring: string, times: number): void, + /** + * Use `.toIncludeMultiple` when checking if a `String` includes all of the given substrings. + * + * @param {Array.} substring + */ + toIncludeMultiple(substring: string[]): void, + ... +}; + +// Diffing snapshot utility for Jest (snapshot-diff) +// https://github.com/jest-community/snapshot-diff +type SnapshotDiffType = { + /** + * Compare the difference between the actual in the `expect()` + * vs the object inside `valueB` with some extra options. 
+ */ + toMatchDiffSnapshot( + valueB: any, + options?: {| + expand?: boolean, + colors?: boolean, + contextLines?: number, + stablePatchmarks?: boolean, + aAnnotation?: string, + bAnnotation?: string, + |}, + testName?: string, + ): void, + ... +}; + +interface JestExpectType { + not: JestExpectType & + EnzymeMatchersType & + DomTestingLibraryType & + JestJQueryMatchersType & + JestStyledComponentsMatchersType & + JestExtendedMatchersType & + SnapshotDiffType; + /** + * If you have a mock function, you can use .lastCalledWith to test what + * arguments it was last called with. + */ + lastCalledWith(...args: Array): void; + /** + * toBe just checks that a value is what you expect. It uses === to check + * strict equality. + */ + toBe(value: any): void; + /** + * Use .toBeCalledWith to ensure that a mock function was called with + * specific arguments. + */ + toBeCalledWith(...args: Array): void; + /** + * Using exact equality with floating point numbers is a bad idea. Rounding + * means that intuitive things fail. + */ + toBeCloseTo(num: number, delta: any): void; + /** + * Use .toBeDefined to check that a variable is not undefined. + */ + toBeDefined(): void; + /** + * Use .toBeFalsy when you don't care what a value is, you just want to + * ensure a value is false in a boolean context. + */ + toBeFalsy(): void; + /** + * To compare floating point numbers, you can use toBeGreaterThan. + */ + toBeGreaterThan(number: number): void; + /** + * To compare floating point numbers, you can use toBeGreaterThanOrEqual. + */ + toBeGreaterThanOrEqual(number: number): void; + /** + * To compare floating point numbers, you can use toBeLessThan. + */ + toBeLessThan(number: number): void; + /** + * To compare floating point numbers, you can use toBeLessThanOrEqual. + */ + toBeLessThanOrEqual(number: number): void; + /** + * Use .toBeInstanceOf(Class) to check that an object is an instance of a + * class. 
+ */ + toBeInstanceOf(cls: Class): void; + /** + * .toBeNull() is the same as .toBe(null) but the error messages are a bit + * nicer. + */ + toBeNull(): void; + /** + * Use .toBeTruthy when you don't care what a value is, you just want to + * ensure a value is true in a boolean context. + */ + toBeTruthy(): void; + /** + * Use .toBeUndefined to check that a variable is undefined. + */ + toBeUndefined(): void; + /** + * Use .toContain when you want to check that an item is in a list. For + * testing the items in the list, this uses ===, a strict equality check. + */ + toContain(item: any): void; + /** + * Use .toContainEqual when you want to check that an item is in a list. For + * testing the items in the list, this matcher recursively checks the + * equality of all fields, rather than checking for object identity. + */ + toContainEqual(item: any): void; + /** + * Use .toEqual when you want to check that two objects have the same value. + * This matcher recursively checks the equality of all fields, rather than + * checking for object identity. + */ + toEqual(value: any): void; + /** + * Use .toHaveBeenCalled to ensure that a mock function got called. + */ + toHaveBeenCalled(): void; + toBeCalled(): void; + /** + * Use .toHaveBeenCalledTimes to ensure that a mock function got called exact + * number of times. 
+ */ + toHaveBeenCalledTimes(number: number): void; + toBeCalledTimes(number: number): void; + /** + * + */ + toHaveBeenNthCalledWith(nthCall: number, ...args: Array): void; + nthCalledWith(nthCall: number, ...args: Array): void; + /** + * + */ + toHaveReturned(): void; + toReturn(): void; + /** + * + */ + toHaveReturnedTimes(number: number): void; + toReturnTimes(number: number): void; + /** + * + */ + toHaveReturnedWith(value: any): void; + toReturnWith(value: any): void; + /** + * + */ + toHaveLastReturnedWith(value: any): void; + lastReturnedWith(value: any): void; + /** + * + */ + toHaveNthReturnedWith(nthCall: number, value: any): void; + nthReturnedWith(nthCall: number, value: any): void; + /** + * Use .toHaveBeenCalledWith to ensure that a mock function was called with + * specific arguments. + */ + toHaveBeenCalledWith(...args: Array): void; + toBeCalledWith(...args: Array): void; + /** + * Use .toHaveBeenLastCalledWith to ensure that a mock function was last called + * with specific arguments. + */ + toHaveBeenLastCalledWith(...args: Array): void; + lastCalledWith(...args: Array): void; + /** + * Check that an object has a .length property and it is set to a certain + * numeric value. + */ + toHaveLength(number: number): void; + /** + * + */ + toHaveProperty(propPath: string | $ReadOnlyArray, value?: any): void; + /** + * Use .toMatch to check that a string matches a regular expression or string. + */ + toMatch(regexpOrString: RegExp | string): void; + /** + * Use .toMatchObject to check that a javascript object matches a subset of the properties of an object. + */ + toMatchObject(object: Object | Array): void; + /** + * Use .toStrictEqual to check that a javascript object matches a subset of the properties of an object. + */ + toStrictEqual(value: any): void; + /** + * This ensures that an Object matches the most recent snapshot. 
+ */ + toMatchSnapshot(propertyMatchers?: any, name?: string): void; + /** + * This ensures that an Object matches the most recent snapshot. + */ + toMatchSnapshot(name: string): void; + + toMatchInlineSnapshot(snapshot?: string): void; + toMatchInlineSnapshot(propertyMatchers?: any, snapshot?: string): void; + /** + * Use .toThrow to test that a function throws when it is called. + * If you want to test that a specific error gets thrown, you can provide an + * argument to toThrow. The argument can be a string for the error message, + * a class for the error, or a regex that should match the error. + * + * Alias: .toThrowError + */ + toThrow(message?: string | Error | Class | RegExp): void; + toThrowError(message?: string | Error | Class | RegExp): void; + /** + * Use .toThrowErrorMatchingSnapshot to test that a function throws a error + * matching the most recent snapshot when it is called. + */ + toThrowErrorMatchingSnapshot(): void; + toThrowErrorMatchingInlineSnapshot(snapshot?: string): void; +} + +type JestObjectType = { + /** + * Disables automatic mocking in the module loader. + * + * After this method is called, all `require()`s will return the real + * versions of each module (rather than a mocked version). + */ + disableAutomock(): JestObjectType, + /** + * An un-hoisted version of disableAutomock + */ + autoMockOff(): JestObjectType, + /** + * Enables automatic mocking in the module loader. + */ + enableAutomock(): JestObjectType, + /** + * An un-hoisted version of enableAutomock + */ + autoMockOn(): JestObjectType, + /** + * Clears the mock.calls and mock.instances properties of all mocks. + * Equivalent to calling .mockClear() on every mocked function. + */ + clearAllMocks(): JestObjectType, + /** + * Resets the state of all mocks. Equivalent to calling .mockReset() on every + * mocked function. + */ + resetAllMocks(): JestObjectType, + /** + * Restores all mocks back to their original value. 
+ */ + restoreAllMocks(): JestObjectType, + /** + * Removes any pending timers from the timer system. + */ + clearAllTimers(): void, + /** + * Returns the number of fake timers still left to run. + */ + getTimerCount(): number, + /** + * Set the current system time used by fake timers. + * Simulates a user changing the system clock while your program is running. + * It affects the current time but it does not in itself cause + * e.g. timers to fire; they will fire exactly as they would have done + * without the call to jest.setSystemTime(). + */ + setSystemTime(now?: number | Date): void, + /** + * The same as `mock` but not moved to the top of the expectation by + * babel-jest. + */ + doMock(moduleName: string, moduleFactory?: any): JestObjectType, + /** + * The same as `unmock` but not moved to the top of the expectation by + * babel-jest. + */ + dontMock(moduleName: string): JestObjectType, + /** + * Returns a new, unused mock function. Optionally takes a mock + * implementation. + */ + // MODIFIED: Added defaults to type arguments. + fn = $ReadOnlyArray, TReturn = any>( + implementation?: (...args: TArguments) => TReturn, + ): JestMockFn, + /** + * Determines if the given function is a mocked function. + */ + isMockFunction(fn: Function): boolean, + /** + * Alias of `createMockFromModule`. + */ + genMockFromModule(moduleName: string): any, + /** + * Given the name of a module, use the automatic mocking system to generate a + * mocked version of the module for you. + */ + createMockFromModule(moduleName: string): any, + /** + * Mocks a module with an auto-mocked version when it is being required. + * + * The second argument can be used to specify an explicit module factory that + * is being run instead of using Jest's automocking feature. + * + * The third argument can be used to create virtual mocks -- mocks of modules + * that don't exist anywhere in the system. 
+ */ + mock( + moduleName: string, + moduleFactory?: any, + options?: Object, + ): JestObjectType, + /** + * Returns the actual module instead of a mock, bypassing all checks on + * whether the module should receive a mock implementation or not. + */ + requireActual(m: $Flow$ModuleRef | string): T, + /** + * Returns a mock module instead of the actual module, bypassing all checks + * on whether the module should be required normally or not. + */ + requireMock(moduleName: string): any, + /** + * Resets the module registry - the cache of all required modules. This is + * useful to isolate modules where local state might conflict between tests. + */ + resetModules(): JestObjectType, + /** + * Creates a sandbox registry for the modules that are loaded inside the + * callback function. This is useful to isolate specific modules for every + * test so that local module state doesn't conflict between tests. + */ + isolateModules(fn: () => void): JestObjectType, + /** + * Exhausts the micro-task queue (usually interfaced in node via + * process.nextTick). + */ + runAllTicks(): void, + /** + * Exhausts the macro-task queue (i.e., all tasks queued by setTimeout(), + * setInterval(), and setImmediate()). + */ + runAllTimers(): void, + /** + * Exhausts all tasks queued by setImmediate(). + */ + runAllImmediates(): void, + /** + * Executes only the macro task queue (i.e. all tasks queued by setTimeout() + * or setInterval() and setImmediate()). + */ + advanceTimersByTime(msToRun: number): void, + /** + * Executes only the macro-tasks that are currently pending (i.e., only the + * tasks that have been queued by setTimeout() or setInterval() up to this + * point) + */ + runOnlyPendingTimers(): void, + /** + * Explicitly supplies the mock object that the module system should return + * for the specified module. Note: It is recommended to use jest.mock() + * instead. 
+ */ + setMock(moduleName: string, moduleExports: any): JestObjectType, + /** + * Indicates that the module system should never return a mocked version of + * the specified module from require() (e.g. that it should always return the + * real module). + */ + unmock(moduleName: string): JestObjectType, + /** + * Instructs Jest to use fake versions of the standard timer functions + * (setTimeout, setInterval, clearTimeout, clearInterval, nextTick, + * setImmediate and clearImmediate). + */ + useFakeTimers(fakeTimersConfig?: FakeTimersConfig): JestObjectType, + /** + * Instructs Jest to use the real versions of the standard timer functions. + */ + useRealTimers(): JestObjectType, + /** + * Creates a mock function similar to jest.fn but also tracks calls to + * object[methodName]. + */ + spyOn( + object: Object, + methodName: string, + accessType?: 'get' | 'set', + ): JestMockFn, + /** + * Set the default timeout interval for tests and before/after hooks in milliseconds. + * Note: The default timeout interval is 5 seconds if this method is not called. + */ + setTimeout(timeout: number): JestObjectType, + ... 
+}; + +type JestSpyType = {calls: JestCallsType, ...}; + +type JestDoneFn = {| + (error?: Error): void, + fail: (error: Error) => void, +|}; + +/** Runs this function after every test inside this context */ +declare function afterEach( + fn: (done: JestDoneFn) => ?Promise, + timeout?: number, +): void; +/** Runs this function before every test inside this context */ +declare function beforeEach( + fn: (done: JestDoneFn) => ?Promise, + timeout?: number, +): void; +/** Runs this function after all tests have finished inside this context */ +declare function afterAll( + fn: (done: JestDoneFn) => ?Promise, + timeout?: number, +): void; +/** Runs this function before any tests have started inside this context */ +declare function beforeAll( + fn: (done: JestDoneFn) => ?Promise, + timeout?: number, +): void; + +/** A context for grouping tests together */ +declare var describe: { + /** + * Creates a block that groups together several related tests in one "test suite" + */ + (name: JestTestName, fn: () => void): void, + /** + * Only run this describe block + */ + only(name: JestTestName, fn: () => void): void, + /** + * Skip running this describe block + */ + skip(name: JestTestName, fn: () => void): void, + /** + * each runs this test against array of argument arrays per each run + * + * @param {table} table of Test + */ + each( + ...table: Array | mixed> | [Array, string] + ): ( + name: JestTestName, + fn?: (...args: Array) => ?Promise, + timeout?: number, + ) => void, + ... +}; + +/** An individual test unit */ +declare var it: { + /** + * An individual test unit + * + * @param {JestTestName} Name of Test + * @param {Function} Test + * @param {number} Timeout for the test, in milliseconds. + */ + ( + name: JestTestName, + fn?: (done: JestDoneFn) => ?Promise, + timeout?: number, + ): void, + /** + * Only run this test + * + * @param {JestTestName} Name of Test + * @param {Function} Test + * @param {number} Timeout for the test, in milliseconds. 
+ */ + only: {| + ( + name: JestTestName, + fn?: (done: JestDoneFn) => ?Promise, + timeout?: number, + ): void, + each( + ...table: Array | mixed> | [Array, string] + ): ( + name: JestTestName, + fn?: (...args: Array) => ?Promise, + timeout?: number, + ) => void, + |}, + /** + * Skip running this test + * + * @param {JestTestName} Name of Test + * @param {Function} Test + * @param {number} Timeout for the test, in milliseconds. + */ + skip: {| + ( + name: JestTestName, + fn?: (done: JestDoneFn) => ?Promise, + timeout?: number, + ): void, + each( + ...table: Array | mixed> | [Array, string] + ): ( + name: JestTestName, + fn?: (...args: Array) => ?Promise, + timeout?: number, + ) => void, + |}, + /** + * Highlight planned tests in the summary output + * + * @param {String} Name of Test to do + */ + todo(name: string): void, + /** + * Run the test concurrently + * + * @param {JestTestName} Name of Test + * @param {Function} Test + * @param {number} Timeout for the test, in milliseconds. + */ + concurrent( + name: JestTestName, + fn?: (done: JestDoneFn) => ?Promise, + timeout?: number, + ): void, + /** + * each runs this test against array of argument arrays per each run + * + * @param {table} table of Test + */ + each( + ...table: Array | mixed> | [Array, string] + ): ( + name: JestTestName, + fn?: (...args: Array) => ?Promise, + timeout?: number, + ) => void, + ... +}; + +declare function fit( + name: JestTestName, + fn: (done: JestDoneFn) => ?Promise, + timeout?: number, +): void; +/** An individual test unit */ +declare var test: typeof it; +/** A disabled group of tests */ +declare var xdescribe: typeof describe; +/** A focused group of tests */ +declare var fdescribe: typeof describe; +/** A disabled individual test */ +declare var xit: typeof it; +/** A disabled individual test */ +declare var xtest: typeof it; + +type JestPrettyFormatColors = { + comment: { + close: string, + open: string, + ... + }, + content: { + close: string, + open: string, + ... 
+ }, + prop: { + close: string, + open: string, + ... + }, + tag: { + close: string, + open: string, + ... + }, + value: { + close: string, + open: string, + ... + }, + ... +}; + +type JestPrettyFormatIndent = string => string; +type JestPrettyFormatPrint = any => string; +type JestPrettyFormatOptions = {| + callToJSON: boolean, + edgeSpacing: string, + escapeRegex: boolean, + highlight: boolean, + indent: number, + maxDepth: number, + min: boolean, + plugins: JestPrettyFormatPlugins, + printFunctionName: boolean, + spacing: string, + theme: {| + comment: string, + content: string, + prop: string, + tag: string, + value: string, + |}, +|}; + +type JestPrettyFormatPlugin = { + print: ( + val: any, + serialize: JestPrettyFormatPrint, + indent: JestPrettyFormatIndent, + opts: JestPrettyFormatOptions, + colors: JestPrettyFormatColors, + ) => string, + test: any => boolean, + ... +}; + +type JestPrettyFormatPlugins = Array; + +/** The expect function is used every time you want to test a value */ +declare var expect: { + /** The object that you want to make assertions against */ + ( + value: any, + ): JestExpectType & + JestPromiseType & + EnzymeMatchersType & + DomTestingLibraryType & + JestJQueryMatchersType & + JestStyledComponentsMatchersType & + JestExtendedMatchersType & + SnapshotDiffType, + /** Add additional Jasmine matchers to Jest's roster */ + extend(matchers: {[name: string]: JestMatcher, ...}): void, + /** Add a module that formats application-specific data structures. */ + addSnapshotSerializer(pluginModule: JestPrettyFormatPlugin): void, + assertions(expectedAssertions: number): void, + hasAssertions(): void, + any(value: mixed): JestAsymmetricEqualityType, + anything(): any, + // MODIFIED: Array -> $ReadOnlyArray + arrayContaining(value: $ReadOnlyArray): Array, + objectContaining(value: Object): Object, + /** Matches any received string that contains the exact expected string. 
*/ + stringContaining(value: string): string, + stringMatching(value: string | RegExp): string, + not: { + arrayContaining: (value: $ReadOnlyArray) => Array, + objectContaining: (value: {...}) => Object, + stringContaining: (value: string) => string, + stringMatching: (value: string | RegExp) => string, + ... + }, + ... +}; + +// TODO handle return type +// http://jasmine.github.io/2.4/introduction.html#section-Spies +declare function spyOn(value: mixed, method: string): Object; + +/** Holds all functions related to manipulating test runner */ +declare var jest: JestObjectType; + +/** + * The global Jasmine object, this is generally not exposed as the public API, + * using features inside here could break in later versions of Jest. + */ +declare var jasmine: { + DEFAULT_TIMEOUT_INTERVAL: number, + any(value: mixed): JestAsymmetricEqualityType, + anything(): any, + arrayContaining(value: Array): Array, + clock(): JestClockType, + createSpy(name: string): JestSpyType, + createSpyObj( + baseName: string, + methodNames: Array, + ): {[methodName: string]: JestSpyType, ...}, + objectContaining(value: Object): Object, + stringMatching(value: string): string, + ... +}; diff --git a/flow-typed/react.js b/flow-typed/react.js index 61592c0caadfb..c57aba2360fac 100644 --- a/flow-typed/react.js +++ b/flow-typed/react.js @@ -16,7 +16,7 @@ // the types of useTransition, startTranstion, and useDeferredValue used only in test. 
declare module react { declare export var DOM: any; - declare export var PropTypes: ReactPropTypes; + declare export var PropTypes: any; declare export var version: string; declare export function checkPropTypes( @@ -27,24 +27,19 @@ declare module react { getStack: ?() => ?string, ): void; - declare export var createClass: React$CreateClass; + declare export var createClass: $FlowFixMe; declare export function createContext( defaultValue: T, calculateChangedBits: ?(a: T, b: T) => number, ): React$Context; declare export var createElement: React$CreateElement; declare export var cloneElement: React$CloneElement; - declare export function createFactory( - type: ElementType, - ): React$ElementFactory; declare export function createRef(): {|current: null | T|}; declare export function isValidElement(element: any): boolean; declare export var Component: typeof React$Component; declare export var PureComponent: typeof React$PureComponent; - declare export type StatelessFunctionalComponent

= - React$StatelessFunctionalComponent

; declare export type ComponentType<-P> = React$ComponentType

; declare export type AbstractComponent< -Config, @@ -57,8 +52,6 @@ declare module react { declare export type Key = React$Key; declare export type Ref = React$Ref; declare export type Node = React$Node; - declare export type TransportObject = React$TransportObject; - declare export type TransportValue = React$TransportValue; declare export type Context = React$Context; declare export type Portal = React$Portal; declare export var ConcurrentMode: ({ @@ -107,12 +100,12 @@ declare module react { ): React$AbstractComponent; declare export function memo( - component: React$AbstractComponent, + component_: React$AbstractComponent, equal?: (Config, Config) => boolean, ): React$AbstractComponent; declare export function lazy( - component: () => Promise<{ + component_: () => Promise<{ default: React$AbstractComponent, ... }>, @@ -223,7 +216,6 @@ declare module react { +createContext: typeof createContext, +createElement: typeof createElement, +cloneElement: typeof cloneElement, - +createFactory: typeof createFactory, +createRef: typeof createRef, +forwardRef: typeof forwardRef, +isValidElement: typeof isValidElement, diff --git a/gulpfile.js b/gulpfile.js index 6e72dbd2dcf49..86e4aef8152ad 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -13,6 +13,7 @@ const babelOptions = require('./scripts/getBabelOptions')({ ast: false, plugins: [ + 'babel-plugin-syntax-hermes-parser', '@babel/plugin-transform-flow-strip-types', [ '@babel/plugin-transform-runtime', @@ -69,25 +70,17 @@ const buildDist = function (filename, opts, isProduction) { const webpackOpts = { externals: [/^[-/a-zA-Z0-9]+$/, /^@babel\/.+$/], target: opts.target, - node: { - fs: 'empty', - net: 'empty', - path: 'empty', - child_process: 'empty', - util: 'empty', - }, output: { filename: filename, libraryTarget: opts.libraryTarget, library: opts.libraryName, }, plugins: [ - new webpackStream.webpack.DefinePlugin({ + new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify( isProduction ? 
'production' : 'development', ), }), - new webpackStream.webpack.optimize.OccurrenceOrderPlugin(), ], }; if (isProduction && !opts.noMinify) { diff --git a/package.json b/package.json index 52b34405bab21..7d5bef7c2a92c 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "relay-github-root", "description": "A framework for building data-driven React applications.", - "version": "15.0.0", + "version": "17.0.0", "license": "MIT", "homepage": "https://relay.dev", "bugs": "https://github.com/facebook/relay/issues", @@ -20,12 +20,10 @@ "test-dependencies": "node ./scripts/testDependencies.js", "typecheck": "flow check" }, - "workspaces": [ - "vscode-extension" - ], "dependencies": { "@babel/core": "^7.18.6", "@babel/generator": "^7.18.6", + "@babel/helper-check-duplicate-nodes": "^7.18.6", "@babel/parser": "^7.18.6", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.0.0", "@babel/plugin-proposal-optional-catch-binding": "^7.0.0", @@ -38,15 +36,16 @@ "@babel/runtime": "^7.0.0", "@babel/traverse": "^7.14.0", "@babel/types": "^7.0.0", - "@jest/create-cache-key-function": "^26.6.2", + "@jest/create-cache-key-function": "^29.7.0", "babel-eslint": "^10.1.0", "babel-plugin-macros": "^2.0.0", + "babel-plugin-syntax-hermes-parser": "0.22.0", "babel-plugin-tester": "^6.0.1", "babel-preset-fbjs": "^3.4.0", "cosmiconfig": "^5.0.5", "cross-env": "^7.0.2", "del": "6.0.0", - "eslint": "^8.19.0", + "eslint": "^8.57.0", "eslint-config-fbjs": "4.0.0", "eslint-plugin-babel": "5.3.1", "eslint-plugin-ft-flow": "2.0.1", @@ -59,7 +58,7 @@ "eslint-plugin-relay": "1.8.3", "eslint-plugin-relay-internal": "link:./packages/eslint-plugin-relay-internal", "fbjs": "^3.0.2", - "flow-bin": "^0.201.0", + "flow-bin": "^0.238.0", "glob": "^7.1.1", "graphql": "15.3.0", "gulp": "4.0.2", @@ -68,17 +67,21 @@ "gulp-header": "2.0.9", "gulp-rename": "^2.0.0", "gulp-util": "3.0.8", - "hermes-eslint": "0.8.0", + "hermes-eslint": "0.22.0", "invariant": "^2.2.4", - "jest": "^26.6.3", + 
"jest": "^29.7.0", "nullthrows": "^1.1.1", - "prettier": "2.7.1", + "prettier": "2.8.8", + "prettier-plugin-hermes-parser": "0.22.0", "promise-polyfill": "6.1.0", - "react": "17.0.2", - "react-refresh": "^0.9.0", - "react-test-renderer": "17.0.2", - "webpack": "^4.30.0", - "webpack-stream": "^5.1.1" + "react": "18.3.1", + "react-refresh": "^0.14.0", + "react-test-renderer": "18.3.1", + "webpack": "^5.89.0", + "webpack-stream": "^7.0.0" + }, + "resolutions": { + "glob-watcher": "^6.0.0" }, "private": true, "devEngines": { @@ -92,10 +95,12 @@ "requirePragma": true, "singleQuote": true, "trailingComma": "all", - "parser": "flow" + "parser": "hermes" }, "jest": { - "testRegex": "/__tests__/.*-test\\.js$", + "testMatch": [ + "/packages/**/__tests__/**/*-test.js" + ], "modulePathIgnorePatterns": [ "/lib/", "/node_modules/(?!(fbjs/lib/|react/lib/))", @@ -109,14 +114,21 @@ "setupFiles": [ "/scripts/jest/environment.js" ], - "timers": "fake", + "fakeTimers": { + "enableGlobally": true, + "legacyFakeTimers": true + }, "transform": { ".*": "/scripts/jest/preprocessor.js" }, "transformIgnorePatterns": [ "/node_modules/" ], - "testEnvironment": "node" + "testEnvironment": "node", + "snapshotFormat": { + "escapeString": true, + "printBasicPrototype": true + } }, "devDependencies": {} } diff --git a/packages/babel-plugin-relay/__tests__/BabelPluginRelay-path-test.js b/packages/babel-plugin-relay/__tests__/BabelPluginRelay-path-test.js new file mode 100644 index 0000000000000..96836554754c3 --- /dev/null +++ b/packages/babel-plugin-relay/__tests__/BabelPluginRelay-path-test.js @@ -0,0 +1,46 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @oncall relay + */ + +'use strict'; + +describe('`development` option', () => { + function transformOnPlatform(platform: string) { + jest.resetModules(); + + Object.defineProperty(process, 'platform', { + value: platform, + }); + + jest.doMock('path', () => { + if (platform === 'win32') { + return jest.requireActual('path').win32; + } else { + return jest.requireActual('path').posix; + } + }); + + const transformerWithOptions = require('./transformerWithOptions'); + + return transformerWithOptions( + { + artifactDirectory: '/test/artifacts', + }, + 'development', + )('graphql`fragment TestFrag on Node { id }`'); + } + + it('tests the handling of file path', () => { + const codeOnPosix = transformOnPlatform('linux'); + const codeOnNonPosix = transformOnPlatform('win32'); + + expect(codeOnNonPosix).toEqual(codeOnPosix); + expect(codeOnPosix).toMatchSnapshot(); + }); +}); diff --git a/packages/babel-plugin-relay/__tests__/__snapshots__/BabelPluginRelay-path-test.js.snap b/packages/babel-plugin-relay/__tests__/__snapshots__/BabelPluginRelay-path-test.js.snap new file mode 100644 index 0000000000000..3e5b8dc2ba838 --- /dev/null +++ b/packages/babel-plugin-relay/__tests__/__snapshots__/BabelPluginRelay-path-test.js.snap @@ -0,0 +1,15 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`\`development\` option tests the handling of file path 1`] = ` +"var _TestFrag; +_TestFrag !== void 0 + ? _TestFrag + : ((_TestFrag = require('./test/artifacts/TestFrag.graphql')), + _TestFrag.hash && + _TestFrag.hash !== '0bb6b7b29bc3e910921551c4ff5b6757' && + console.error( + \\"The definition of 'TestFrag' appears to have changed. 
Run \`relay-compiler\` to update the generated files to receive the expected data.\\", + ), + _TestFrag); +" +`; diff --git a/packages/babel-plugin-relay/__tests__/transformerWithOptions.js b/packages/babel-plugin-relay/__tests__/transformerWithOptions.js index 7948ebd84ceca..5222f66062ed6 100644 --- a/packages/babel-plugin-relay/__tests__/transformerWithOptions.js +++ b/packages/babel-plugin-relay/__tests__/transformerWithOptions.js @@ -12,6 +12,8 @@ const BabelPluginRelay = require('../BabelPluginRelay'); const babel = require('@babel/core'); +const checkDuplicatedNodes = + require('@babel/helper-check-duplicate-nodes').default; const prettier = require('prettier'); function transformerWithOptions( @@ -23,14 +25,16 @@ function transformerWithOptions( const previousEnv = process.env.BABEL_ENV; try { process.env.BABEL_ENV = environment; - const code = babel.transform(text, { + const {code, ast} = babel.transformSync(text, { compact: false, cwd: '/', filename: filename || providedFileName || 'test.js', highlightCode: false, parserOpts: {plugins: ['jsx']}, plugins: [[BabelPluginRelay, options]], - }).code; + ast: true, + }); + checkDuplicatedNodes(ast); return prettier.format(code, { bracketSameLine: true, bracketSpacing: false, diff --git a/packages/babel-plugin-relay/compileGraphQLTag.js b/packages/babel-plugin-relay/compileGraphQLTag.js index f3586e7dd92b0..785f2e29e6c93 100644 --- a/packages/babel-plugin-relay/compileGraphQLTag.js +++ b/packages/babel-plugin-relay/compileGraphQLTag.js @@ -29,6 +29,14 @@ const { const GENERATED = './__generated__/'; +/** + * Converts backslashes in a path to forward slashes (POSIX style) for + * cross-platform compatibility. + */ +function posixifyPath(path: string): string { + return process.platform === 'win32' ? path.replace(/\\/g, '/') : path; +} + /** * Given a graphql`` tagged template literal, replace it with the appropriate * runtime artifact. 
@@ -107,11 +115,13 @@ function createNode( throw new Error('GraphQL operations and fragments must contain names'); } const requiredFile = definitionName + '.graphql'; - const requiredPath = options.isHasteMode - ? requiredFile - : options.artifactDirectory - ? getRelativeImportPath(state, options.artifactDirectory, requiredFile) - : GENERATED + requiredFile; + const requiredPath = posixifyPath( + options.isHasteMode + ? requiredFile + : options.artifactDirectory + ? getRelativeImportPath(state, options.artifactDirectory, requiredFile) + : GENERATED + requiredFile, + ); const hash = crypto .createHash('md5') @@ -125,11 +135,11 @@ function createNode( const id = topScope.generateUidIdentifier(definitionName); - const expHash = t.MemberExpression(id, t.Identifier('hash')); + const expHash = t.MemberExpression(t.cloneNode(id), t.Identifier('hash')); const expWarn = warnNeedsRebuild(t, definitionName, options.buildCommand); const expWarnIfOutdated = t.LogicalExpression( '&&', - expHash, + t.cloneNode(expHash), t.LogicalExpression( '&&', t.BinaryExpression('!==', expHash, t.StringLiteral(hash)), @@ -145,34 +155,41 @@ function createNode( const program = path.findParent(parent => parent.isProgram()); program.unshiftContainer('body', importDeclaration); - const expAssignAndCheck = t.SequenceExpression([expWarnIfOutdated, id]); + const expAssignAndCheck = t.SequenceExpression([ + expWarnIfOutdated, + t.cloneNode(id), + ]); let expAssign; if (options.isDevVariable != null) { expAssign = t.ConditionalExpression( t.Identifier(options.isDevVariable), expAssignAndCheck, - id, + t.cloneNode(id), ); } else if (options.isDevelopment) { expAssign = expAssignAndCheck; } else { - expAssign = id; + expAssign = t.cloneNode(id); } path.replaceWith(expAssign); } else { - topScope.push({id}); + topScope.push({id: t.cloneNode(id)}); const requireGraphQLModule = t.CallExpression(t.Identifier('require'), [ t.StringLiteral(requiredPath), ]); - const expAssignProd = t.AssignmentExpression('=', 
id, requireGraphQLModule); + const expAssignProd = t.AssignmentExpression( + '=', + t.cloneNode(id), + requireGraphQLModule, + ); const expAssignAndCheck = t.SequenceExpression([ expAssignProd, expWarnIfOutdated, - id, + t.cloneNode(id), ]); let expAssign; @@ -191,9 +208,9 @@ function createNode( const expVoid0 = t.UnaryExpression('void', t.NumericLiteral(0)); path.replaceWith( t.ConditionalExpression( - t.BinaryExpression('!==', id, expVoid0), - id, - expAssign, + t.BinaryExpression('!==', t.cloneNode(id), expVoid0), + t.cloneNode(id), + t.cloneNode(expAssign), ), ); } diff --git a/packages/babel-plugin-relay/package.json b/packages/babel-plugin-relay/package.json index dc01015165f2f..31a69efdc7ace 100644 --- a/packages/babel-plugin-relay/package.json +++ b/packages/babel-plugin-relay/package.json @@ -1,7 +1,7 @@ { "name": "babel-plugin-relay", "description": "A Babel Plugin for use with Relay applications.", - "version": "15.0.0", + "version": "17.0.0", "keywords": [ "graphql", "relay", @@ -23,6 +23,7 @@ }, "devDependencies": { "@babel/core": "^7.20.0", - "prettier": "2.7.1" + "prettier": "2.8.8", + "prettier-plugin-hermes-parser": "0.22.0" } } diff --git a/packages/babel-plugin-relay/yarn.lock b/packages/babel-plugin-relay/yarn.lock index 37f143462e467..da144111b161d 100644 --- a/packages/babel-plugin-relay/yarn.lock +++ b/packages/babel-plugin-relay/yarn.lock @@ -305,9 +305,9 @@ cosmiconfig@^5.0.5: parse-json "^4.0.0" debug@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.2.0.tgz#7f150f93920e94c58f5574c2fd01a3110effe7f1" - integrity sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg== + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" @@ -358,6 +358,18 @@ has-flag@^3.0.0: resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= +hermes-estree@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/hermes-estree/-/hermes-estree-0.16.0.tgz#e2c76a1e9d5a4d620790b9fe05fb01f2d53da07d" + integrity sha512-XCoTuBU8S+Jg8nFzaqgy6pNEYo0WYkbMmuJldb3svzpJ2SNUYJDg28b1ltoDMo7k3YlJwPRg7ZS3JTWV3DkDZA== + +hermes-parser@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/hermes-parser/-/hermes-parser-0.16.0.tgz#92d0a34ff4f9b7ffcb04511dfed0cc19df5038e0" + integrity sha512-tdJJntb45DUpv8j7ybHfq8NfIQgz8AgaD+PVFyfjK+O+v2N5zbsSDtlvQN2uxCghoTkQL86BEs9oi8IPrUE9Pg== + dependencies: + hermes-estree "0.16.0" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -431,10 +443,19 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -prettier@2.7.1: - version "2.7.1" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" - integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== +prettier-plugin-hermes-parser@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/prettier-plugin-hermes-parser/-/prettier-plugin-hermes-parser-0.16.0.tgz#4393d43a4a6f4ed976493dccbbb93f62c44215c3" + integrity sha512-J4HdSmlxf3a0nVHVi0G6JJJ7sDVtSb5a+QR52LpiQonpQzMkqgIqyCg+Gt1sGMTJqn19Z0yTHxwCmUicVYXUVg== + dependencies: + hermes-estree "0.16.0" + hermes-parser "0.16.0" + prettier-plugin-hermes-parser "0.16.0" + +prettier@2.8.8: + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + 
integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== resolve@^1.8.1: version "1.8.1" diff --git a/packages/eslint-plugin-relay-internal/lib/rules/sort-imports.js b/packages/eslint-plugin-relay-internal/lib/rules/sort-imports.js index 0c3c2f50e080e..6f52515b75afc 100644 --- a/packages/eslint-plugin-relay-internal/lib/rules/sort-imports.js +++ b/packages/eslint-plugin-relay-internal/lib/rules/sort-imports.js @@ -6,7 +6,7 @@ * * To regenerate this file, please run this command on Meta's monorepo: * @codegen-command : xplat/js/tools/sort-imports/scripts/build.sh - * @generated SignedSource<<7b94a136bcb0a75c0ed8a3e4fea96e2b>> + * @generated SignedSource<<165e00cb24cdaa9e47306c8384d4355f>> * @nolint */ -"use strict";function e(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}function t(e){var t=e.default;if("function"==typeof t){var n=function(){return t.apply(this,arguments)};n.prototype=t.prototype}else n={};return Object.defineProperty(n,"__esModule",{value:!0}),Object.keys(e).forEach((function(t){var r=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(n,t,r.get?r:{enumerable:!0,get:function(){return e[t]}})})),n}const n=e(require("path")).default,r=__dirname.split(n.sep).includes("xplat");function i(e){if(("Literal"===e.type||"JSXText"===e.type)&&"string"==typeof e.value)return e.value;if("BinaryExpression"===e.type&&"+"===e.operator){const t=i(e.left),n=i(e.right);if(null!=t&&null!=n)return t+n}return null}function o(e){if("Identifier"===e.type)return e.name;if("ThisExpression"===e.type)return"this";if("MemberExpression"===e.type){const t=o(e.object),n=e.computed?i(e.property):o(e.property);if(null!=t&&null!=n)return t+"."+n}else if("TypeCastExpression"===e.type)return o(e.expression);return null}function a(e){return e.callee?o(e.callee):null}function l(e,t,n){const r=e.getSourceCode().getText(),i=function(e,t){if(t.line<1)throw new RangeError("Line number "+t.line+" is before the start of 
file");const n=/\r\n|\r|\n|\u2028|\u2029/g;let r={index:0};for(let i=1;i=e.length)throw new RangeError("computed offset "+t+" is past the end of file");const n=/\r\n|\r|\n|\u2028|\u2029/g;let r,i={index:0},o=0;do{r=i,i=n.exec(e),++o}while(i&&i.index"string"==typeof e&&e.charCodeAt(0)>=97,c=e=>"string"==typeof e&&e.charCodeAt(0)<=90;function p(e){return null!=e&&"VariableDeclaration"===e.type&&"Program"===e.parent.type&&1===e.declarations.length&&null!=e.declarations[0].init&&m(e.declarations[0].init)}function f(e){return null!=e&&"VariableDeclarator"===e.type&&"VariableDeclaration"===e.parent.type&&"Program"===e.parent.parent.type&&1===e.parent.declarations.length&&m(e.init)}function m(e){return null!=e&&(d(e)||y(e)||h(e))}function g(e){return null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"requireNUX"===e.callee.name&&2===e.arguments.length&&"Literal"===e.arguments[0].type}function d(e){return"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"require"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type}function y(e){return"CallExpression"===e.type&&"Identifier"===e.callee.type&&"requireDeferred"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function h(e){return"CallExpression"===e.type&&"Identifier"===e.callee.type&&"requireDeferredForDisplay"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function x(e){return"ImportDeclaration"===e.type&&(null==e.importKind||"value"===e.importKind)}function b(e){return"ImportDeclaration"===e.type&&("type"===e.importKind||"typeof"===e.importKind)}function C(e){return x(e)||b(e)}function S(e){return null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"JSResource"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type}function v(e){return 
null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"JSResourceForInteraction"===e.callee.name&&2===e.arguments.length&&"Literal"===e.arguments[0].type}function E(e){return null!=e&&"CallExpression"===e.type&&"Identifier"===e.callee.type&&"ClientJSResource"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function I(e){if(null==e||"CallExpression"!==e.type||null==e.callee)return!1;let t;return"Identifier"===e.callee.type&&(t=e.callee),"MemberExpression"===e.callee.type&&"Identifier"===e.callee.object.type&&(t=e.callee.object),null!=t&&"requireCond"===t.name&&e.arguments.length>0}function T(e){return 0===e.indexOf("m#")?e.substring(2):e}function R(e,t,n=1){return"CallExpression"!==e.type||e.arguments.length!==n||null!=t&&"string"==typeof t&&a(e)!==t||"Literal"!==e.arguments[0].type||"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value)}function D(e,t,n){const r=e.arguments;return I(e)?3!==r.length?null:(null==t||r[0]&&"Literal"===r[0].type&&r[0].value===t)&&(null==n||r[1]&&"Literal"===r[1].type&&r[1].value===n)?r[2]&&"ObjectExpression"===r[2].type?r[2].properties.reduce(((e,t)=>{if("Property"===t.type&&"Literal"===t.value.type){let n;if("Identifier"===t.key.type)n=t.key.name;else{if("Literal"!==t.key.type)return e;n=String(t.key.value)}e[n]="string"==typeof t.value.value?T(t.value.value):null}return e}),{}):r[2]&&"Literal"===r[2].type&&"string"==typeof r[2].value?T(r[2].value):null:null:null}function w(e){return R(e,"JSResource")}function N(e){return null==e?null:m(e)?"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value):null}function q(e,t){const n=e.getSourceCode().ast.body.find((e=>"VariableDeclaration"===e.type&&1===e.declarations.length&&"VariableDeclarator"===e.declarations[0].type&&null!=e.declarations[0].init&&N(e.declarations[0].init)===t));return n||null}function k(e){let t=e;for(;"TypeCastExpression"===t.type;)t=t.expression;return t}function 
L(e){if("value"!==e.importKind)return!0;return e.specifiers.every((e=>"ImportSpecifier"===e.type&&("type"===e.importKind||"typeof"===e.importKind)))}function O(e,t){const n=e.getSourceCode().ast.body.find((e=>"ImportDeclaration"===e.type&&e.source.value===t&&!L(e)));return n||null}var P={asAnyKindOfRequireCall:function(e){return m(e)?e:null},asAnyKindOfRequireVariableDeclaration:function(e){return p(e)?e:null},asAnyKindOfRequireVariableDeclarator:function(e){return f(e)?e:null},getAnyRequiredModuleName:function(e,t=Object.freeze({})){const n=N(e)||w(e)||function(e){return R(e,"JSResourceForInteraction",2)}(e)||function(e){if(!g(e))return null;if(2!==e.arguments.length)return null;if("string"!=typeof e.arguments[1].value)return null;return T(e.arguments[1].value)}(e);return null!=n?n:D(e,t.condType,t.condition)},getBaseNode:function(e){let t=e;for(;"MemberExpression"===t.type;)t=t.object;return t},getBinding:s,getBootloadedModuleNames:function(e){return"ExpressionStatement"===e.type&&e.expression&&"CallExpression"===e.expression.type&&e.expression.callee&&"MemberExpression"===e.expression.callee.type&&e.expression.callee.object&&"Bootloader"===e.expression.callee.object.name&&e.expression.callee.property&&"loadModules"===e.expression.callee.property.name&&e.expression.arguments.length>0&&"ArrayExpression"===e.expression.arguments[0].type&&e.expression.arguments[0].elements.length>0?e.expression.arguments[0].elements.map((e=>"Literal"===e.type&&"string"==typeof e.value?e.value:null)).filter(Boolean):null},getCalleeName:a,getConstantStringExpression:i,getCurrentClassName:function(e){const t=e.getAncestors().find((e=>"ClassDeclaration"===e.type));return t&&"ClassDeclaration"===t.type&&null!=t.id?t.id.name:null},getEnglishForNth:function(e){return["first","second","third","fourth","fifth","sixth"][e]},getFullyQualifiedIdentifier:o,getJSResourceModuleName:w,getJSXMemberOrNamespaceRoot:function(e){let 
t=e;for(;"JSXIdentifier"!==t.type;)if("JSXMemberExpression"===t.type)t=t.object;else{if("JSXNamespacedName"!==t.type)throw new Error("unexpected "+t.type);t=t.namespace}return t},getLocOffset:function(e,t,n){return l(e,t.loc.start,n)},getLocOffsetOfLoc:l,getName:function(e){const t=k(e);return"Identifier"===t.type?t.name:"Literal"===t.type?String(t.value):null},getObjectPropertyName:function(e){if("Property"!==e.type&&"PropertyDefinition"!==e.type&&"MethodDefinition"!==e.type)return null;const t=e.key;return"Identifier"!==t.type||e.computed?function(e){switch(e.type){case"Literal":switch(e.literalType){case"bigint":return e.bigint;case"null":return"null";case"regexp":return`/${e.regex.pattern}/${e.regex.flags}`;default:return String(e.value)}case"TemplateLiteral":if(0===e.expressions.length&&1===e.quasis.length)return e.quasis[0].value.cooked}return null}(t):t.name},getParamComments:function(e,t){return t.params.map((function(t){const n=e.getSourceCode().getCommentsBefore(t);return n[n.length-1]}))},getPropertyName:function(e){return"MemberExpression"!==e.type?null:e.computed?i(e.property):"Identifier"!==e.property.type?null:e.property.name},getPropTokens:function(e,t){const n=e.getSourceCode().getTokens(t),r=[];return n.forEach(((e,t,n)=>{"JSXIdentifier"===e.type&&"Punctuator"===n[t+1].type&&"="===n[t+1].value&&r.push(e)})),r},getRequireCondModules:D,getRequireModuleName:N,getRequireModuleNode:q,getReturnComment:function(e,t){return e.getSourceCode().getCommentsBefore(t.body)[0]},getValueImportNode:O,hasValueImport:function(e){return e.getSourceCode().ast.body.some((e=>"ImportDeclaration"===e.type&&!L(e)))},getVariable:function(e,t){let n=t;for(;n;){const t=n.set.get(e);if(t)return t;n=n.upper}return null},insertRequireStatement:function(e,t,n,r=""){if(""===n)throw new Error("Name must be a string with length larger than 0");const i=`const ${n} = 
require('${n}${r}');`,o=[];e.getSourceCode().ast.body.forEach((e=>{if("VariableDeclaration"===e.type&&1===e.declarations.length&&"VariableDeclarator"===e.declarations[0].type&&e.declarations[0].init){const t=N(e.declarations[0].init);null!=t&&o.push({name:t,node:e})}}));const a=o.find((e=>e.name>=n));if(a){if(a.name.replace(r,"")===n)return[];const e=o[0];if(u(a.name)&&c(n)){if(e!==a){const e=a.node.range[0];return[t.removeRange([e-1,e-1]),t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n")]}if(o.length>0){const e=o[o.length-1],r=c(e.name)&&u(n)?"\n":"";return[t.insertTextAfter(e.node,"\n"+i+r)]}{const n=e.getSourceCode().ast.body,r=n[0];return"ExpressionStatement"===r.type&&"Literal"===r.expression.type&&"use strict"===r.expression.value?[t.insertTextBefore(n[1],i+"\n")]:[t.insertTextBefore(n[0],i+"\n")]}},insertValueImportStatement:function(e,t,n,r=""){if(""===n)throw new Error("Name must be a string with length larger than 0");const i=`import ${n} from '${n}${r}';`,o=[];e.getSourceCode().ast.body.forEach((e=>{if(x(e)){const t=function(e){if(m(e))return"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value);if(C(e))return e.source.value;return null}(e);null!=t&&o.push({name:t,node:e})}}));const a=o.find((e=>e.name>=n));if(a){if(a.name.replace(r,"")===n)return[];const e=o[0];if(u(a.name)&&c(n)){if(e!==a){const e=a.node.range[0];return[t.removeRange([e-1,e-1]),t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n")]}if(o.length>0){const e=o[o.length-1],r=c(e.name)&&u(n)?"\n":"";return[t.insertTextAfter(e.node,"\n"+i+r)]}{const n=e.getSourceCode().ast.body,r=n[0];return"ExpressionStatement"===r.type&&"Literal"===r.expression.type&&"use strict"===r.expression.value?[t.insertTextBefore(n[1],i+"\n")]:[t.insertTextBefore(n[0],i+"\n")]}},isAnyKindOfImport:C,isAnyKindOfModuleCall:function(e){return 
m(e)||I(e)||S(e)||v(e)||E(e)||g(e)},isAnyKindOfRequireCall:m,isAnyKindOfRequireVariableDeclaration:p,isAnyKindOfRequireVariableDeclarator:f,isClientJSResource:E,isFbSourceRepo:r,isGraphQLTemplate:function(e){return"Identifier"===e.tag.type&&"graphql"===e.tag.name&&1===e.quasi.quasis.length},isInsideMethod:function(e,t){return e.getAncestors().some((e=>"MethodDefinition"===e.type&&"Identifier"===e.key.type&&e.key.name===t))},isJSResource:S,isJSResourceForInteraction:v,isModuleRef:function(e){return"Literal"===e.type&&"string"==typeof e.value&&e.value.startsWith("m#")},isOnlyTypeImport:L,isOnlyTypeExport:function(e){return"type"===e.exportKind},isReferenced:function(e){const t=e.parent;switch(t.type){case"MemberExpression":case"JSXMemberExpression":return t.property===e&&!0===t.computed||t.object===e;case"MetaProperty":case"ImportDefaultSpecifier":case"ImportNamespaceSpecifier":case"ImportSpecifier":case"LabeledStatement":case"RestElement":case"ObjectPattern":case"ArrayPattern":return!1;case"Property":case"MethodDefinition":return t.key===e&&t.computed;case"VariableDeclarator":case"ClassDeclaration":case"ClassExpression":return t.id!==e;case"ArrowFunctionExpression":case"FunctionDeclaration":case"FunctionExpression":for(let n=0;ne.writeExpr));return null!=i?i.writeExpr:null},stripModuleRef:T,uncast:k};var j=t(Object.freeze({__proto__:null,isCommaOrSemiToken:function(e){return"Punctuator"===e.type&&(","===e.value||";"===e.value)}}));const{isCommaOrSemiToken:B}=j;var F={getInlineComments:function(e,t,n=B){const r=e.ast.comments.filter((e=>e.loc.start.line===t.loc.end.line&&e.range[0]>t.range[1])).sort(((e,t)=>e.range[0]-t.range[0])),i=[];let o=t;for(const t of r){const r=e.getTokensBetween(o,t);if(r.length>0){if(!n)break;if(!r.every(n))break}i.push(t),o=t}return i},getLeadingComments:function(e,t,n){const r=e.getCommentsBefore(t),i=[];let o=t;for(let t=r.length-1;t>=0;t-=1){const 
a=r[t];if(a===n)break;if(a.loc.end.line===o.loc.start.line){i.unshift(a),o=a;continue}if(a.loc.end.line!==o.loc.start.line-1)break;const l=e.getTokenBefore(a);if(l&&l.loc.end.line===a.loc.start.line)break;i.unshift(a),o=a}return i}};var M=function(e){return e.getSourceCode().ast.docblock?.comment};const{getInlineComments:A,getLeadingComments:V}=F,{isCommaOrSemiToken:$}=j;function K(e){return"@"===e[0]||":"===e[0]}const J=/\d{1,3}(\.\d+)?%/;function _(e){switch(typeof e){case"number":return{isSafeNumericString:!0,isPercentage:!1};case"boolean":return{isSafeNumericString:!1,isPercentage:!1}}return isNaN(e)||isNaN(parseFloat(e))?J.test(e)?{isSafeNumericString:!0,isPercentage:!0}:{isSafeNumericString:!1,isPercentage:!1}:{isSafeNumericString:!0,isPercentage:!1}}function X(e){return e.reduce((([e,t],[n,r])=>[Math.min(e,n),Math.max(t,r)]),[Number.MAX_SAFE_INTEGER,0])}var U={compareNames:function(e,t,n=!1){if("number"==typeof e&&"number"==typeof t)return e-t;const r=String(e),i=String(t);if(""===r&&""!==i)return-1;if(""!==r&&""===i)return 1;if(""===r&&""===i)return 0;const{isSafeNumericString:o,isPercentage:a}=_(r),{isSafeNumericString:l,isPercentage:s}=_(i);if(o&&l){const e=Number.parseFloat(r),t=Number.parseFloat(i);if(e===t){if(!a&&s)return-1;if(a&&!s)return 1}return e-t}if(n){const e=K(r),t=K(i);if(!e&&t)return-1;if(e&&!t)return 1}const u=o||r[0].toLowerCase()===r[0].toUpperCase(),c=l||i[0].toLowerCase()===i[0].toUpperCase();if(!u&&c)return 1;if(u&&!c)return-1;if(!u&&!c){const e=r[0].toLowerCase()===r[0],t=i[0].toLowerCase()===i[0];if(!e&&t)return-1;if(e&&!t)return 1}return r.localeCompare(i,"en",{caseFirst:"upper",sensitivity:"base"})},getEncompassingRange:X,getNodeTextWithComments:function(e,t,n,{shouldIncludeNextTokenInRange:r=$,ensureTextFollowsNode:i,inlineCommentIgnoreToken:o}={}){const a=A(e,t,o),l=[...V(e,t,n).map((({range:e})=>e)),t.range,...a.map((({range:e})=>e))],s=e.getTokenAfter(t);s&&!0===r?.(s)&&l.push(s.range);const u=X(l);let 
c=e.text.slice(u[0],u[1]);const p=t.range[1]-u[0];if(null!=i){e.getTokenAfter(t)?.value!==i&&(c=c.slice(0,p)+i+c.slice(p))}return{range:u,text:c}},isComma:function(e){return"Punctuator"===e.type&&","===e.value}};const z=P,{getInlineComments:G,getLeadingComments:Q}=F,W=M,{compareNames:H}=U,Y=0,Z={default:1,namespace:2,named:3},ee={default:4,namespace:5,named:6},te={default:7,namespace:8,named:9},ne=0,re=1,ie=2,oe=3,ae=99;var le={meta:{fixable:"code",messages:{incorrectOrder:"Requires should be sorted alphabetically"}},create(e){const t=e.getSourceCode(),n=function(e){const t=W(e);if(null!=t)return t;const n=e.getSourceCode().ast,r=n.body.length>0?n.body[0]:n,i=e.getSourceCode().getCommentsBefore(r)[0];return i&&"Block"===i.type?i:null}(e);if(n&&(n.value.includes("* @generated")||n.value.includes("* @partially-generated")))return{};const r=Object.freeze({typeImport:{priority:10,uppercase:[],lowercase:[],tiebreakFunction:s},valueImport:{priority:20,uppercase:[],lowercase:[],tiebreakFunction:s},requiresUsedByOtherRequires:{priority:30,uppercase:[],lowercase:[],tiebreakFunction:u},require:{priority:40,uppercase:[],lowercase:[],tiebreakFunction:u}}),i=[];let o=null,a=null;const l=new Set;return{Program(n){for(const e of n.body)switch(e.type){case"ImportDeclaration":if("type"===e.importKind||"typeof"===e.importKind)g(r.typeImport,e,e.source.value,!0);else{const n=t.getLastToken(e.source,(e=>"from"===e.value));0===e.specifiers.length&&null==n?d(e):g(r.valueImport,e,e.source.value)}break;case"VariableDeclaration":{const t=e.declarations[0]?.init;if(1!==e.declarations.length||null==t){d(e);break}f(t,e);break}default:d(e)}const s=[];for(const e of Object.keys(r)){const t=r[e];s.push({priority:t.priority,nodes:t.uppercase.sort(((e,n)=>c(e,n,t.tiebreakFunction)))}),s.push({priority:t.priority+5,nodes:t.lowercase.sort(((e,n)=>c(e,n,t.tiebreakFunction)))})}function 
u(e){return[e.leadingComments.length?e.leadingComments.map((e=>t.getText(e))).join("\n")+"\n":"",e.inlineComments.length?" "+e.inlineComments.map((e=>t.getText(e))).join(" "):""]}const p=s.filter((e=>0!==e.nodes.length)).sort(((e,t)=>e.priority-t.priority)).map((e=>e.nodes.map((e=>{const[n,r]=u(e),i=function(e,t){const n=t.node,r=e.getText(n),i=(()=>{if("ImportDeclaration"===n.type&&null!=n.specifiers.find((e=>"ImportSpecifier"===e.type))){const t=e.getFirstToken(n,(e=>"Punctuator"===e.type&&"{"===e.value)),r=e.getFirstToken(n,(e=>"Punctuator"===e.type&&"}"===e.value));return null==t||null==r?null:e.getText().substring(t.range[0],r.range[1])}if("VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type){const t=n.declarations[0].id.typeAnnotation,r=e.getText(n.declarations[0].id);return t?r.substr(0,t.range[0]-n.declarations[0].id.range[0]):r}return null})();if(null==i)return r;let o=[],a=null;"ImportDeclaration"===n.type?o=n.specifiers.map((t=>"ImportDefaultSpecifier"===t.type||"ImportNamespaceSpecifier"===t.type?null:{leadingComments:e.getCommentsBefore(t),name:t.imported.name,node:t})).filter(Boolean):"VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type&&(o=n.declarations[0].id.properties.map((t=>{if("ExperimentalRestProperty"===t.type||"RestElement"===t.type)return a=t,null;const n=t.key,r="Literal"===n.type?String(n.value):"Identifier"===n.type?t.computed?null:n.name:null;return null==r?null:{leadingComments:e.getCommentsBefore(t),name:r,node:t}})).filter(Boolean));if(o.length<=1)return r;const l=i.indexOf("\n")>=0,s=o.sort(((e,t)=>H(e.name,t.name))).map((e=>e.node));null!=a&&s.push(a);const u=s.map((t=>{const n=e.getCommentsBefore(t).map((t=>e.getText(t))),r=n.length?n.join(""):"";return l?(r?" 
"+r+"\n":"")+" "+e.getText(t):(r?r+" ":"")+e.getText(t)})),c=(()=>{const t=[];if("ImportDeclaration"===n.type){if(t.push(...e.getCommentsBefore(n.source)),l&&null!=n.specifiers.find((e=>"ImportSpecifier"===e.type))){const r=e.getTokenBefore(n.source,(e=>"Punctuator"===e.type&&"}"===e.value));null!=r&&t.push(...e.getCommentsBefore(r))}}else if("VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type){const r=e.getLastToken(n.declarations[0].id);null!=r&&t.push(...e.getCommentsBefore(r))}return t})(),p=l&&c.length?c.map((t=>" "+e.getText(t))).join("\n")+"\n":"",f=l?"\n"+u.map((e=>e.includes("...")?`${e}\n`:`${e},\n`)).join("")+p:u.join(", ");return r.replace(i,(()=>`{${f}}`))}(t,e);return n+i+r})).join("\n"))).join("\n\n"),m=o;if(null==m||null==a)return;const y=m.leadingComments.length?m.leadingComments[0].range[0]:m.node.range[0],h=a.inlineComments.length>0?a.inlineComments[a.inlineComments.length-1].range[1]:a.node.range[1];t.getText(m.node,m.node.range[0]-y,h-m.node.range[1])!==p&&e.report({node:m.node,messageId:"incorrectOrder",fix(e){const n=i.filter((({node:e})=>e.range[0]>=y&&e.range[1]<=h)).map((e=>{const[n,r]=u(e),i=t.getText(e.node);return{range:e.node.range,text:n+i+r}})).flat().concat(t.getAllComments().filter((e=>e.range[0]>=y&&e.range[1]<=h&&!l.has(e))).map((e=>({range:e.range,text:t.getText(e)})))).sort(((e,t)=>e.range[0]-t.range[0]||e.range[1]-t.range[1])).map((({text:e})=>e)).join("\n");return e.replaceTextRange([y,h],[p,n].filter(Boolean).join("\n\n"))}})}};function s(e){if(0===e.specifiers.length)return Y;const t=(()=>{switch(e.importKind){default:case"value":return Z;case"type":return ee;case"typeof":return te}})();return e.specifiers.find((e=>"ImportDefaultSpecifier"===e.type))?t.default:e.specifiers.find((e=>"ImportNamespaceSpecifier"===e.type))?t.namespace:t.named}function u(e){if("ExpressionStatement"===e.type)return ne;switch(e.declarations[0].id.type){case"Identifier":return re;case"ObjectPattern":return 
ie;case"ArrayPattern":return oe}return ae}function c(e,t,n){const r=H(e.moduleName,t.moduleName);if(0!==r)return r;const i=n(e.node)-n(t.node);return 0!==i?i:e.node.loc.start.line-t.node.loc.start.line}function p(e,t){if(null==e)throw new Error("Missing required module name");return null!=t?`${e}_${t}`:e}function f(e,n,i){const o=z.getRequireModuleName(e);if(z.isRequire(e)){const e=p(o,i);g("requireCond"===e||"requireDeferred"===e||"requireDeferredForDisplay"===e?r.requiresUsedByOtherRequires:r.require,n,p(o,i))}else if(z.isRequireDeferred(e)||z.isRequireDeferredForDisplay(e))g(r.require,n,p(o,i));else{if(z.isRequireCond(e)){if("VariableDeclaration"===n.type){const e=t.getText(n.declarations[0].id);return void g(r.require,n,p(e,i))}}else{if("MemberExpression"===e.type)return void f(e.object,n,p(t.getText(e.property),i));if("CallExpression"===e.type)return void f(e.callee,n,i)}d(n)}}function m(e){e.leadingComments.forEach((e=>l.add(e))),e.inlineComments.forEach((e=>l.add(e)));t.getCommentsInside(e.node).forEach((e=>l.add(e)))}function g(e,r,i,l=!1){const s={inlineComments:G(t,r),leadingComments:Q(t,r,n),moduleName:i,node:r};if(l)e.uppercase.push(s);else{const t=i[0]||"";t.toLowerCase()===t?e.lowercase.push(s):e.uppercase.push(s)}null==o&&(o=s),a=s,m(s)}function d(e){const r={inlineComments:G(t,e),leadingComments:Q(t,e,n),node:e};i.push(r),m(r)}}};const se=le;var ue={...se,create:e=>e.getSourceCode().getText().includes("@generated SignedSource<<")?{}:se.create(e)};module.exports=ue; +"use strict";function e(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}function t(e){var t=e.default;if("function"==typeof t){var n=function(){return t.apply(this,arguments)};n.prototype=t.prototype}else n={};return Object.defineProperty(n,"__esModule",{value:!0}),Object.keys(e).forEach((function(t){var r=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(n,t,r.get?r:{enumerable:!0,get:function(){return e[t]}})})),n}const 
n=e(require("path")).default,r=__dirname.split(n.sep).includes("xplat");function i(e){if(("Literal"===e.type||"JSXText"===e.type)&&"string"==typeof e.value)return e.value;if("BinaryExpression"===e.type&&"+"===e.operator){const t=i(e.left),n=i(e.right);if(null!=t&&null!=n)return t+n}return null}function o(e){if("Identifier"===e.type)return e.name;if("ThisExpression"===e.type)return"this";if("MemberExpression"===e.type){const t=o(e.object),n=e.computed?i(e.property):o(e.property);if(null!=t&&null!=n)return t+"."+n}else if("TypeCastExpression"===e.type||"AsExpression"===e.type)return o(e.expression);return null}function a(e){return e.callee?o(e.callee):null}function l(e,t,n){const r=e.getSourceCode().getText(),i=function(e,t){if(t.line<1)throw new RangeError("Line number "+t.line+" is before the start of file");const n=/\r\n|\r|\n|\u2028|\u2029/g;let r={index:0};for(let i=1;i=e.length)throw new RangeError("computed offset "+t+" is past the end of file");const n=/\r\n|\r|\n|\u2028|\u2029/g;let r,i={index:0},o=0;do{r=i,i=n.exec(e),++o}while(i&&i.index"string"==typeof e&&e.charCodeAt(0)>=97,c=e=>"string"==typeof e&&e.charCodeAt(0)<=90;function p(e){return null!=e&&"VariableDeclaration"===e.type&&"Program"===e.parent.type&&1===e.declarations.length&&null!=e.declarations[0].init&&m(e.declarations[0].init)}function f(e){return null!=e&&"VariableDeclarator"===e.type&&"VariableDeclaration"===e.parent.type&&"Program"===e.parent.parent.type&&1===e.parent.declarations.length&&m(e.init)}function m(e){return null!=e&&(g(e)||y(e)||h(e))}function d(e){return null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"requireNUX"===e.callee.name&&2===e.arguments.length&&"Literal"===e.arguments[0].type}function g(e){return"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"require"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type}function 
y(e){return"CallExpression"===e.type&&"Identifier"===e.callee.type&&"requireDeferred"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function h(e){return"CallExpression"===e.type&&"Identifier"===e.callee.type&&"requireDeferredForDisplay"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function x(e){return"ImportDeclaration"===e.type&&(null==e.importKind||"value"===e.importKind)}function b(e){return"ImportDeclaration"===e.type&&("type"===e.importKind||"typeof"===e.importKind)}function C(e){return x(e)||b(e)}function v(e){return null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"JSResource"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type}function S(e){return null!=e&&"CallExpression"===e.type&&e.callee&&"Identifier"===e.callee.type&&"JSResourceForInteraction"===e.callee.name&&2===e.arguments.length&&"Literal"===e.arguments[0].type}function E(e){return null!=e&&"CallExpression"===e.type&&"Identifier"===e.callee.type&&"ClientJSResource"===e.callee.name&&1===e.arguments.length&&"Literal"===e.arguments[0].type&&"string"==typeof e.arguments[0].value}function I(e){if(null==e||"CallExpression"!==e.type||null==e.callee)return!1;let t;return"Identifier"===e.callee.type&&(t=e.callee),"MemberExpression"===e.callee.type&&"Identifier"===e.callee.object.type&&(t=e.callee.object),null!=t&&"requireCond"===t.name&&e.arguments.length>0}function T(e){return 0===e.indexOf("m#")?e.substring(2):e}function D(e,t,n=1){return"CallExpression"!==e.type||e.arguments.length!==n||null!=t&&"string"==typeof t&&a(e)!==t||"Literal"!==e.arguments[0].type||"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value)}function R(e,t,n){const r=e.arguments;return 
I(e)?3!==r.length?null:(null==t||r[0]&&"Literal"===r[0].type&&r[0].value===t)&&(null==n||r[1]&&"Literal"===r[1].type&&r[1].value===n)?r[2]&&"ObjectExpression"===r[2].type?r[2].properties.reduce(((e,t)=>{if("Property"===t.type&&"Literal"===t.value.type){let n;if("Identifier"===t.key.type)n=t.key.name;else{if("Literal"!==t.key.type)return e;n=String(t.key.value)}e[n]="string"==typeof t.value.value?T(t.value.value):null}return e}),{}):r[2]&&"Literal"===r[2].type&&"string"==typeof r[2].value?T(r[2].value):null:null:null}function w(e){return D(e,"JSResource")}function N(e){return D(e,"JSResourceForInteraction",2)}function k(e){return null==e?null:m(e)?"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value):null}function q(e){return d(e)?2!==e.arguments.length||"string"!=typeof e.arguments[1].value?null:T(e.arguments[1].value):null}function L(e,t){const n=e.getSourceCode().ast.body.find((e=>"VariableDeclaration"===e.type&&1===e.declarations.length&&"VariableDeclarator"===e.declarations[0].type&&null!=e.declarations[0].init&&k(e.declarations[0].init)===t));return n||null}function O(e){let t=e;for(;"TypeCastExpression"===t.type||"AsExpression"===t.type;)t=t.expression;return t}function P(e){if("value"!==e.importKind)return!0;return e.specifiers.every((e=>"ImportSpecifier"===e.type&&("type"===e.importKind||"typeof"===e.importKind)))}function j(e,t){const n=e.getSourceCode().ast.body.find((e=>"ImportDeclaration"===e.type&&e.source.value===t&&!P(e)));return n||null}var B={asAnyKindOfRequireCall:function(e){return m(e)?e:null},asAnyKindOfRequireVariableDeclaration:function(e){return p(e)?e:null},asAnyKindOfRequireVariableDeclarator:function(e){return f(e)?e:null},getAnyRequiredModuleName:function(e,t=Object.freeze({})){const n=k(e)||w(e)||N(e)||q(e);return null!=n?n:R(e,t.condType,t.condition)},getBaseNode:function(e){let t=e;for(;"MemberExpression"===t.type;)t=t.object;return 
t},getBinding:s,getBootloadedModuleNames:function(e){return"ExpressionStatement"===e.type&&e.expression&&"CallExpression"===e.expression.type&&e.expression.callee&&"MemberExpression"===e.expression.callee.type&&e.expression.callee.object&&"Bootloader"===e.expression.callee.object.name&&e.expression.callee.property&&"loadModules"===e.expression.callee.property.name&&e.expression.arguments.length>0&&"ArrayExpression"===e.expression.arguments[0].type&&e.expression.arguments[0].elements.length>0?e.expression.arguments[0].elements.map((e=>"Literal"===e.type&&"string"==typeof e.value?T(e.value):null)).filter(Boolean):null},getCalleeName:a,getConstantStringExpression:i,getCurrentClassName:function(e){const t=e.getAncestors().find((e=>"ClassDeclaration"===e.type));return t&&"ClassDeclaration"===t.type&&null!=t.id?t.id.name:null},getEnglishForNth:function(e){return["first","second","third","fourth","fifth","sixth"][e]},getFullyQualifiedIdentifier:o,getJSResourceModuleName:w,getJSXMemberOrNamespaceRoot:function(e){let t=e;for(;"JSXIdentifier"!==t.type;)if("JSXMemberExpression"===t.type)t=t.object;else{if("JSXNamespacedName"!==t.type)throw new Error("unexpected "+t.type);t=t.namespace}return t},getLocOffset:function(e,t,n){return l(e,t.loc.start,n)},getLocOffsetOfLoc:l,getName:function(e){const t=O(e);return"Identifier"===t.type?t.name:"Literal"===t.type?String(t.value):null},getObjectPropertyName:function(e){if("Property"!==e.type&&"PropertyDefinition"!==e.type&&"MethodDefinition"!==e.type)return null;const t=e.key;return"Identifier"!==t.type||e.computed?function(e){switch(e.type){case"Literal":switch(e.literalType){case"bigint":return e.bigint;case"null":return"null";case"regexp":return`/${e.regex.pattern}/${e.regex.flags}`;default:return String(e.value)}case"TemplateLiteral":if(0===e.expressions.length&&1===e.quasis.length)return e.quasis[0].value.cooked}return null}(t):t.name},getParamComments:function(e,t){return t.params.map((function(t){const 
n=e.getSourceCode().getCommentsBefore(t);return n[n.length-1]}))},getPropertyName:function(e){return"MemberExpression"!==e.type?null:e.computed?i(e.property):"Identifier"!==e.property.type?null:e.property.name},getPropTokens:function(e,t){const n=e.getSourceCode().getTokens(t),r=[];return n.forEach(((e,t,n)=>{"JSXIdentifier"===e.type&&"Punctuator"===n[t+1].type&&"="===n[t+1].value&&r.push(e)})),r},getRequireCondModules:R,getRequireModuleName:k,getRequireModuleNode:L,getReturnComment:function(e,t){return e.getSourceCode().getCommentsBefore(t.body)[0]},getValueImportNode:j,getValueImportVariables:function(e,t){const n=[];for(const r of e.getSourceCode().ast.body)"ImportDeclaration"!==r.type||r.source.value!==t||P(r)||r.specifiers.filter((e=>{null!=e.importKind&&"value"!==e.importKind||n.push(e.local.name)}));return n},hasValueImport:function(e){return e.getSourceCode().ast.body.some((e=>"ImportDeclaration"===e.type&&!P(e)))},getVariable:function(e,t){let n=t;for(;n;){const t=n.set.get(e);if(t)return t;n=n.upper}return null},insertRequireStatement:function(e,t,n,r=""){if(""===n)throw new Error("Name must be a string with length larger than 0");const i=`const ${n} = require('${n}${r}');`,o=[];e.getSourceCode().ast.body.forEach((e=>{if("VariableDeclaration"===e.type&&1===e.declarations.length&&"VariableDeclarator"===e.declarations[0].type&&e.declarations[0].init){const t=k(e.declarations[0].init);null!=t&&o.push({name:t,node:e})}}));const a=o.find((e=>e.name>=n));if(a){if(a.name.replace(r,"")===n)return[];const e=o[0];if(u(a.name)&&c(n)){if(e!==a){const e=a.node.range[0];return[t.removeRange([e-1,e-1]),t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n\n")]}return[t.insertTextBefore(a.node,i+"\n")]}if(o.length>0){const e=o[o.length-1],r=c(e.name)&&u(n)?"\n":"";return[t.insertTextAfter(e.node,"\n"+i+r)]}{const n=e.getSourceCode().ast.body,r=n[0];return"ExpressionStatement"===r.type&&"Literal"===r.expression.type&&"use 
strict"===r.expression.value?[t.insertTextBefore(n[1],i+"\n")]:[t.insertTextBefore(n[0],i+"\n")]}},insertValueImportStatement:function(e,t,n,r="",i){if(""===n)throw new Error("Name must be a string with length larger than 0");const o=`import ${i??n} from '${n}${r}';`,a=[];e.getSourceCode().ast.body.forEach((e=>{if(x(e)){const t=function(e){if(m(e))return"string"!=typeof e.arguments[0].value?null:T(e.arguments[0].value);if(C(e))return e.source.value;return null}(e);null!=t&&a.push({name:t,node:e})}}));const l=a.find((e=>e.name>=n));if(l){if(l.name.replace(r,"")===n)return[];const e=a[0];if(u(l.name)&&c(n)){if(e!==l){const e=l.node.range[0];return[t.removeRange([e-1,e-1]),t.insertTextBefore(l.node,o+"\n\n")]}return[t.insertTextBefore(l.node,o+"\n\n")]}return[t.insertTextBefore(l.node,o+"\n")]}if(a.length>0){const e=a[a.length-1],r=c(e.name)&&u(n)?"\n":"";return[t.insertTextAfter(e.node,"\n"+o+r)]}{const n=e.getSourceCode().ast.body,r=n[0];return"ExpressionStatement"===r.type&&"Literal"===r.expression.type&&"use strict"===r.expression.value?[t.insertTextBefore(n[1],o+"\n")]:[t.insertTextBefore(n[0],o+"\n")]}},isAnyKindOfImport:C,isAnyKindOfModuleCall:function(e){return m(e)||I(e)||v(e)||S(e)||E(e)||d(e)},isAnyKindOfRequireCall:m,isAnyKindOfRequireVariableDeclaration:p,isAnyKindOfRequireVariableDeclarator:f,isClientJSResource:E,isFbSourceRepo:r,isGraphQLTemplate:function(e){return"Identifier"===e.tag.type&&"graphql"===e.tag.name&&1===e.quasi.quasis.length},isInsideMethod:function(e,t){return e.getAncestors().some((e=>"MethodDefinition"===e.type&&"Identifier"===e.key.type&&e.key.name===t))},isJSResource:v,isJSResourceForInteraction:S,isModuleRef:function(e){return"Literal"===e.type&&"string"==typeof e.value&&e.value.startsWith("m#")},isOnlyTypeImport:P,isOnlyTypeExport:function(e){return"type"===e.exportKind},isReferenced:function(e){const t=e.parent;switch(t.type){case"MemberExpression":case"JSXMemberExpression":return 
t.property===e&&!0===t.computed||t.object===e;case"MetaProperty":case"ImportDefaultSpecifier":case"ImportNamespaceSpecifier":case"ImportSpecifier":case"LabeledStatement":case"RestElement":case"ObjectPattern":case"ArrayPattern":return!1;case"Property":case"MethodDefinition":return t.key===e&&t.computed;case"VariableDeclarator":case"ClassDeclaration":case"ClassExpression":return t.id!==e;case"ArrowFunctionExpression":case"FunctionDeclaration":case"HookDeclaration":case"FunctionExpression":for(let n=0;ne.writeExpr));return null!=i?i.writeExpr:null},resolveModuleSource:function(e,t){const n=s(e.getScope(),t);if(null==n)return null;const r=n.defs[0];switch(r.node.type){case"VariableDeclarator":{const e=r.node.init;if(null!=e)return k(e)||w(e)||N(e)||q(e);break}case"ImportNamespaceSpecifier":case"ImportSpecifier":case"ImportDefaultSpecifier":return r.node.parent.source.value}return null},stripModuleRef:T,uncast:O};var F=t(Object.freeze({__proto__:null,isCommaOrSemiToken:function(e){return"Punctuator"===e.type&&(","===e.value||";"===e.value)}}));const{isCommaOrSemiToken:M}=F;var A={getInlineComments:function(e,t,n=M){const r=e.ast.comments.filter((e=>e.loc.start.line===t.loc.end.line&&e.range[0]>t.range[1])).sort(((e,t)=>e.range[0]-t.range[0])),i=[];let o=t;for(const t of r){const r=e.getTokensBetween(o,t);if(r.length>0){if(!n)break;if(!r.every(n))break}i.push(t),o=t}return i},getLeadingComments:function(e,t,n){const r=e.getCommentsBefore(t),i=[];let o=t;for(let t=r.length-1;t>=0;t-=1){const a=r[t];if(a===n)break;if(a.loc.end.line===o.loc.start.line){i.unshift(a),o=a;continue}if(a.loc.end.line!==o.loc.start.line-1)break;const l=e.getTokenBefore(a);if(l&&l.loc.end.line===a.loc.start.line)break;i.unshift(a),o=a}return i}};var V=function(e){return e.getSourceCode().ast.docblock?.comment};const{getInlineComments:$,getLeadingComments:K}=A,{isCommaOrSemiToken:_}=F;function J(e){return"@"===e[0]||":"===e[0]}const X=/\d{1,3}(\.\d+)?%/;function U(e){switch(typeof 
e){case"number":return{isSafeNumericString:!0,isPercentage:!1};case"boolean":return{isSafeNumericString:!1,isPercentage:!1}}return isNaN(e)||isNaN(parseFloat(e))?X.test(e)?{isSafeNumericString:!0,isPercentage:!0}:{isSafeNumericString:!1,isPercentage:!1}:{isSafeNumericString:!0,isPercentage:!1}}function z(e){return e.reduce((([e,t],[n,r])=>[Math.min(e,n),Math.max(t,r)]),[Number.MAX_SAFE_INTEGER,0])}var G={compareNames:function(e,t,n=!1){if("number"==typeof e&&"number"==typeof t)return e-t;const r=String(e),i=String(t);if(""===r&&""!==i)return-1;if(""!==r&&""===i)return 1;if(""===r&&""===i)return 0;const{isSafeNumericString:o,isPercentage:a}=U(r),{isSafeNumericString:l,isPercentage:s}=U(i);if(o&&l){const e=Number.parseFloat(r),t=Number.parseFloat(i);if(e===t){if(!a&&s)return-1;if(a&&!s)return 1}return e-t}if(n){const e=J(r),t=J(i);if(!e&&t)return-1;if(e&&!t)return 1}const u=o||r[0].toLowerCase()===r[0].toUpperCase(),c=l||i[0].toLowerCase()===i[0].toUpperCase();if(!u&&c)return 1;if(u&&!c)return-1;if(!u&&!c){const e=r[0].toLowerCase()===r[0],t=i[0].toLowerCase()===i[0];if(!e&&t)return-1;if(e&&!t)return 1}return r.localeCompare(i,"en",{caseFirst:"upper",sensitivity:"base"})},getEncompassingRange:z,getNodeTextWithComments:function(e,t,n,{shouldIncludeNextTokenInRange:r=_,ensureTextFollowsNode:i,inlineCommentIgnoreToken:o}={}){const a=$(e,t,o),l=[...K(e,t,n).map((({range:e})=>e)),t.range,...a.map((({range:e})=>e))],s=e.getTokenAfter(t);s&&!0===r?.(s)&&l.push(s.range);const u=z(l);let c=e.text.slice(u[0],u[1]);const p=t.range[1]-u[0];if(null!=i){e.getTokenAfter(t)?.value!==i&&(c=c.slice(0,p)+i+c.slice(p))}return{range:u,text:c}},isComma:function(e){return"Punctuator"===e.type&&","===e.value}};const Q=B,{getInlineComments:W,getLeadingComments:H}=A,Y=V,{compareNames:Z}=G,ee=0,te={default:1,namespace:2,named:3},ne={default:4,namespace:5,named:6},re={default:7,namespace:8,named:9},ie=0,oe=1,ae=2,le=3,se=99;var ue={meta:{fixable:"code",messages:{incorrectOrder:"Requires should 
be sorted alphabetically"}},create(e){const t=e.getSourceCode(),n=function(e){const t=Y(e);if(null!=t)return t;const n=e.getSourceCode().ast,r=n.body.length>0?n.body[0]:n,i=e.getSourceCode().getCommentsBefore(r)[0];return i&&"Block"===i.type?i:null}(e);if(n&&(n.value.includes("* @generated")||n.value.includes("* @partially-generated"))&&!n.value.includes("* @xpr_allow_generated_lint"))return{};const r=Object.freeze({typeImport:{priority:10,uppercase:[],lowercase:[],tiebreakFunction:s},valueImport:{priority:20,uppercase:[],lowercase:[],tiebreakFunction:s},requiresUsedByOtherRequires:{priority:30,uppercase:[],lowercase:[],tiebreakFunction:u},require:{priority:40,uppercase:[],lowercase:[],tiebreakFunction:u}}),i=[];let o=null,a=null;const l=new Set;return{Program(n){for(const e of n.body)switch(e.type){case"ImportDeclaration":if("type"===e.importKind||"typeof"===e.importKind)d(r.typeImport,e,e.source.value,!0);else{const n=t.getLastToken(e.source,(e=>"from"===e.value));0===e.specifiers.length&&null==n?g(e):d(r.valueImport,e,e.source.value)}break;case"VariableDeclaration":{const t=e.declarations[0]?.init;if(1!==e.declarations.length||null==t){g(e);break}f(t,e);break}default:g(e)}const s=[];for(const e of Object.keys(r)){const t=r[e];s.push({priority:t.priority,nodes:t.uppercase.sort(((e,n)=>c(e,n,t.tiebreakFunction)))}),s.push({priority:t.priority+5,nodes:t.lowercase.sort(((e,n)=>c(e,n,t.tiebreakFunction)))})}function u(e){return[e.leadingComments.length?e.leadingComments.map((e=>t.getText(e))).join("\n")+"\n":"",e.inlineComments.length?" 
"+e.inlineComments.map((e=>t.getText(e))).join(" "):""]}const p=s.filter((e=>0!==e.nodes.length)).sort(((e,t)=>e.priority-t.priority)).map((e=>e.nodes.map((e=>{const[n,r]=u(e),i=function(e,t){const n=t.node,r=e.getText(n),i=(()=>{if("ImportDeclaration"===n.type&&null!=n.specifiers.find((e=>"ImportSpecifier"===e.type))){const t=e.getFirstToken(n,(e=>"Punctuator"===e.type&&"{"===e.value)),r=e.getFirstToken(n,(e=>"Punctuator"===e.type&&"}"===e.value));return null==t||null==r?null:e.getText().substring(t.range[0],r.range[1])}if("VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type){const t=n.declarations[0].id.typeAnnotation,r=e.getText(n.declarations[0].id);return t?r.substr(0,t.range[0]-n.declarations[0].id.range[0]):r}return null})();if(null==i)return r;let o=[],a=null;"ImportDeclaration"===n.type?o=n.specifiers.map((t=>"ImportDefaultSpecifier"===t.type||"ImportNamespaceSpecifier"===t.type?null:{leadingComments:e.getCommentsBefore(t),name:t.imported.name,node:t})).filter(Boolean):"VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type&&(o=n.declarations[0].id.properties.map((t=>{if("ExperimentalRestProperty"===t.type||"RestElement"===t.type)return a=t,null;const n=t.key,r="Literal"===n.type?String(n.value):"Identifier"===n.type?t.computed?null:n.name:null;return null==r?null:{leadingComments:e.getCommentsBefore(t),name:r,node:t}})).filter(Boolean));if(o.length<=1)return r;const l=i.indexOf("\n")>=0,s=o.sort(((e,t)=>Z(e.name,t.name))).map((e=>e.node));null!=a&&s.push(a);const u=s.map((t=>{const n=e.getCommentsBefore(t).map((t=>e.getText(t))),r=n.length?n.join(""):"";return l?(r?" 
"+r+"\n":"")+" "+e.getText(t):(r?r+" ":"")+e.getText(t)})),c=(()=>{const t=[];if("ImportDeclaration"===n.type){if(t.push(...e.getCommentsBefore(n.source)),l&&null!=n.specifiers.find((e=>"ImportSpecifier"===e.type))){const r=e.getTokenBefore(n.source,(e=>"Punctuator"===e.type&&"}"===e.value));null!=r&&t.push(...e.getCommentsBefore(r))}}else if("VariableDeclaration"===n.type&&"ObjectPattern"===n.declarations[0].id.type){const r=e.getLastToken(n.declarations[0].id);null!=r&&t.push(...e.getCommentsBefore(r))}return t})(),p=l&&c.length?c.map((t=>" "+e.getText(t))).join("\n")+"\n":"",f=l?"\n"+u.map((e=>e.includes("...")?`${e}\n`:`${e},\n`)).join("")+p:u.join(", ");return r.replace(i,(()=>`{${f}}`))}(t,e);return n+i+r})).join("\n"))).join("\n\n"),m=o;if(null==m||null==a)return;const y=m.leadingComments.length?m.leadingComments[0].range[0]:m.node.range[0],h=a.inlineComments.length>0?a.inlineComments[a.inlineComments.length-1].range[1]:a.node.range[1];t.getText(m.node,m.node.range[0]-y,h-m.node.range[1])!==p&&e.report({node:m.node,messageId:"incorrectOrder",fix(e){const n=i.filter((({node:e})=>e.range[0]>=y&&e.range[1]<=h)).map((e=>{const[n,r]=u(e),i=t.getText(e.node);return{range:e.node.range,text:n+i+r}})).flat().concat(t.getAllComments().filter((e=>e.range[0]>=y&&e.range[1]<=h&&!l.has(e))).map((e=>({range:e.range,text:t.getText(e)})))).sort(((e,t)=>e.range[0]-t.range[0]||e.range[1]-t.range[1])).map((({text:e})=>e)).join("\n");return e.replaceTextRange([y,h],[p,n].filter(Boolean).join("\n\n"))}})}};function s(e){if(0===e.specifiers.length)return ee;const t=(()=>{switch(e.importKind){default:case"value":return te;case"type":return ne;case"typeof":return re}})();return e.specifiers.find((e=>"ImportDefaultSpecifier"===e.type))?t.default:e.specifiers.find((e=>"ImportNamespaceSpecifier"===e.type))?t.namespace:t.named}function u(e){if("ExpressionStatement"===e.type)return ie;switch(e.declarations[0].id.type){case"Identifier":return oe;case"ObjectPattern":return 
ae;case"ArrayPattern":return le}return se}function c(e,t,n){const r=Z(e.moduleName,t.moduleName);if(0!==r)return r;const i=n(e.node)-n(t.node);return 0!==i?i:e.node.loc.start.line-t.node.loc.start.line}function p(e,t){if(null==e)throw new Error("Missing required module name");return null!=t?`${e}_${t}`:e}function f(e,n,i){const o=Q.getRequireModuleName(e);if(Q.isRequire(e)){const e=p(o,i);d("requireCond"===e||"requireDeferred"===e||"requireDeferredForDisplay"===e?r.requiresUsedByOtherRequires:r.require,n,p(o,i))}else if(Q.isRequireDeferred(e)||Q.isRequireDeferredForDisplay(e))d(r.require,n,p(o,i));else{if(Q.isRequireCond(e)){if("VariableDeclaration"===n.type){const e=t.getText(n.declarations[0].id);return void d(r.require,n,p(e,i))}}else{if("MemberExpression"===e.type)return void f(e.object,n,p(t.getText(e.property),i));if("CallExpression"===e.type)return void f(e.callee,n,i)}g(n)}}function m(e){e.leadingComments.forEach((e=>l.add(e))),e.inlineComments.forEach((e=>l.add(e)));t.getCommentsInside(e.node).forEach((e=>l.add(e)))}function d(e,r,i,l=!1){const s={inlineComments:W(t,r),leadingComments:H(t,r,n),moduleName:i,node:r};if(l)e.uppercase.push(s);else{const t=i[0]||"";t.toLowerCase()===t?e.lowercase.push(s):e.uppercase.push(s)}null==o&&(o=s),a=s,m(s)}function g(e){const r={inlineComments:W(t,e),leadingComments:H(t,e,n),node:e};i.push(r),m(r)}}};var ce=ue;module.exports=ce; diff --git a/packages/react-relay/ReactRelayFragmentContainer.js b/packages/react-relay/ReactRelayFragmentContainer.js index 4a52bf4e3494f..0ea602c727cb7 100644 --- a/packages/react-relay/ReactRelayFragmentContainer.js +++ b/packages/react-relay/ReactRelayFragmentContainer.js @@ -242,12 +242,14 @@ function createContainerWithFragments< // eslint-disable-next-line no-unused-vars const {componentRef, __relayContext, __rootIsQueryRenderer, ...props} = this.props; - return React.createElement(Component, { - ...props, - ...this.state.data, - ref: componentRef, - relay: this.state.relayProp, - }); + 
return ( + + ); } }; } diff --git a/packages/react-relay/ReactRelayLocalQueryRenderer.js b/packages/react-relay/ReactRelayLocalQueryRenderer.js index 4ac1abc7beabd..354aae8b59a85 100644 --- a/packages/react-relay/ReactRelayLocalQueryRenderer.js +++ b/packages/react-relay/ReactRelayLocalQueryRenderer.js @@ -40,14 +40,17 @@ const queryRendererContext: ReactRelayQueryRendererContextType = { rootIsQueryRenderer: true, }; -function useDeepCompare(value: T): T { +hook useDeepCompare(value: T): T { const latestValue = React.useRef(value); + // $FlowFixMe[react-rule-unsafe-ref] if (!areEqual(latestValue.current, value)) { if (__DEV__) { deepFreeze(value); } + // $FlowFixMe[react-rule-unsafe-ref] latestValue.current = value; } + // $FlowFixMe[react-rule-unsafe-ref] return latestValue.current; } diff --git a/packages/react-relay/ReactRelayPaginationContainer.js b/packages/react-relay/ReactRelayPaginationContainer.js index eeb5b2a4fefa2..55050930243ab 100644 --- a/packages/react-relay/ReactRelayPaginationContainer.js +++ b/packages/react-relay/ReactRelayPaginationContainer.js @@ -595,7 +595,9 @@ function createContainerWithFragments< PAGE_INFO, connectionData, ); + // $FlowFixMe[invalid-computed-prop] const edges = connectionData[EDGES]; + // $FlowFixMe[invalid-computed-prop] const pageInfo = connectionData[PAGE_INFO]; if (edges == null || pageInfo == null) { return null; diff --git a/packages/react-relay/ReactRelayQueryFetcher.js b/packages/react-relay/ReactRelayQueryFetcher.js index 04658c4573e57..184d9f77ea1e9 100644 --- a/packages/react-relay/ReactRelayQueryFetcher.js +++ b/packages/react-relay/ReactRelayQueryFetcher.js @@ -27,11 +27,7 @@ const { isRelayModernEnvironment, } = require('relay-runtime'); -type OnDataChange = ({ - error?: Error, - snapshot?: Snapshot, - ... 
-}) => void; +type OnDataChange = ({error?: Error, snapshot?: Snapshot, ...}) => void; /** The external API of 'fetch' **/ export type FetchOptions = { diff --git a/packages/react-relay/ReactRelayQueryRenderer.js b/packages/react-relay/ReactRelayQueryRenderer.js index f82a203fa8423..1f623ee83ae83 100644 --- a/packages/react-relay/ReactRelayQueryRenderer.js +++ b/packages/react-relay/ReactRelayQueryRenderer.js @@ -27,7 +27,6 @@ const ReactRelayQueryFetcher = require('./ReactRelayQueryFetcher'); const ReactRelayQueryRendererContext = require('./ReactRelayQueryRendererContext'); const areEqual = require('areEqual'); const React = require('react'); -const {RelayFeatureFlags} = require('relay-runtime'); const { createOperationDescriptor, deepFreeze, @@ -37,11 +36,7 @@ const { type RetryCallbacks = { handleDataChange: | null - | (({ - error?: Error, - snapshot?: Snapshot, - ... - }) => void), + | (({error?: Error, snapshot?: Snapshot, ...}) => void), handleRetryAfterError: null | ((error: Error) => void), }; @@ -66,14 +61,14 @@ const queryRendererContext: ReactRelayQueryRendererContextType = { rootIsQueryRenderer: true, }; -export type Props = { +export type Props = $ReadOnly<{ cacheConfig?: ?CacheConfig, fetchPolicy?: 'store-and-network' | 'network-only', environment: IEnvironment, query: ?GraphQLTaggedNode, render: (renderProps: RenderProps) => React.Node, variables: Variables, -}; +}>; type State = { error: Error | null, @@ -131,6 +126,7 @@ class ReactRelayQueryRenderer extends React.Component { this._maybeHiddenOrFastRefresh = false; + // $FlowFixMe[incompatible-type] this.state = { prevPropsEnvironment: props.environment, prevPropsVariables: props.variables, @@ -149,7 +145,7 @@ class ReactRelayQueryRenderer extends React.Component { static getDerivedStateFromProps( nextProps: Props, prevState: State, - ): $Shape | null { + ): Partial | null { if ( prevState.prevQuery !== nextProps.query || prevState.prevPropsEnvironment !== nextProps.environment || @@ -174,31 +170,8 
@@ class ReactRelayQueryRenderer extends React.Component { const newState = resetQueryStateForUpdate(this.props, prevState); const {requestCacheKey, queryFetcher} = newState; if (requestCacheKey != null && requestCache[requestCacheKey] != null) { - if (RelayFeatureFlags.ENABLE_QUERY_RENDERER_SET_STATE_PREVENTION) { - const fetchResult = queryFetcher.getFetchResult(); - if (fetchResult != null) { - const snapshot = fetchResult.snapshot ?? null; - const error = fetchResult.error ?? null; - - const {requestCacheKey: prevRequestCacheKey} = prevState; - if (prevRequestCacheKey != null) { - delete requestCache[prevRequestCacheKey]; - } - - newState.renderProps = getRenderProps( - error, - snapshot, - queryFetcher, - prevState.retryCallbacks, - ); - newState.snapshot = snapshot; - newState.requestCacheKey = null; - } else { - queryFetcher.setOnDataChange(this._handleDataChange); - } - } else { - queryFetcher.setOnDataChange(this._handleDataChange); - } + // $FlowFixMe[incompatible-use] + queryFetcher.setOnDataChange(this._handleDataChange); } return newState; }); @@ -366,7 +339,7 @@ function getRequestCacheKey( function resetQueryStateForUpdate( props: Props, prevState: State, -): $Shape { +): Partial { const {query} = props; const prevSelectionReferences = @@ -403,9 +376,9 @@ function fetchQueryAndComputeStateFromProps( queryFetcher: ReactRelayQueryFetcher, retryCallbacks: RetryCallbacks, requestCacheKey: ?string, -): $Shape { +): Partial { const {environment, query, variables, cacheConfig} = props; - const genericEnvironment = (environment: IEnvironment); + const genericEnvironment: IEnvironment = environment; if (query) { const request = getRequest(query); const operation = createOperationDescriptor( diff --git a/packages/react-relay/ReactRelayTypes.js b/packages/react-relay/ReactRelayTypes.js index 277a23c61c8b8..81a9e4b47a9d8 100644 --- a/packages/react-relay/ReactRelayTypes.js +++ b/packages/react-relay/ReactRelayTypes.js @@ -102,6 +102,8 @@ export type $FragmentRef 
= { * `props.relay` and returns the props of the container. */ // prettier-ignore +// $FlowFixMe[extra-type-arg] xplat redux flow type error +// $FlowFixMe[deprecated-type] export type $RelayProps = $ObjMap< $Diff, & (( T) => T) diff --git a/packages/react-relay/__mocks__/RelayTestRenderer.js b/packages/react-relay/__mocks__/RelayTestRenderer.js index 405183e66c223..506a53ab12248 100644 --- a/packages/react-relay/__mocks__/RelayTestRenderer.js +++ b/packages/react-relay/__mocks__/RelayTestRenderer.js @@ -58,6 +58,7 @@ class RelayTestRenderer extends React.Component { }; render(): React.Element { + // $FlowFixMe[prop-missing] Suppressed after making React.Element fully opaque const childProps = this.props.children.props; const newProps = {...childProps, ...this.state.data}; return ( diff --git a/packages/react-relay/__tests__/ClientEdges-test.js b/packages/react-relay/__tests__/ClientEdges-test.js index 84264c24100c2..cf6d137d7fe81 100644 --- a/packages/react-relay/__tests__/ClientEdges-test.js +++ b/packages/react-relay/__tests__/ClientEdges-test.js @@ -33,270 +33,275 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); -describe('ClientEdges', () => { - let networkSink; - let environment; - let fetchFn; - beforeEach(() => { - fetchFn = jest.fn(() => - // $FlowFixMe[missing-local-annot] Error found while enabling LTI on this file - RelayObservable.create(sink => { - networkSink = sink; - }), - ); +let networkSink; +let environment; +let fetchFn; +beforeEach(() => { + fetchFn = jest.fn(() => + // $FlowFixMe[missing-local-annot] Error found while enabling LTI on this file + RelayObservable.create(sink => { + networkSink = sink; + }), + ); - environment = new Environment({ - store: new LiveResolverStore( - new RecordSource({ - 'client:root': { - __id: 
'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - id: '1', - __typename: 'User', - }, - }), - ), - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - network: Network.create(fetchFn), - }); + environment = new Environment({ + store: new LiveResolverStore( + new RecordSource({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + }, + }), + ), + // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file + network: Network.create(fetchFn), }); +}); - it('should fetch and render client-edge query', () => { - function TestComponent() { - return ( - - - - - - ); - } +it('should fetch and render client-edge query', () => { + function TestComponent() { + return ( + + + + + + ); + } - const variables = {id: '4'}; - function InnerComponent() { - const data = useLazyLoadQuery( - graphql` - query ClientEdgesTest1Query($id: ID!) { - me { - client_node(id: $id) @waterfall { - ... on User { - name - } + const variables = {id: '4'}; + function InnerComponent() { + const data = useLazyLoadQuery( + graphql` + query ClientEdgesTest1Query($id: ID!) { + me { + client_node(id: $id) @waterfall { + ... 
on User { + name } } } - `, - variables, - ); - return data.me?.client_node?.name; - } - - let renderer; - TestRenderer.act(() => { - renderer = TestRenderer.create(); - }); - expect(fetchFn.mock.calls.length).toEqual(1); - // We should send the client-edge query - // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][0].name).toBe( - 'ClientEdgeQuery_ClientEdgesTest1Query_me__client_node', + } + `, + variables, ); - // Check variables - // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][1]).toEqual(variables); - expect(renderer?.toJSON()).toBe('Loading'); + return data.me?.client_node?.name; + } - TestRenderer.act(() => { - // This should resolve client-edge query - networkSink.next({ - data: { - node: { - id: '4', - __typename: 'User', - name: 'Alice', - }, + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create(); + }); + expect(fetchFn.mock.calls.length).toEqual(1); + // We should send the client-edge query + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][0].name).toBe( + 'ClientEdgeQuery_ClientEdgesTest1Query_me__client_node', + ); + // Check variables + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][1]).toEqual(variables); + expect(renderer?.toJSON()).toBe('Loading'); + + TestRenderer.act(() => { + // This should resolve client-edge query + networkSink.next({ + data: { + node: { + id: '4', + __typename: 'User', + name: 'Alice', }, - }); - jest.runAllImmediates(); + }, }); - expect(renderer?.toJSON()).toBe('Alice'); + jest.runAllImmediates(); }); + expect(renderer?.toJSON()).toBe('Alice'); +}); - it('should fetch and render `null` for client-edge query that returns `null`.', () => { - function TestComponent() { - return ( - - - - - - ); - } +// The Relay store does not have a concept of _records_ being 
null. This means that when a Node +// query returns null, we can't actually write to the store "The record with this ID is null". +// Instead, we just write that `node(id: 4): null` into the root record in the store. +// +// This is a general limitation of node fetches in Relay today. +it('should fetch and render `undefined` for client-edge to server query that returns `null`.', () => { + function TestComponent() { + return ( + + + + + + ); + } - const variables = {id: '4'}; - function InnerComponent() { - const data = useLazyLoadQuery( - graphql` - query ClientEdgesTest2Query($id: ID!) { - me { - client_node(id: $id) @waterfall { - ... on User { - name - } + const variables = {id: '4'}; + function InnerComponent() { + const data = useLazyLoadQuery( + graphql` + query ClientEdgesTest2Query($id: ID!) { + me { + client_node(id: $id) @waterfall { + ... on User { + name } } } - `, - variables, - ); - return data.me?.client_node?.name ?? 'MISSING'; + } + `, + variables, + ); + if (data.me?.client_node === undefined) { + return 'client_node is undefined'; } + return data.me?.client_node?.name ?? 
'MISSING'; + } - let renderer; - TestRenderer.act(() => { - renderer = TestRenderer.create(); - }); - expect(fetchFn.mock.calls.length).toEqual(1); - // We should send the client-edge query - // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][0].name).toBe( - 'ClientEdgeQuery_ClientEdgesTest2Query_me__client_node', - ); - // Check variables - // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][1]).toEqual(variables); - expect(renderer?.toJSON()).toBe('Loading'); + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create(); + }); + expect(fetchFn.mock.calls.length).toEqual(1); + // We should send the client-edge query + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][0].name).toBe( + 'ClientEdgeQuery_ClientEdgesTest2Query_me__client_node', + ); + // Check variables + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][1]).toEqual(variables); + expect(renderer?.toJSON()).toBe('Loading'); - TestRenderer.act(() => { - // This should resolve client-edge query - networkSink.next({ - data: { - node: null, - }, - }); - // It is important to complete network request here, - // otherwise, client-edge query will think that the query is still in progress - // and will show a suspense placeholder - networkSink.complete(); - jest.runAllImmediates(); + TestRenderer.act(() => { + // This should resolve client-edge query + networkSink.next({ + data: { + node: null, + }, }); - expect(renderer?.toJSON()).toBe('MISSING'); + // It is important to complete network request here, + // otherwise, client-edge query will think that the query is still in progress + // and will show a suspense placeholder + networkSink.complete(); + jest.runAllImmediates(); }); + expect(renderer?.toJSON()).toBe('client_node is undefined'); + 
expect(fetchFn.mock.calls.length).toBe(1); +}); - it('should throw for missing client-edge field data marked with @required', () => { - function TestComponent() { - return ( - - - - - - ); - } +it('should throw for missing client-edge field data marked with @required', () => { + function TestComponent() { + return ( + + + + + + ); + } - const variables = {id: '4'}; - function InnerComponent() { - const data = useLazyLoadQuery( - graphql` - query ClientEdgesTest3Query($id: ID!) { - me { - client_node(id: $id) @waterfall @required(action: THROW) { - ... on User { - name - } + const variables = {id: '4'}; + function InnerComponent() { + const data = useLazyLoadQuery( + graphql` + query ClientEdgesTest3Query($id: ID!) { + me { + client_node(id: $id) @waterfall @required(action: THROW) { + ... on User { + name } } } - `, - variables, - ); - return data.me?.client_node?.name; - } + } + `, + variables, + ); + return data.me?.client_node?.name; + } - let renderer; - TestRenderer.act(() => { - renderer = TestRenderer.create(); + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create(); + }); + expect(fetchFn.mock.calls.length).toEqual(1); + // We should send the client-edge query + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][0].name).toBe( + 'ClientEdgeQuery_ClientEdgesTest3Query_me__client_node', + ); + // Check variables + // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file + expect(fetchFn.mock.calls[0][1]).toEqual(variables); + expect(renderer?.toJSON()).toBe('Loading'); + + TestRenderer.act(() => { + networkSink.next({ + data: { + node: null, + }, }); - expect(fetchFn.mock.calls.length).toEqual(1); - // We should send the client-edge query - // $FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][0].name).toBe( - 'ClientEdgeQuery_ClientEdgesTest3Query_me__client_node', - ); - // Check variables - // 
$FlowFixMe[invalid-tuple-index] Error found while enabling LTI on this file - expect(fetchFn.mock.calls[0][1]).toEqual(variables); - expect(renderer?.toJSON()).toBe('Loading'); + jest.runAllImmediates(); + }); + // Still waiting, maybe the data will be there + expect(renderer?.toJSON()).toBe('Loading'); + expect(() => { TestRenderer.act(() => { - networkSink.next({ - data: { - node: null, - }, - }); + // This should resolve client-edge query + networkSink.complete(); jest.runAllImmediates(); }); - // Still waiting, maybe the data will be there - expect(renderer?.toJSON()).toBe('Loading'); + }).toThrow( + "Relay: Missing @required value at path 'me.client_node' in 'ClientEdgesTest3Query'.", + ); + expect(renderer?.toJSON()).toBe(null); +}); - expect(() => { - TestRenderer.act(() => { - // This should resolve client-edge query - networkSink.complete(); - jest.runAllImmediates(); - }); - }).toThrow( - "Relay: Missing @required value at path 'me.client_node' in 'ClientEdgesTest3Query'.", +it('should throw for missing client-edge (client object) field data marked with @required', () => { + function TestComponent() { + return ( + + + + + ); - expect(renderer?.toJSON()).toBe(null); - }); - - it('should throw for missing client-edge (client object) field data marked with @required', () => { - function TestComponent() { - return ( - - - - - - ); - } - // See UserClientEdgeClientObjectResolver: for `0` we should return `null` for `client_object`. - const variables = {id: '0'}; - function InnerComponent() { - const data = useLazyLoadQuery( - graphql` - query ClientEdgesTest4Query($id: ID!) { - me { - client_object(id: $id) @required(action: THROW) { - description - } + } + const variables = {return_null: true}; + function InnerComponent() { + const data = useLazyLoadQuery( + graphql` + query ClientEdgesTest4Query($return_null: Boolean!) 
{ + me { + client_object(return_null: $return_null) @required(action: THROW) { + description } } - `, - variables, - ); - return data.me?.client_object?.description; - } - expect(() => { - TestRenderer.act(() => { - TestRenderer.create(); - }); - }).toThrow( - "Relay: Missing @required value at path 'me.client_object' in 'ClientEdgesTest4Query'.", + } + `, + variables, ); - expect(fetchFn.mock.calls.length).toEqual(0); - }); + + return data.me?.client_object?.description; + } + expect(() => { + TestRenderer.act(() => { + TestRenderer.create(); + }); + }).toThrow( + "Relay: Missing @required value at path 'me.client_object' in 'ClientEdgesTest4Query'.", + ); + expect(fetchFn.mock.calls.length).toEqual(0); }); diff --git a/packages/react-relay/__tests__/ClientOnlyQueries-test.js b/packages/react-relay/__tests__/ClientOnlyQueries-test.js index ea92f6f191c84..ee124d5323c2f 100644 --- a/packages/react-relay/__tests__/ClientOnlyQueries-test.js +++ b/packages/react-relay/__tests__/ClientOnlyQueries-test.js @@ -51,12 +51,10 @@ function createEnvironment( beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); describe('Client-only queries', () => { diff --git a/packages/react-relay/__tests__/LiveResolvers-test.js b/packages/react-relay/__tests__/LiveResolvers-test.js index 50d8337044694..03e32184daffc 100644 --- a/packages/react-relay/__tests__/LiveResolvers-test.js +++ b/packages/react-relay/__tests__/LiveResolvers-test.js @@ -12,17 +12,16 @@ 'use strict'; import type {IEnvironment} from 'relay-runtime'; +import type {RelayFieldLoggerEvent} from 'relay-runtime/store/RelayStoreTypes'; import type {MutableRecordSource} from 'relay-runtime/store/RelayStoreTypes'; const React = require('react'); const { RelayEnvironmentProvider, useClientQuery, - useFragment: useFragment_LEGACY, - 
useLazyLoadQuery: useLazyLoadQuery_LEGACY, + useFragment, + useLazyLoadQuery, } = require('react-relay'); -const useFragment_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE'); -const useLazyLoadQuery_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE'); const TestRenderer = require('react-test-renderer'); const {RelayFeatureFlags, getRequest} = require('relay-runtime'); const RelayNetwork = require('relay-runtime/network/RelayNetwork'); @@ -43,7 +42,6 @@ const RelayRecordSource = require('relay-runtime/store/RelayRecordSource'); const { disallowConsoleErrors, disallowWarnings, - expectToWarn, } = require('relay-test-utils-internal'); disallowWarnings(); @@ -51,997 +49,732 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; resetStore(); }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); -describe.each([ - ['React Cache', useLazyLoadQuery_REACT_CACHE, useFragment_REACT_CACHE], - ['Legacy', useLazyLoadQuery_LEGACY, useFragment_LEGACY], -])('Hook implementation: %s', (_hookName, useLazyLoadQuery, useFragment) => { - const usingReactCache = useLazyLoadQuery === useLazyLoadQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - beforeEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = usingReactCache; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = false; +test('Can read an external state resolver directly', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + 
}, }); + const FooQuery = graphql` + query LiveResolversTest1Query { + counter + } + `; - test('Can read an external state resolver directly', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - }, - }); - const FooQuery = graphql` - query LiveResolversTest1Query { - counter - } - `; + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); + const data = environment.lookup(operation.fragment).data; + expect(data).toEqual({ + counter: 0, + }); +}); - const data = environment.lookup(operation.fragment).data; - expect(data).toEqual({ - counter: 0, - }); +test('Environment subscribers see updates pushed from external data source', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + }, }); + const FooQuery = graphql` + query LiveResolversTest2Query { + counter + } + `; - test('Environment subscribers see updates pushed from external data source', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - }, - }); - const FooQuery = graphql` - query LiveResolversTest2Query { - counter - } - `; + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + 
gcReleaseBufferSize: 0, + }); - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); + let observedCounter = null; - let observedCounter = null; + const snapshot = environment.lookup(operation.fragment); + // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: + observedCounter = (snapshot.data: any).counter; - const snapshot = environment.lookup(operation.fragment); + const environmentUpdateHandler = jest.fn(() => { + const s = environment.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - observedCounter = (snapshot.data: any).counter; + observedCounter = (s.data: any).counter; + }); + const disposable = environment.subscribe( + snapshot, + // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file + environmentUpdateHandler, + ); - const environmentUpdateHandler = jest.fn(() => { - const s = environment.lookup(operation.fragment); - // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - observedCounter = (s.data: any).counter; - }); - const disposable = environment.subscribe( - snapshot, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - environmentUpdateHandler, - ); + // SETUP COMPLETE - // SETUP COMPLETE + // Read the initial value + expect(observedCounter).toBe(0); + expect(environmentUpdateHandler).not.toHaveBeenCalled(); - // Read the initial value - expect(observedCounter).toBe(0); - expect(environmentUpdateHandler).not.toHaveBeenCalled(); + // Increment and assert we get notified of the new value 
+ GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); + expect(observedCounter).toBe(1); - // Increment and assert we get notified of the new value - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); - expect(observedCounter).toBe(1); + // Unsubscribe then increment and assert don't get notified. + disposable.dispose(); + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); + expect(observedCounter).toBe(1); - // Unsubscribe then increment and assert don't get notified. - disposable.dispose(); - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); - expect(observedCounter).toBe(1); + // Explicitly read and assert we see the incremented value + // missed before due to unsubscribing. + const nextSnapshot = environment.lookup(operation.fragment); - // Explicitly read and assert we see the incremented value - // missed before due to unsubscribing. 
- const nextSnapshot = environment.lookup(operation.fragment); + expect(nextSnapshot.data).toEqual({ + counter: 2, + }); +}); - expect(nextSnapshot.data).toEqual({ - counter: 2, - }); +test('Relay Resolvers that read Live Resolvers see updates pushed from external data source', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + }, }); + const FooQuery = graphql` + query LiveResolversTest3Query { + counter_plus_one + } + `; - test('Relay Resolvers that read Live Resolvers see updates pushed from external data source', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - }, - }); - const FooQuery = graphql` - query LiveResolversTest3Query { - counter_plus_one - } - `; + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); + let observedCounterPlusOne = null; - let observedCounterPlusOne = null; + const snapshot = environment.lookup(operation.fragment); + // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: + observedCounterPlusOne = (snapshot.data: any).counter_plus_one; - const snapshot = environment.lookup(operation.fragment); + const environmentUpdateHandler = jest.fn(() => { + const s = environment.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have 
the nice types of reading a fragment through the actual APIs: - observedCounterPlusOne = (snapshot.data: any).counter_plus_one; + observedCounterPlusOne = (s.data: any).counter_plus_one; + }); + const disposable = environment.subscribe( + snapshot, + // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file + environmentUpdateHandler, + ); - const environmentUpdateHandler = jest.fn(() => { - const s = environment.lookup(operation.fragment); - // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - observedCounterPlusOne = (s.data: any).counter_plus_one; - }); - const disposable = environment.subscribe( - snapshot, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - environmentUpdateHandler, - ); + // SETUP COMPLETE - // SETUP COMPLETE + // Read the initial value + expect(observedCounterPlusOne).toBe(1); + expect(environmentUpdateHandler).not.toHaveBeenCalled(); - // Read the initial value - expect(observedCounterPlusOne).toBe(1); - expect(environmentUpdateHandler).not.toHaveBeenCalled(); + // Increment and assert we get notified of the new value + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); + expect(observedCounterPlusOne).toBe(2); - // Increment and assert we get notified of the new value - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); - expect(observedCounterPlusOne).toBe(2); + // Unsubscribe then increment and assert don't get notified. + disposable.dispose(); + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); + expect(observedCounterPlusOne).toBe(2); - // Unsubscribe then increment and assert don't get notified. 
- disposable.dispose(); - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - expect(environmentUpdateHandler).toHaveBeenCalledTimes(1); - expect(observedCounterPlusOne).toBe(2); - - // Explicitly read and assert we see the incremented value - // missed before due to unsubscribing. - const nextSnapshot = environment.lookup(operation.fragment); - expect(nextSnapshot.data).toEqual({ - counter_plus_one: 3, - }); + // Explicitly read and assert we see the incremented value + // missed before due to unsubscribing. + const nextSnapshot = environment.lookup(operation.fragment); + expect(nextSnapshot.data).toEqual({ + counter_plus_one: 3, }); +}); - // This triggers a potential edge case where the subscription is created before - // we create the record where we store the value. - test('Can handle a Live Resolver that triggers an update immediately on subscribe', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - }, - }); - const FooQuery = graphql` - query LiveResolversTest4Query { - ping - } - `; - - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); +// This triggers a potential edge case where the subscription is created before +// we create the record where we store the value. 
+test('Can handle a Live Resolver that triggers an update immediately on subscribe', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + }, + }); + const FooQuery = graphql` + query LiveResolversTest4Query { + ping + } + `; - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - const data = environment.lookup(operation.fragment).data; - expect(data).toEqual({ - ping: 'pong', - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, }); - test('Subscriptions created while in an optimistic state is in place get cleaned up correctly', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - name: 'Alice', - }, - }); - const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); + const data = environment.lookup(operation.fragment).data; + expect(data).toEqual({ + ping: 'pong', + }); +}); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); +test('Subscriptions created while in an optimistic state is in place get cleaned up correctly', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + name: 'Alice', + }, + }); + const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); - const update = environment.applyUpdate({ - storeUpdater: store => { - const alice = store.get('1'); - if (alice == null) { - throw new Error('Expected to have record "1"'); - } - const 
storeRoot = store.getRoot(); - storeRoot.setLinkedRecord(alice, 'me'); - }, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - const FooQuery = graphql` - query LiveResolversTestOptimisticUpdateQuery { - counter + const update = environment.applyUpdate({ + storeUpdater: store => { + const alice = store.get('1'); + if (alice == null) { + throw new Error('Expected to have record "1"'); } - `; - - const operation = createOperationDescriptor(FooQuery, {}); - - // Read a live resolver field (Creating a subscription to the live state) - const snapshot = environment.lookup(operation.fragment); - const disposable = environment.subscribe(snapshot, () => { - // Noop. We just need to be subscribed. - }); + const storeRoot = store.getRoot(); + storeRoot.setLinkedRecord(alice, 'me'); + }, + }); - // Revert the optimisitic update. - // This should unsubscribe the subscription created during the optimistic - // update, and then reread `counter`. Since `counter` is missing its `me` - // dependency, it should leave `counter` in a state with no liveValue and - // _no subscription_. - update.dispose(); + const FooQuery = graphql` + query LiveResolversTestOptimisticUpdateQuery { + counter + } + `; - // Fire the subscription, which should be ignored by Relay. - expect(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }).not.toThrow(); + const operation = createOperationDescriptor(FooQuery, {}); - // Clean up (just good hygiene) - disposable.dispose(); + // Read a live resolver field (Creating a subscription to the live state) + const snapshot = environment.lookup(operation.fragment); + const disposable = environment.subscribe(snapshot, () => { + // Noop. We just need to be subscribed. 
}); - test('Outer resolvers do not overwrite subscriptions made by inner resolvers (regression)', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - name: 'Alice', - }, - }); + // Revert the optimisitic update. + // This should unsubscribe the subscription created during the optimistic + // update, and then reread `counter`. Since `counter` is missing its `me` + // dependency, it should leave `counter` in a state with no liveValue and + // _no subscription_. + update.dispose(); - const FooQuery = graphql` - query LiveResolversTestNestedQuery { - # Outer consumes inner - outer - # We include inner again as a subsequent sibling of outer. This ensures - # that even if outer overwrites the cached version of inner, we end with - # inner in a valid state. This is nessesary to trigger the error. - inner - } - `; + // Fire the subscription, which should be ignored by Relay. + expect(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }).not.toThrow(); - const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); + // Clean up (just good hygiene) + disposable.dispose(); +}); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); +test('Outer resolvers do not overwrite subscriptions made by inner resolvers (regression)', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + name: 'Alice', + }, + }); - function Environment({children}: {children: React.Node}) { - return ( - - {children} - - ); + const FooQuery = graphql` + query LiveResolversTestNestedQuery { + # Outer consumes inner + outer + # We include inner again as a subsequent sibling of outer. 
This ensures + # that even if outer overwrites the cached version of inner, we end with + # inner in a valid state. This is nessesary to trigger the error. + inner } + `; - function TestComponent() { - const queryData = useLazyLoadQuery(FooQuery, {}); - return queryData.outer ?? null; - } + const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); - const renderer = TestRenderer.create( - - - , + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + + function Environment({children}: {children: React.Node}) { + return ( + + {children} + ); + } - expect(renderer.toJSON()).toEqual('0'); + function TestComponent() { + const queryData = useLazyLoadQuery(FooQuery, {}); + return queryData.outer ?? null; + } - let update; - // Delete data putting `inner`'s fragment into a state where it's missing - // data. This _should_ unsubscribe us from `inner`'s external state. - TestRenderer.act(() => { - update = environment.applyUpdate({ - storeUpdater: store => { - const alice = store.get('1'); - if (alice == null) { - throw new Error('Expected to have record "1"'); - } - alice.setValue(undefined, 'name'); - }, - }); - }); + const renderer = TestRenderer.create( + + + , + ); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual(null); + expect(renderer.toJSON()).toEqual('0'); - // Calling increment here should be ignored by Relay. However, if there are - // dangling subscriptions, this will put inner into a dirty state. - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + let update; + // Delete data putting `inner`'s fragment into a state where it's missing + // data. This _should_ unsubscribe us from `inner`'s external state. 
+ TestRenderer.act(() => { + update = environment.applyUpdate({ + storeUpdater: store => { + const alice = store.get('1'); + if (alice == null) { + throw new Error('Expected to have record "1"'); + } + alice.setValue(undefined, 'name'); + }, }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual(null); + }); - // Revering optimistic update puts inner back into a state where its - // fragment is valid. HOWEVER, if a dangling subscription has marked inner - // as dirty, we will try to read from a LiveValue that does not exist. - TestRenderer.act(() => update.dispose()); - expect(renderer.toJSON()).toEqual('1'); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual(null); - // Not part of the repro, but just to confirm: We should now be resubscribed... - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('2'); + // Calling increment here should be ignored by Relay. However, if there are + // dangling subscriptions, this will put inner into a dirty state. + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual(null); - test("Resolvers without fragments aren't reevaluated when their parent record updates.", async () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - }); + // Revering optimistic update puts inner back into a state where its + // fragment is valid. HOWEVER, if a dangling subscription has marked inner + // as dirty, we will try to read from a LiveValue that does not exist. 
+ TestRenderer.act(() => update.dispose()); + expect(renderer.toJSON()).toEqual('1'); - const FooQuery = graphql` - query LiveResolversTest14Query { - counter_no_fragment + // Not part of the repro, but just to confirm: We should now be resubscribed... + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('2'); +}); - # An additional field on Query which can be updated, invalidating the root record. - me { - __typename - } +test("Resolvers without fragments aren't reevaluated when their parent record updates.", async () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + + const FooQuery = graphql` + query LiveResolversTest14Query { + counter_no_fragment + + # An additional field on Query which can be updated, invalidating the root record. + me { + __typename } - `; + } + `; - const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); + const store = new LiveResolverStore(source, {gcReleaseBufferSize: 0}); - const mockPayload = Promise.resolve({ - data: { - me: { - id: '1', - __typename: 'User', - }, + const mockPayload = Promise.resolve({ + data: { + me: { + id: '1', + __typename: 'User', }, - }); + }, + }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(() => mockPayload), - store, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(() => mockPayload), + store, + }); - function Environment({children}: {children: React.Node}) { - return ( - - {children} - - ); - } + function Environment({children}: {children: React.Node}) { + return ( + + {children} + + ); + } - function TestComponent() { - const queryData = useLazyLoadQuery(FooQuery, {}); - return queryData.counter_no_fragment; - } + function TestComponent() { + const queryData = useLazyLoadQuery(FooQuery, {}); + return 
queryData.counter_no_fragment; + } - const initialCallCount = counterNoFragmentResolver.callCount; + const initialCallCount = counterNoFragmentResolver.callCount; - const renderer = TestRenderer.create( - - - , - ); + const renderer = TestRenderer.create( + + + , + ); - expect(counterNoFragmentResolver.callCount).toBe(initialCallCount + 1); - // Initial render evaluates (and caches) the `counter_no_fragment` resolver. - expect(renderer.toJSON()).toEqual('Loading...'); + expect(counterNoFragmentResolver.callCount).toBe(initialCallCount + 1); + // Initial render evaluates (and caches) the `counter_no_fragment` resolver. + expect(renderer.toJSON()).toEqual('Loading...'); - // When the network response returns, it updates the query root, which would - // invalidate a resolver with a fragment on Query. However, - // `counter_no_fragment` has no fragment, so it should not be revaluated. - TestRenderer.act(() => jest.runAllImmediates()); + // When the network response returns, it updates the query root, which would + // invalidate a resolver with a fragment on Query. However, + // `counter_no_fragment` has no fragment, so it should not be revaluated. 
+ TestRenderer.act(() => jest.runAllImmediates()); - expect(counterNoFragmentResolver.callCount).toBe(initialCallCount + 1); - expect(renderer.toJSON()).toEqual('0'); + expect(counterNoFragmentResolver.callCount).toBe(initialCallCount + 1); + expect(renderer.toJSON()).toEqual('0'); +}); + +test('Can suspend', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, }); - test('Can suspend', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - }); + const Fragment = graphql` + fragment LiveResolversTest5Fragment on Query { + counter_suspends_when_odd + } + `; + const FooQuery = graphql` + query LiveResolversTest5Query { + ...LiveResolversTest5Fragment + } + `; - const Fragment = graphql` - fragment LiveResolversTest5Fragment on Query { - counter_suspends_when_odd - } - `; - const FooQuery = graphql` - query LiveResolversTest5Query { - ...LiveResolversTest5Fragment - } - `; + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + environment.commitPayload( + createOperationDescriptor(getRequest(FooQuery), {}), + { + me: {id: '1'}, + }, + ); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); - environment.commitPayload( - createOperationDescriptor(getRequest(FooQuery), {}), - { - me: {id: '1'}, - }, + function Environment({children}: {children: React.Node}) { + return ( + + {children} + ); + } - function Environment({children}: {children: React.Node}) { - return ( - - {children} - - ); - } - - function TestComponent() { - const queryData = useLazyLoadQuery(FooQuery, {}); - const fragmentData = useFragment(Fragment, queryData); - return 
fragmentData.counter_suspends_when_odd; - } + function TestComponent() { + const queryData = useLazyLoadQuery(FooQuery, {}); + const fragmentData = useFragment(Fragment, queryData); + return fragmentData.counter_suspends_when_odd; + } - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('0'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - // If do not trigger `act` here, the renderer is still `0`. Probably, a React thing... - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Loading...'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - expect(renderer.toJSON()).toEqual('2'); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('0'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + // If do not trigger `act` here, the renderer is still `0`. Probably, a React thing... + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); + expect(renderer.toJSON()).toEqual('2'); +}); - test('Can suspend with resolver that uses live resolver', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - id: '1', - name: 'Alice', - }, - }); +test('Can suspend with resolver that uses live resolver', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + name: 'Alice', + }, + }); - const FooQuery = graphql` - query LiveResolversTest6Query { - ...LiveResolversTest6Fragment - } - `; + const FooQuery = graphql` + query LiveResolversTest6Query { + ...LiveResolversTest6Fragment + } + `; - const 
store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - function Environment({children}: {children: React.Node}) { - return ( - - {children} - - ); - } + function Environment({children}: {children: React.Node}) { + return ( + + {children} + + ); + } - function TestComponent() { - const queryData = useLazyLoadQuery(FooQuery, {}); - const fragmentData = useFragment( - graphql` - fragment LiveResolversTest6Fragment on Query { - user_name_and_counter_suspends_when_odd - } - `, - queryData, - ); - return fragmentData.user_name_and_counter_suspends_when_odd; - } + function TestComponent() { + const queryData = useLazyLoadQuery(FooQuery, {}); + const fragmentData = useFragment( + graphql` + fragment LiveResolversTest6Fragment on Query { + user_name_and_counter_suspends_when_odd + } + `, + queryData, + ); + return fragmentData.user_name_and_counter_suspends_when_odd; + } - const renderer = TestRenderer.create( - - - , + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Alice 0'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + // If do not trigger `act` here, the renderer is still `0`. Probably, a React thing... 
+ TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + expect(renderer.toJSON()).toEqual('Alice 2'); + TestRenderer.act(() => { + const operationDescriptor = createOperationDescriptor( + getRequest(FooQuery), + {}, ); - expect(renderer.toJSON()).toEqual('Alice 0'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - // If do not trigger `act` here, the renderer is still `0`. Probably, a React thing... - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Loading...'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - expect(renderer.toJSON()).toEqual('Alice 2'); - TestRenderer.act(() => { - const operationDescriptor = createOperationDescriptor( - getRequest(FooQuery), - {}, - ); - environment.commitPayload(operationDescriptor, { - me: {id: '1', name: 'Bob', __typename: 'User'}, - }); + environment.commitPayload(operationDescriptor, { + me: {id: '1', name: 'Bob', __typename: 'User'}, }); - expect(renderer.toJSON()).toEqual('Bob 2'); }); + expect(renderer.toJSON()).toEqual('Bob 2'); +}); - describe('Live Resolver with Suspense and Missing Data', () => { - let renderer; +describe('Live Resolver with Suspense and Missing Data', () => { + let renderer; - function InnerTestComponent({scale}: {scale: number}) { - const data = useLazyLoadQuery( - graphql` - query LiveResolversTest7Query($id: ID!, $scale: Float!) { - node(id: $id) { - ... on User { - name - user_profile_picture_uri_suspends_when_the_counter_is_odd( - scale: $scale - ) - } + function InnerTestComponent({scale}: {scale: number}) { + const data = useLazyLoadQuery( + graphql` + query LiveResolversTest7Query($id: ID!, $scale: Float!) { + node(id: $id) { + ... 
on User { + name + user_profile_picture_uri_suspends_when_the_counter_is_odd( + scale: $scale + ) } } - `, - {id: '1', scale}, - {fetchPolicy: 'store-only'}, - ); - return `${String(data.node?.name)}: ${String( - data.node?.user_profile_picture_uri_suspends_when_the_counter_is_odd, - )}`; - } - - function TestComponent({ - environment, - ...rest - }: { - environment: RelayModernEnvironment, - scale: number, - }) { - return ( - - - - - - ); - } - - function createEnvironment(source: MutableRecordSource) { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(source), - }); - } + } + `, + {id: '1', scale}, + {fetchPolicy: 'store-only'}, + ); + return `${String(data.node?.name)}: ${String( + data.node?.user_profile_picture_uri_suspends_when_the_counter_is_odd, + )}`; + } - it('should renderer the data from the store, after global state resolves the value', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - name: 'Alice', - id: '1', - 'profile_picture(scale:1.5)': { - __ref: 'client:1:profile_picture(scale:1.5)', - }, - }, - 'client:1:profile_picture(scale:1.5)': { - __id: 'client:1:profile_picture(scale:1.5)', - uri: 'scale 1.5', - }, - }); - const environment = createEnvironment(source); - - TestRenderer.act(() => { - renderer = TestRenderer.create( - , - ); - }); - expect(renderer.toJSON()).toEqual('Loading...'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual( - 'Alice: Hello, Alice! 
Picture Url: scale 1.5', - ); - }); + function TestComponent({ + environment, + ...rest + }: { + environment: RelayModernEnvironment, + scale: number, + }) { + return ( + + + + + + ); + } - it('should render undefined value for missing data in live resolver field', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - name: 'Alice', - id: '1', - 'profile_picture(scale:1.5)': { - __ref: 'client:1:profile_picture(scale:1.5)', - }, - 'profile_picture(scale:2)': { - __ref: 'client:1:profile_picture(scale:2)', - }, - }, - 'client:1:profile_picture(scale:1.5)': { - __id: 'client:1:profile_picture(scale:1.5)', - uri: 'scale 1.5', - }, - 'client:1:profile_picture(scale:2)': { - __id: 'client:1:profile_picture(scale:2)', - // missing data for uri - }, - }); - const environment = createEnvironment(source); - TestRenderer.act(() => { - renderer = TestRenderer.create( - , - ); - }); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual( - 'Alice: Hello, Alice! 
Picture Url: scale 1.5', - ); - TestRenderer.act(() => { - renderer.update(); - }); - // the data for scale 2 is missing in the store - expect(renderer.toJSON()).toEqual('Alice: undefined'); + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(source), }); + } - it('should render undefined value for missing data in live resolver field and trigger different states of suspense ', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': {__ref: '1'}, - }, - '1': { - __id: '1', - __typename: 'User', - name: 'Alice', - id: '1', - 'profile_picture(scale:1.5)': { - __ref: 'client:1:profile_picture(scale:1.5)', - }, - 'profile_picture(scale:2)': { - __ref: 'client:1:profile_picture(scale:2)', - }, - 'profile_picture(scale:3)': { - __ref: 'client:1:profile_picture(scale:3)', - }, - }, - 'client:1:profile_picture(scale:1.5)': { - __id: 'client:1:profile_picture(scale:1.5)', - uri: 'scale 1.5', - }, - 'client:1:profile_picture(scale:2)': { - __id: 'client:1:profile_picture(scale:2)', - // missing data for uri - }, - 'client:1:profile_picture(scale:3)': { - __id: 'client:1:profile_picture(scale:3)', - uri: 'scale 3', + it('should renderer the data from the store, after global state resolves the value', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + name: 'Alice', + id: '1', + 'profile_picture(scale:1.5)': { + __ref: 'client:1:profile_picture(scale:1.5)', }, - }); - const environment = createEnvironment(source); - - TestRenderer.act(() => { - renderer = TestRenderer.create( - , - ); - }); - - expect(renderer.toJSON()).toEqual('Loading...'); - // This should trigger the re-render with the missing data in the fragment - 
TestRenderer.act(() => { - renderer.update(); - }); - // Now, the whole live field became undefined, as some of - // the data in the live field resolver fragment is missing - expect(renderer.toJSON()).toEqual('Alice: undefined'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Alice: undefined'); - - // Next, we're re-rendering with new `scale`, and for this value (3) we have the data in - // the store (no missing data) - TestRenderer.act(() => { - renderer.update(); - }); - // And we are rendering the data with the new scale - expect(renderer.toJSON()).toEqual( - 'Alice: Hello, Alice! Picture Url: scale 3', - ); + }, + 'client:1:profile_picture(scale:1.5)': { + __id: 'client:1:profile_picture(scale:1.5)', + uri: 'scale 1.5', + }, + }); + const environment = createEnvironment(source); - // Re-render fragment with missing data, to make sure we correctly use cached value - TestRenderer.act(() => { - renderer.update(); - }); - expect(renderer.toJSON()).toEqual('Alice: undefined'); - - TestRenderer.act(() => { - renderer.update(); - }); - // And we are rendering the data with the new scale - expect(renderer.toJSON()).toEqual( - 'Alice: Hello, Alice! Picture Url: scale 3', + TestRenderer.act(() => { + renderer = TestRenderer.create( + , ); - - // Now, the global store should have the data - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - - // Now, again we are suspending, because the global state is still not ready - expect(renderer.toJSON()).toEqual('Loading...'); }); + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual( + 'Alice: Hello, Alice! 
Picture Url: scale 1.5', + ); }); - test('Live Resolver with Missing Data and @required', () => { - function InnerTestComponent({id}: {id: string}) { - const data = useLazyLoadQuery( - graphql` - query LiveResolversTest8Query($id: ID!) { - node(id: $id) { - ... on User { - name - resolver_that_throws - } - } - } - `, - {id}, - {fetchPolicy: 'store-only'}, - ); - return `${data.node?.name ?? 'Unknown name'}: ${ - data.node?.resolver_that_throws ?? 'Unknown resolver_that_throws value' - }`; - } - - function TestComponent({ - environment, - ...rest - }: { - environment: RelayModernEnvironment, - id: string, - }) { - return ( - - - - - - ); - } - const requiredFieldLogger = jest.fn< - | $FlowFixMe - | [ - | {+fieldPath: string, +kind: 'missing_field.log', +owner: string} - | {+fieldPath: string, +kind: 'missing_field.throw', +owner: string} - | { - +error: Error, - +fieldPath: string, - +kind: 'relay_resolver.error', - +owner: string, - }, - ], - void, - >(); - function createEnvironment(source: MutableRecordSource) { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(source), - requiredFieldLogger, - }); - } - + it('should render undefined value for missing data in live resolver field', () => { const source = RelayRecordSource.create({ 'client:root': { __id: 'client:root', __typename: '__Root', 'node(id:"1")': {__ref: '1'}, - 'node(id:"2")': {__ref: '2'}, }, '1': { __id: '1', __typename: 'User', name: 'Alice', - // username is missing id: '1', + 'profile_picture(scale:1.5)': { + __ref: 'client:1:profile_picture(scale:1.5)', + }, + 'profile_picture(scale:2)': { + __ref: 'client:1:profile_picture(scale:2)', + }, }, - '2': { - __id: '2', - __typename: 'User', - name: 'Bob', - username: 'bob', - id: '2', + 'client:1:profile_picture(scale:1.5)': { + __id: 'client:1:profile_picture(scale:1.5)', + uri: 'scale 1.5', + }, + 'client:1:profile_picture(scale:2)': { + __id: 'client:1:profile_picture(scale:2)', + // 
missing data for uri }, }); const environment = createEnvironment(source); - - // First, render with missing data - expect(() => { - TestRenderer.create(); - }).toThrow( - "Relay: Missing @required value at path 'username' in 'ResolverThatThrows'.", - ); - expect(requiredFieldLogger.mock.calls).toEqual([ - [ - { - kind: 'missing_field.throw', - owner: 'ResolverThatThrows', - fieldPath: 'username', - }, - ], - ]); - requiredFieldLogger.mockReset(); - - // Render with complete data - expect(() => { - TestRenderer.create(); - }).toThrow( - 'The resolver should throw earlier. It should have missing data.', + TestRenderer.act(() => { + renderer = TestRenderer.create( + , + ); + }); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual( + 'Alice: Hello, Alice! Picture Url: scale 1.5', ); - expect(requiredFieldLogger.mock.calls).toEqual([]); + TestRenderer.act(() => { + renderer.update(); + }); + // the data for scale 2 is missing in the store + expect(renderer.toJSON()).toEqual('Alice: undefined'); }); - test('apply optimistic updates to live resolver field', () => { - let renderer; - - function InnerTestComponent({scale}: {scale: number}) { - const data = useLazyLoadQuery( - graphql` - query LiveResolversTest9Query($id: ID!, $scale: Float!) { - node(id: $id) { - ... 
on User { - profile_picture_uri: user_profile_picture_uri_suspends_when_the_counter_is_odd( - scale: $scale - ) - } - } - } - `, - {id: '1', scale}, - {fetchPolicy: 'store-only'}, - ); - return data.node?.profile_picture_uri; - } - - function TestComponent({ - environment, - ...rest - }: { - environment: RelayModernEnvironment, - scale: number, - }) { - return ( - - - - - - ); - } - - function createEnvironment(source: MutableRecordSource) { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(source), - }); - } - + it('should render undefined value for missing data in live resolver field and trigger different states of suspense ', () => { const source = RelayRecordSource.create({ 'client:root': { __id: 'client:root', @@ -1056,11 +789,25 @@ describe.each([ 'profile_picture(scale:1.5)': { __ref: 'client:1:profile_picture(scale:1.5)', }, + 'profile_picture(scale:2)': { + __ref: 'client:1:profile_picture(scale:2)', + }, + 'profile_picture(scale:3)': { + __ref: 'client:1:profile_picture(scale:3)', + }, }, 'client:1:profile_picture(scale:1.5)': { __id: 'client:1:profile_picture(scale:1.5)', uri: 'scale 1.5', }, + 'client:1:profile_picture(scale:2)': { + __id: 'client:1:profile_picture(scale:2)', + // missing data for uri + }, + 'client:1:profile_picture(scale:3)': { + __id: 'client:1:profile_picture(scale:3)', + uri: 'scale 3', + }, }); const environment = createEnvironment(source); @@ -1070,514 +817,756 @@ describe.each([ ); }); - if (renderer == null) { - throw new Error('Renderer is expected to be defined.'); - } - expect(renderer.toJSON()).toEqual('Loading...'); + // This should trigger the re-render with the missing data in the fragment + TestRenderer.act(() => { + renderer.update(); + }); + // Now, the whole live field became undefined, as some of + // the data in the live field resolver fragment is missing + expect(renderer.toJSON()).toEqual('Alice: undefined'); TestRenderer.act(() => { 
GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Hello, Alice! Picture Url: scale 1.5'); + expect(renderer.toJSON()).toEqual('Alice: undefined'); - let update; + // Next, we're re-rendering with new `scale`, and for this value (3) we have the data in + // the store (no missing data) TestRenderer.act(() => { - update = environment.applyUpdate({ - storeUpdater: store => { - const alice = store.get('1'); - if (alice == null) { - throw new Error('Expected to have record "1"'); - } - alice.setValue('Alicia', 'name'); - }, - }); + renderer.update(); }); - expect(renderer.toJSON()).toEqual('Hello, Alicia! Picture Url: scale 1.5'); + // And we are rendering the data with the new scale + expect(renderer.toJSON()).toEqual( + 'Alice: Hello, Alice! Picture Url: scale 3', + ); + + // Re-render fragment with missing data, to make sure we correctly use cached value + TestRenderer.act(() => { + renderer.update(); + }); + expect(renderer.toJSON()).toEqual('Alice: undefined'); + + TestRenderer.act(() => { + renderer.update(); + }); + // And we are rendering the data with the new scale + expect(renderer.toJSON()).toEqual( + 'Alice: Hello, Alice! 
Picture Url: scale 3', + ); + // Now, the global store should have the data TestRenderer.act(() => { GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Loading...'); - // Revering optimistic update - TestRenderer.act(() => update.dispose()); - // Reverting optimistic update should - // not change suspense state of the live-resolver - // this should still be `Loading...` + // Now, again we are suspending, because the global state is still not ready expect(renderer.toJSON()).toEqual('Loading...'); + }); +}); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); +test('Live Resolver with Missing Data and @required', async () => { + function InnerTestComponent({id}: {id: string}) { + const data = useLazyLoadQuery( + graphql` + query LiveResolversTest8Query($id: ID!) { + node(id: $id) { + ... on User { + name + resolver_that_throws + } + } + } + `, + {id}, + {fetchPolicy: 'store-only'}, + ); + return `${data.node?.name ?? 'Unknown name'}: ${ + data.node?.resolver_that_throws ?? 'Unknown resolver_that_throws value' + }`; + } + + function TestComponent({ + environment, + ...rest + }: { + environment: RelayModernEnvironment, + id: string, + }) { + return ( + + + + + + ); + } + const relayFieldLogger = jest.fn< + $FlowFixMe | [RelayFieldLoggerEvent], + void, + >(); + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(source), + relayFieldLogger, }); - expect(renderer.toJSON()).toEqual('Hello, Alice! 
Picture Url: scale 1.5'); + } + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + 'node(id:"2")': {__ref: '2'}, + }, + '1': { + __id: '1', + __typename: 'User', + name: 'Alice', + // username is missing + id: '1', + }, + '2': { + __id: '2', + __typename: 'User', + name: 'Bob', + username: 'bob', + id: '2', + }, }); + const environment = createEnvironment(source); - // Regression test for a case where we were resetting the parent snapshot's - // `isMissingData` to false when reading a live resolver field. - test('Missing data is not clobbered by non-null empty missingLiveResolverFields on snapshot', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - me: {__ref: '1'}, + // First, render with missing data + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create(); + }); + }).rejects.toThrow( + "Relay: Missing @required value at path 'username' in 'ResolverThatThrows'.", + ); + expect(relayFieldLogger.mock.calls).toEqual([ + [ + { + kind: 'missing_field.throw', + owner: 'ResolverThatThrows', + fieldPath: 'username', }, - '1': { - __id: '1', - __typename: 'User', - id: '1', + ], + ]); + relayFieldLogger.mockReset(); + + // Render with complete data + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create( + , + ); + }); + + if (renderer == null) { + throw new Error('Renderer is expected to be defined.'); + } + + expect(relayFieldLogger.mock.calls).toEqual([ + [ + { + error: new Error( + 'The resolver should throw earlier. 
It should have missing data.', + ), + fieldPath: 'node.resolver_that_throws', + kind: 'relay_resolver.error', + owner: 'LiveResolversTest8Query', + }, + ], + ]); + + expect(renderer.toJSON()).toEqual('Bob: Unknown resolver_that_throws value'); +}); + +test('apply optimistic updates to live resolver field', () => { + let renderer; + + function InnerTestComponent({scale}: {scale: number}) { + const data = useLazyLoadQuery( + graphql` + query LiveResolversTest9Query($id: ID!, $scale: Float!) { + node(id: $id) { + ... on User { + profile_picture_uri: user_profile_picture_uri_suspends_when_the_counter_is_odd( + scale: $scale + ) + } + } + } + `, + {id: '1', scale}, + {fetchPolicy: 'store-only'}, + ); + return data.node?.profile_picture_uri; + } + + function TestComponent({ + environment, + ...rest + }: { + environment: RelayModernEnvironment, + scale: number, + }) { + return ( + + + + + + ); + } + + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(source), + }); + } + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + name: 'Alice', + id: '1', + 'profile_picture(scale:1.5)': { + __ref: 'client:1:profile_picture(scale:1.5)', + }, + }, + 'client:1:profile_picture(scale:1.5)': { + __id: 'client:1:profile_picture(scale:1.5)', + uri: 'scale 1.5', + }, + }); + const environment = createEnvironment(source); + + TestRenderer.act(() => { + renderer = TestRenderer.create( + , + ); + }); + + if (renderer == null) { + throw new Error('Renderer is expected to be defined.'); + } + + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Hello, Alice! 
Picture Url: scale 1.5'); + + let update; + TestRenderer.act(() => { + update = environment.applyUpdate({ + storeUpdater: store => { + const alice = store.get('1'); + if (alice == null) { + throw new Error('Expected to have record "1"'); + } + alice.setValue('Alicia', 'name'); }, }); - const FooQuery = graphql` - query LiveResolversTest10Query { - me { - # Should be tracked as missing data - name + }); + expect(renderer.toJSON()).toEqual('Hello, Alicia! Picture Url: scale 1.5'); + + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Loading...'); + + // Revering optimistic update + TestRenderer.act(() => update.dispose()); + // Reverting optimistic update should + // not change suspense state of the live-resolver + // this should still be `Loading...` + expect(renderer.toJSON()).toEqual('Loading...'); + + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + expect(renderer.toJSON()).toEqual('Hello, Alice! Picture Url: scale 1.5'); +}); + +// Regression test for a case where we were resetting the parent snapshot's +// `isMissingData` to false when reading a live resolver field. 
+test('Missing data is not clobbered by non-null empty missingLiveResolverFields on snapshot', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + }, + }); + const FooQuery = graphql` + query LiveResolversTest10Query { + me { + # Should be tracked as missing data + name + } + counter + } + `; + + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); + + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + + const snapshot = environment.lookup(operation.fragment); + expect(snapshot.missingLiveResolverFields).toEqual([]); + expect(snapshot.isMissingData).toBe(true); +}); + +test('with client-only field', () => { + let renderer; + + function InnerTestComponent() { + const data = useClientQuery( + graphql` + query LiveResolversTest11Query { + counter_no_fragment + } + `, + {}, + ); + return data.counter_no_fragment; + } + + function TestComponent({environment}: {environment: IEnvironment}) { + return ( + + + + + + ); + } + + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(source), + }); + } + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + const environment = createEnvironment(source); + + TestRenderer.act(() => { + renderer = TestRenderer.create(); + }); + + if (renderer == null) { + throw new Error('Renderer is expected to be defined.'); + } + + expect(renderer.toJSON()).toEqual('0'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + expect(renderer.toJSON()).toEqual('1'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + 
expect(renderer.toJSON()).toEqual('2'); +}); + +test('with client-only field and args', () => { + let renderer; + + function InnerTestComponent({prefix}: {prefix: string}) { + const data = useClientQuery( + graphql` + query LiveResolversTest12Query($prefix: String!) { + counter_no_fragment_with_arg(prefix: $prefix) } - counter - } - `; + `, + {prefix}, + ); + return data.counter_no_fragment_with_arg; + } - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); + function TestComponent({ + environment, + ...rest + }: { + environment: IEnvironment, + prefix: string, + }) { + return ( + + + + + + ); + } - const environment = new RelayModernEnvironment({ + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), - store, + store: new LiveResolverStore(source), }); + } + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + const environment = createEnvironment(source); - const snapshot = environment.lookup(operation.fragment); - expect(snapshot.missingLiveResolverFields).toEqual([]); - expect(snapshot.isMissingData).toBe(true); + TestRenderer.act(() => { + renderer = TestRenderer.create( + , + ); }); - test('with client-only field', () => { - let renderer; + if (renderer == null) { + throw new Error('Renderer is expected to be defined.'); + } - function InnerTestComponent() { - const data = useClientQuery( - graphql` - query LiveResolversTest11Query { - counter_no_fragment - } - `, - {}, - ); - return data.counter_no_fragment; - } + expect(renderer.toJSON()).toEqual('Counter is 0'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + expect(renderer.toJSON()).toEqual('Counter is 1'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }); + expect(renderer.toJSON()).toEqual('Counter is 
2'); +}); - function TestComponent({environment}: {environment: IEnvironment}) { - return ( - - - - - - ); - } +test('Can read a live client edge without a fragment', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + '1338': { + __id: '1338', + id: '1338', + __typename: 'User', + name: 'Elizabeth', + }, + }); - function createEnvironment(source: MutableRecordSource) { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(source), - }); + const FooQuery = graphql` + query LiveResolversTest13Query { + live_constant_client_edge @waterfall { + name + } } + `; - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - }); - const environment = createEnvironment(source); + const operation = createOperationDescriptor(FooQuery, {}); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); - TestRenderer.act(() => { - renderer = TestRenderer.create( - , - ); - }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - if (renderer == null) { - throw new Error('Renderer is expected to be defined.'); - } + const data = environment.lookup(operation.fragment).data; + expect(data).toEqual({ + live_constant_client_edge: { + name: 'Elizabeth', + }, + }); +}); - expect(renderer.toJSON()).toEqual('0'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - expect(renderer.toJSON()).toEqual('1'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - expect(renderer.toJSON()).toEqual('2'); +test('live resolver with the edge that always suspend', () => { + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore( + RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: 
'__Root', + }, + }), + ), }); - test('with client-only field and args', () => { - let renderer; + function Environment({children}: {children: React.Node}) { + return ( + + {children} + + ); + } - function InnerTestComponent({prefix}: {prefix: string}) { - const data = useClientQuery( - graphql` - query LiveResolversTest12Query($prefix: String!) { - counter_no_fragment_with_arg(prefix: $prefix) + function TestComponent() { + const data = useClientQuery( + graphql` + query LiveResolversTest15Query { + live_user_resolver_always_suspend @waterfall { + name } - `, - {prefix}, - ); - return data.counter_no_fragment_with_arg; - } + } + `, + {}, + ); + return data.live_user_resolver_always_suspend?.name; + } - function TestComponent({ - environment, - ...rest - }: { - environment: IEnvironment, - prefix: string, - }) { - return ( - - - - - - ); - } + const renderer = TestRenderer.create( + + + , + ); + + expect(renderer.toJSON()).toBe('Loading...'); +}); - function createEnvironment(source: MutableRecordSource) { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(source), - }); +describe('client-only fragments', () => { + const LiveResolversTestCounterUserFragment = graphql` + fragment LiveResolversTestCounterUserFragment on User { + counter_suspends_when_odd } + `; - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - }); - const environment = createEnvironment(source); + const LiveResolversTestLiveResolverSuspenseQuery = graphql` + query LiveResolversTestLiveResolverSuspenseQuery($id: ID!) 
{ + node(id: $id) { + ...LiveResolversTestCounterUserFragment + } + } + `; - TestRenderer.act(() => { - renderer = TestRenderer.create( - , - ); - }); + function Environment({ + children, + environment, + }: { + children: React.Node, + environment: RelayModernEnvironment, + }) { + return ( + + {children} + + ); + } - if (renderer == null) { - throw new Error('Renderer is expected to be defined.'); - } + function TestComponent(props: {id: string}) { + const queryData = useLazyLoadQuery( + LiveResolversTestLiveResolverSuspenseQuery, + {id: props.id}, + ); + const fragmentData = useFragment( + LiveResolversTestCounterUserFragment, + queryData.node, + ); + return fragmentData?.counter_suspends_when_odd; + } - expect(renderer.toJSON()).toEqual('Counter is 0'); + test('correctly suspend on fragments with client-only data', () => { + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(RelayRecordSource.create()), + }); + environment.commitPayload( + createOperationDescriptor(LiveResolversTestLiveResolverSuspenseQuery, { + id: '1', + }), + { + node: {id: '1', __typename: 'User'}, + }, + ); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('0'); TestRenderer.act(() => { GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); - expect(renderer.toJSON()).toEqual('Counter is 1'); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Loading...'); TestRenderer.act(() => { GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); - expect(renderer.toJSON()).toEqual('Counter is 2'); + expect(renderer.toJSON()).toEqual('2'); }); - test('Can read a live client edge without a fragment', () => { - const source = RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - '1338': { - __id: '1338', - id: '1338', - __typename: 'User', - name: 'Elizabeth', - }, - }); - - const FooQuery = graphql` - query 
LiveResolversTest13Query { - live_constant_client_edge @waterfall { - name - } - } - `; - - const operation = createOperationDescriptor(FooQuery, {}); - const store = new LiveResolverStore(source, { - gcReleaseBufferSize: 0, - }); - + test('invariant for invalid liveState value in the Relay store.', () => { const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), - store, + store: new LiveResolverStore(RelayRecordSource.create()), }); - - const data = environment.lookup(operation.fragment).data; - expect(data).toEqual({ - live_constant_client_edge: { - name: 'Elizabeth', + environment.commitPayload( + createOperationDescriptor(LiveResolversTestLiveResolverSuspenseQuery, { + id: '1', + }), + { + node: {id: '1', __typename: 'User'}, }, + ); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('0'); + TestRenderer.act(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); - }); - - test('live resolver with the edge that always suspend', () => { - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore( - RelayRecordSource.create({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - }, - }), - ), + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual('Loading...'); + environment.applyUpdate({ + storeUpdater: store => { + const record = store.get('client:1:counter_suspends_when_odd'); + // this will force the invalid `liveState` value` in the resolver record + record?.setValue(undefined, '__resolverLiveStateValue'); + }, }); + expect(() => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + }).toThrowError( + 'Unexpected LiveState value returned from Relay Resolver internal field `RELAY_RESOLVER_LIVE_STATE_VALUE`. It is likely a bug in Relay, or a corrupt state of the relay store state Field Path `counter_suspends_when_odd`. 
Record `{"__id":"client:1:counter_suspends_when_odd","__typename":"__RELAY_RESOLVER__","__resolverError":null,"__resolverValue":{"__LIVE_RESOLVER_SUSPENSE_SENTINEL":true},"__resolverLiveStateDirty":true}`.', + ); + expect(renderer.toJSON()).toEqual('Loading...'); + }); +}); - function Environment({children}: {children: React.Node}) { - return ( - - {children} - - ); - } +test('Subscriptions cleaned up correctly after GC', () => { + const store = new LiveResolverStore(RelayRecordSource.create(), { + gcReleaseBufferSize: 0, + }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); - function TestComponent() { - const data = useClientQuery( + // We're adding some data for `me { id } ` query so the initial read for + // `live_counter_with_possible_missing_fragment_data` won't have any missing data + // and we will be able to create a valid live resolver record for it. + function publishMeData() { + environment.commitPayload( + createOperationDescriptor( graphql` - query LiveResolversTest15Query { - live_user_resolver_always_suspend @waterfall { - name + query LiveResolversTestWithGCUserQuery { + me { + id } } `, {}, - ); - return data.live_user_resolver_always_suspend?.name; - } - - const renderer = TestRenderer.create( - - - , + ), + { + me: { + id: '1', + }, + }, ); + } + publishMeData(); - expect(renderer.toJSON()).toBe('Loading...'); - }); - - describe('client-only fragments', () => { - const LiveResolversTestCounterUserFragment = graphql` - fragment LiveResolversTestCounterUserFragment on User { - counter_suspends_when_odd - } - `; - - const LiveResolversTestLiveResolverSuspenseQuery = graphql` - query LiveResolversTestLiveResolverSuspenseQuery($id: ID!) 
{ - node(id: $id) { - ...LiveResolversTestCounterUserFragment - } + const operation = createOperationDescriptor( + graphql` + query LiveResolversTestWithGCQuery { + live_counter_with_possible_missing_fragment_data } - `; - - function Environment({ - children, - environment, - }: { - children: React.Node, - environment: RelayModernEnvironment, - }) { - return ( - - {children} - - ); - } - - function TestComponent(props: {id: string}) { - const queryData = useLazyLoadQuery( - LiveResolversTestLiveResolverSuspenseQuery, - {id: props.id}, - ); - const fragmentData = useFragment( - LiveResolversTestCounterUserFragment, - queryData.node, - ); - return fragmentData?.counter_suspends_when_odd; - } - - test('correctly suspend on fragments with client-only data', () => { - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(RelayRecordSource.create()), - }); - environment.commitPayload( - createOperationDescriptor(LiveResolversTestLiveResolverSuspenseQuery, { - id: '1', - }), - { - node: {id: '1', __typename: 'User'}, - }, - ); - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('0'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Loading...'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - expect(renderer.toJSON()).toEqual('2'); - }); + `, + {}, + ); - test('invariant for invalid liveState value in the Relay store.', () => { - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(RelayRecordSource.create()), - }); - environment.commitPayload( - createOperationDescriptor(LiveResolversTestLiveResolverSuspenseQuery, { - id: '1', - }), - { - node: {id: '1', __typename: 'User'}, - }, - ); - const renderer = TestRenderer.create( - - - , - ); - 
expect(renderer.toJSON()).toEqual('0'); - TestRenderer.act(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual('Loading...'); - environment.applyUpdate({ - storeUpdater: store => { - const record = store.get('client:1:counter_suspends_when_odd'); - // this will force the invalid `liveState` value` in the resolver record - record?.setValue(undefined, '__resolverLiveStateValue'); - }, - }); - expect(() => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }).toThrowError( - 'Unexpected LiveState value returned from Relay Resolver internal field `RELAY_RESOLVER_LIVE_STATE_VALUE`. It is likely a bug in Relay, or a corrupt state of the relay store state Field Path `counter_suspends_when_odd`. Record `{"__id":"client:1:counter_suspends_when_odd","__typename":"__RELAY_RESOLVER__","__resolverValue":{"__LIVE_RESOLVER_SUSPENSE_SENTINEL":true},"__resolverLiveStateDirty":true,"__resolverError":null}`', - ); - expect(renderer.toJSON()).toEqual('Loading...'); - }); + // The first time we read `live_counter_with_possible_missing_fragment_data` we will + // create live resolver record and subscribe to the external store for updates + let snapshot = environment.lookup(operation.fragment); + expect(snapshot.data).toEqual({ + live_counter_with_possible_missing_fragment_data: 0, }); - - test('Subscriptions cleaned up correctly after GC', () => { - const store = new LiveResolverStore(RelayRecordSource.create(), { - gcReleaseBufferSize: 0, - }); - const environment = new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store, - }); - - // We're adding some data for `me { id } ` query so the initial read for - // `live_counter_with_possible_missing_fragment_data` won't have any missing data - // and we will be able to create a valid live resolver record for it. 
- function publishMeData() { - environment.commitPayload( - createOperationDescriptor( - graphql` - query LiveResolversTestWithGCUserQuery { - me { - id - } - } - `, - {}, - ), - { - me: { - id: '1', - }, - }, - ); - } - publishMeData(); - - const operation = createOperationDescriptor( + expect(snapshot.isMissingData).toBe(false); + + // Note: this is another issue with GC here. + // Our GC will remove **all** records from the store(including __ROOT__) if they are not retained. + // + // So in this test we need to retain some unrelevant records in the store to keep the __ROOT__ + // record arount after GC run. + environment.retain( + createOperationDescriptor( graphql` - query LiveResolversTestWithGCQuery { - live_counter_with_possible_missing_fragment_data + query LiveResolversTestWithGCCounterQuery { + counter_no_fragment } `, {}, - ); + ), + ); - // The first time we read `live_counter_with_possible_missing_fragment_data` we will - // create live resolver record and subscribe to the external store for updates - let snapshot = environment.lookup(operation.fragment); - expect(snapshot.data).toEqual({ - live_counter_with_possible_missing_fragment_data: 0, - }); - expect(snapshot.isMissingData).toBe(false); - - // Note: this is another issue with GC here. - // Our GC will remove **all** records from the store(including __ROOT__) if they are not retained. - // - // So in this test we need to retain some unrelevant records in the store to keep the __ROOT__ - // record arount after GC run. - environment.retain( - createOperationDescriptor( - graphql` - query LiveResolversTestWithGCCounterQuery { - counter_no_fragment - } - `, - {}, - ), - ); + const subscriptionsCountBeforeGCRun = GLOBAL_STORE.getSubscriptionsCount(); - // Go-go-go! Clean the store! - store.scheduleGC(); - jest.runAllImmediates(); - // This will clean the store, but won't unsubscribe from the external states + // Go-go-go! Clean the store! 
+ store.scheduleGC(); + jest.runAllImmediates(); + // This will clean the store, and unsubscribe from the external states - // Re-reading resolvers will create new records for them (but) the - // `live_counter_with_possible_missing_fragment_data` will have missing required data at this - // point so we won't be able to create a fully-valid live-resolver record for it (and subscribe/read) - // from the external state. - environment.lookup(operation.fragment); + const subscriptionsCountAfterGCRun = GLOBAL_STORE.getSubscriptionsCount(); - // this will dispatch an action from the extenrnal store and the callback that was created before GC - expectToWarn( - 'Unexpected callback for a incomplete live resolver record', - () => { - GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - }, - ); + // this will verify that we unsubscribed from the external store + expect(subscriptionsCountAfterGCRun).toEqual( + subscriptionsCountBeforeGCRun - 1, + ); - // The data for the live resolver is missing (it has missing dependecies) - snapshot = environment.lookup(operation.fragment); - expect(snapshot.data).toEqual({ - live_counter_with_possible_missing_fragment_data: undefined, - }); - expect(snapshot.isMissingData).toBe(true); + // Re-reading resolvers will create new records for them (but) the + // `live_counter_with_possible_missing_fragment_data` will have missing required data at this + // point so we won't be able to create a fully-valid live-resolver record for it (and subscribe/read) + // from the external state. 
+ environment.lookup(operation.fragment); - // We should be able to re-read the data once the missing data in avaialbe again - publishMeData(); + // this will dispatch an action from the external store and the callback that was created before GC + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); - snapshot = environment.lookup(operation.fragment); - expect(snapshot.data).toEqual({ - live_counter_with_possible_missing_fragment_data: 1, - }); - expect(snapshot.isMissingData).toBe(false); + // The data for the live resolver is missing (it has missing dependecies) + snapshot = environment.lookup(operation.fragment); + expect(snapshot.data).toEqual({ + live_counter_with_possible_missing_fragment_data: undefined, + }); + expect(snapshot.isMissingData).toBe(true); + + // We should be able to re-read the data once the missing data in avaialbe again + publishMeData(); + + snapshot = environment.lookup(operation.fragment); + expect(snapshot.data).toEqual({ + live_counter_with_possible_missing_fragment_data: 1, }); + expect(snapshot.isMissingData).toBe(false); }); test('Errors when reading a @live resolver that does not return a LiveState object', () => { diff --git a/packages/react-relay/__tests__/QueryResource-ClientEdges-test.js b/packages/react-relay/__tests__/QueryResource-ClientEdges-test.js index 31e0fcdd81ef4..9af0c089fbede 100644 --- a/packages/react-relay/__tests__/QueryResource-ClientEdges-test.js +++ b/packages/react-relay/__tests__/QueryResource-ClientEdges-test.js @@ -12,7 +12,7 @@ 'use strict'; const { getFragmentResourceForEnvironment, -} = require('react-relay/relay-hooks/FragmentResource'); +} = require('react-relay/relay-hooks/legacy/FragmentResource'); const { getQueryResourceForEnvironment, } = require('react-relay/relay-hooks/QueryResource'); @@ -35,12 +35,10 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { 
RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); const FRAGMENT_1 = graphql` @@ -140,7 +138,7 @@ describe('QueryResource Client Edges behavior', () => { 'componentDisplayName', ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (result.data: any); + const {me}: any = result.data; const clientEdgeFragmentRef = me.client_edge; expect(environment.mock.getAllOperations().length).toBe(0); diff --git a/packages/react-relay/__tests__/ReactRelayFragmentContainer-WithFragmentOwnership-test.js b/packages/react-relay/__tests__/ReactRelayFragmentContainer-WithFragmentOwnership-test.js index 04bd4ecb22050..be122588dafaa 100644 --- a/packages/react-relay/__tests__/ReactRelayFragmentContainer-WithFragmentOwnership-test.js +++ b/packages/react-relay/__tests__/ReactRelayFragmentContainer-WithFragmentOwnership-test.js @@ -117,7 +117,10 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { user: UserFragment, }; variables = {rootVariable: 'root'}; - TestComponent = render; + TestComponent = ({ref, ...props}) => { + // Omit `ref` for forward-compatibility with `enableRefAsProp`. 
+ return render(props); + }; TestComponent.displayName = 'TestComponent'; TestContainer = ReactRelayFragmentContainer.createContainer( TestComponent, @@ -180,7 +183,6 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, }); // Subscribes for updates @@ -195,9 +197,9 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -240,7 +242,6 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, relayResolverErrors: [], seenRecords: {}, @@ -265,7 +266,6 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -302,7 +302,6 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser2.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -318,9 +317,9 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser2.request, - __isWithinUnmatchedTypeRefinement: false, }, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -370,7 +369,6 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: ownerUser1WithCondVar.request, - __isWithinUnmatchedTypeRefinement: false, }, }); // Container subscribes for updates on new props @@ -385,9 +383,9 @@ describe('ReactRelayFragmentContainer with fragment ownership', () => { {}, }, __fragmentOwner: 
ownerUser1WithCondVar.request, - __isWithinUnmatchedTypeRefinement: false, }, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, diff --git a/packages/react-relay/__tests__/ReactRelayFragmentContainer-test.js b/packages/react-relay/__tests__/ReactRelayFragmentContainer-test.js index c5b0847ab32c6..d0b659f37c1d8 100644 --- a/packages/react-relay/__tests__/ReactRelayFragmentContainer-test.js +++ b/packages/react-relay/__tests__/ReactRelayFragmentContainer-test.js @@ -82,9 +82,7 @@ describe('ReactRelayFragmentContainer', () => { } beforeEach(() => { - jest.mock('scheduler', () => { - return jest.requireActual('scheduler/unstable_mock'); - }); + jest.mock('scheduler', () => require('./mockScheduler')); environment = createMockEnvironment(); UserQuery = graphql` query ReactRelayFragmentContainerTestUserQuery($id: ID!) { @@ -119,7 +117,10 @@ describe('ReactRelayFragmentContainer', () => { user: UserFragment, }; - TestComponent = render; + TestComponent = ({ref, ...props}) => { + // Omit `ref` for forward-compatibility with `enableRefAsProp`. 
+ return render(props); + }; TestComponent.displayName = 'TestComponent'; TestContainer = ReactRelayFragmentContainer.createContainer( TestComponent, @@ -264,6 +265,7 @@ describe('ReactRelayFragmentContainer', () => { name: 'Zuck', }, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -357,6 +359,7 @@ describe('ReactRelayFragmentContainer', () => { }, isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -410,6 +413,7 @@ describe('ReactRelayFragmentContainer', () => { }, isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -640,7 +644,7 @@ describe('ReactRelayFragmentContainer', () => { describe('concurrent mode', () => { function assertYieldsWereCleared(_scheduler) { - const actualYields = _scheduler.unstable_clearYields(); + const actualYields = _scheduler.unstable_clearLog(); if (actualYields.length !== 0) { throw new Error( 'Log of yielded values is not empty. 
' + @@ -653,7 +657,7 @@ describe('ReactRelayFragmentContainer', () => { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushAllWithoutAsserting(); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } @@ -661,14 +665,24 @@ describe('ReactRelayFragmentContainer', () => { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushNumberOfYields(expectedYields.length); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } it('upon commit, it should pick up changes in data that happened before comitting', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !ReactTestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const Scheduler = require('scheduler'); const YieldChild = props => { - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; }; const YieldyUserComponent = ({user}) => { diff --git a/packages/react-relay/__tests__/ReactRelayPaginationContainer-WithFragmentOwnership-test.js b/packages/react-relay/__tests__/ReactRelayPaginationContainer-WithFragmentOwnership-test.js index 4b04d44aed5c0..2df42a1f817ec 100644 --- a/packages/react-relay/__tests__/ReactRelayPaginationContainer-WithFragmentOwnership-test.js +++ b/packages/react-relay/__tests__/ReactRelayPaginationContainer-WithFragmentOwnership-test.js @@ -183,7 +183,10 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { count, }; }); - TestComponent = render; + TestComponent = ({ref, ...props}) => { + // Omit `ref` for forward-compatibility with `enableRefAsProp`. 
+ return render(props); + }; TestComponent.displayName = 'TestComponent'; TestContainer = ReactRelayPaginationContainer.createContainer( TestComponent, @@ -262,7 +265,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], @@ -317,7 +319,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], @@ -376,7 +377,7 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { loadMore(1, jest.fn()); expect(render.mock.calls.length).toBe(1); - TestComponent.mockClear(); + render.mockClear(); TestChildComponent.mockClear(); ReactTestRenderer.act(() => { environment.mock.resolve(UserQuery, { @@ -432,7 +433,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, { @@ -448,7 +448,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ]); @@ -511,7 +510,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], @@ -635,7 +633,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], @@ -810,7 +807,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedFragmentOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], @@ -893,7 +889,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedFragmentOwner.request, - __isWithinUnmatchedTypeRefinement: 
false, }, }, ], @@ -923,7 +918,7 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }; expect(environment.mock.isLoading(UserQuery, variables)).toBe(true); - TestComponent.mockClear(); + render.mockClear(); TestChildComponent.mockClear(); environment.mock.resolve(UserQuery, { data: { @@ -983,7 +978,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedFragmentOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, { @@ -999,7 +993,6 @@ describe('ReactRelayPaginationContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedFragmentOwner.request, - __isWithinUnmatchedTypeRefinement: false, }, }, ], diff --git a/packages/react-relay/__tests__/ReactRelayPaginationContainer-test.js b/packages/react-relay/__tests__/ReactRelayPaginationContainer-test.js index dc8bbdb017de6..3f3233a6f047e 100644 --- a/packages/react-relay/__tests__/ReactRelayPaginationContainer-test.js +++ b/packages/react-relay/__tests__/ReactRelayPaginationContainer-test.js @@ -148,7 +148,10 @@ describe('ReactRelayPaginationContainer', () => { count, }; }); - TestComponent = render; + TestComponent = ({ref, ...props}) => { + // Omit `ref` for forward-compatibility with `enableRefAsProp`. 
+ return render(props); + }; TestComponent.displayName = 'TestComponent'; TestContainer = ReactRelayPaginationContainer.createContainer( TestComponent, @@ -333,6 +336,7 @@ describe('ReactRelayPaginationContainer', () => { data: expect.any(Object), isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -441,6 +445,7 @@ describe('ReactRelayPaginationContainer', () => { data: expect.any(Object), isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -508,6 +513,7 @@ describe('ReactRelayPaginationContainer', () => { data: expect.any(Object), isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -606,6 +612,7 @@ describe('ReactRelayPaginationContainer', () => { data: expect.any(Object), isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, diff --git a/packages/react-relay/__tests__/ReactRelayQueryRenderer-test.js b/packages/react-relay/__tests__/ReactRelayQueryRenderer-test.js index 21fff48e813d5..17a80ca9ebb25 100644 --- a/packages/react-relay/__tests__/ReactRelayQueryRenderer-test.js +++ b/packages/react-relay/__tests__/ReactRelayQueryRenderer-test.js @@ -10,7 +10,7 @@ 'use strict'; -jest.mock('scheduler', () => require('scheduler/unstable_mock')); +jest.mock('scheduler', () => require('./mockScheduler')); const ReactRelayContext = require('../ReactRelayContext'); const ReactRelayQueryRenderer = require('../ReactRelayQueryRenderer'); @@ -140,11 +140,21 @@ describe('ReactRelayQueryRenderer', () => { describe('when constructor fires multiple times', () => { describe('when store does not have snapshot and fetch does not return snapshot', 
() => { it('fetches the query only once, renders loading state', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !ReactTestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + environment.mockClear(); function Child(props) { - // NOTE the unstable_yield method will move to the static renderer. + // NOTE the log method will move to the static renderer. // When React sync runs we need to update this. - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; } @@ -173,7 +183,7 @@ describe('ReactRelayQueryRenderer', () => { // Flush some of the changes, but don't commit Scheduler.unstable_flushNumberOfYields(2); - expect(Scheduler.unstable_clearYields()).toEqual(['A', 'B']); + expect(Scheduler.unstable_clearLog()).toEqual(['A', 'B']); expect(renderer.toJSON()).toEqual(null); expect({ error: null, @@ -197,6 +207,16 @@ describe('ReactRelayQueryRenderer', () => { }); describe('when store has a snapshot', () => { it('fetches the query only once, renders snapshot from store', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !ReactTestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + environment.mockClear(); environment.applyUpdate({ storeUpdater: _store => { @@ -214,7 +234,7 @@ describe('ReactRelayQueryRenderer', () => { function Child(props) { // NOTE the unstable_yield method will move to the static renderer. // When React sync runs we need to update this. 
- Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; } @@ -244,7 +264,7 @@ describe('ReactRelayQueryRenderer', () => { // Flush some of the changes, but don't commit Scheduler.unstable_flushNumberOfYields(2); - expect(Scheduler.unstable_clearYields()).toEqual(['A', 'B']); + expect(Scheduler.unstable_clearLog()).toEqual(['A', 'B']); expect(renderer.toJSON()).toEqual(null); expect({ error: null, @@ -257,7 +277,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -282,7 +301,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -292,6 +310,16 @@ describe('ReactRelayQueryRenderer', () => { }); describe('when fetch returns a response synchronously first time', () => { it('fetches the query once, always renders snapshot returned by fetch', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !ReactTestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const fetch = jest.fn().mockReturnValueOnce(response); store = new Store(new RecordSource()); environment = new Environment({ @@ -300,9 +328,9 @@ describe('ReactRelayQueryRenderer', () => { }); function Child(props) { - // NOTE the unstable_yieldValue method will move to the static renderer. + // NOTE the log method will move to the static renderer. // When React sync runs we need to update this. 
- Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; } @@ -332,7 +360,7 @@ describe('ReactRelayQueryRenderer', () => { // Flush some of the changes, but don't commit Scheduler.unstable_flushNumberOfYields(2); - expect(Scheduler.unstable_clearYields()).toEqual(['A', 'B']); + expect(Scheduler.unstable_clearLog()).toEqual(['A', 'B']); expect(renderer.toJSON()).toEqual(null); expect({ error: null, @@ -345,7 +373,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -370,7 +397,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -424,7 +450,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: firstOwner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -484,7 +509,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: thirdOwner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '6', }, }, @@ -561,7 +585,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -597,7 +620,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -774,7 +796,6 @@ describe('ReactRelayQueryRenderer', () => { ReactRelayQueryRendererTestFragment: {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '', }, }, @@ -1130,7 +1151,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -1189,7 +1209,6 @@ describe('ReactRelayQueryRenderer', () => { __fragmentOwner: owner.request, __id: '4', - __isWithinUnmatchedTypeRefinement: false, }, }, retry: expect.any(Function), @@ -1209,7 +1228,6 @@ 
describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, @@ -1264,7 +1282,6 @@ describe('ReactRelayQueryRenderer', () => { }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __id: '4', }, }, diff --git a/packages/react-relay/__tests__/ReactRelayRefetchContainer-WithFragmentOwnership-test.js b/packages/react-relay/__tests__/ReactRelayRefetchContainer-WithFragmentOwnership-test.js index f5d3f8ced2331..84fdad81eb571 100644 --- a/packages/react-relay/__tests__/ReactRelayRefetchContainer-WithFragmentOwnership-test.js +++ b/packages/react-relay/__tests__/ReactRelayRefetchContainer-WithFragmentOwnership-test.js @@ -225,7 +225,6 @@ describe('ReactRelayRefetchContainer with fragment ownership', () => { }, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(TestChildComponent.mock.calls.length).toBe(1); expect(TestChildComponent.mock.calls[0][0].user).toEqual({ @@ -278,7 +277,6 @@ describe('ReactRelayRefetchContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedOwner.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(render.mock.calls[0][0].user.name).toBe(undefined); @@ -305,7 +303,6 @@ describe('ReactRelayRefetchContainer with fragment ownership', () => { }, }, __fragmentOwner: ownerUser1.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(TestChildComponent.mock.calls.length).toBe(1); expect(TestChildComponent.mock.calls[0][0].user).toEqual({ @@ -364,7 +361,6 @@ describe('ReactRelayRefetchContainer with fragment ownership', () => { }, }, __fragmentOwner: expectedOwner.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(render.mock.calls[0][0].user.name).toBe(undefined); diff --git a/packages/react-relay/__tests__/ReactRelayRefetchContainer-test.js b/packages/react-relay/__tests__/ReactRelayRefetchContainer-test.js index cafd5be640fcd..30c0a0b7a8f30 100644 --- 
a/packages/react-relay/__tests__/ReactRelayRefetchContainer-test.js +++ b/packages/react-relay/__tests__/ReactRelayRefetchContainer-test.js @@ -88,9 +88,7 @@ describe('ReactRelayRefetchContainer', () => { } } beforeEach(() => { - jest.mock('scheduler', () => { - return jest.requireActual('scheduler/unstable_mock'); - }); + jest.mock('scheduler', () => require('./mockScheduler')); environment = createMockEnvironment(); UserFragment = graphql` fragment ReactRelayRefetchContainerTestUserFragment on User @@ -128,7 +126,10 @@ describe('ReactRelayRefetchContainer', () => { return ; }); variables = {}; - TestComponent = render; + TestComponent = ({ref, ...props}) => { + // Omit `ref` for forward-compatibility with `enableRefAsProp`. + return render(props); + }; TestComponent.displayName = 'TestComponent'; TestContainer = ReactRelayRefetchContainer.createContainer( TestComponent, @@ -272,6 +273,7 @@ describe('ReactRelayRefetchContainer', () => { }, isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -366,6 +368,7 @@ describe('ReactRelayRefetchContainer', () => { }, isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -420,6 +423,7 @@ describe('ReactRelayRefetchContainer', () => { }, isMissingData: false, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -498,6 +502,7 @@ describe('ReactRelayRefetchContainer', () => { // Name is excluded since value of cond is now false }, missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -1087,7 +1092,7 @@ describe('ReactRelayRefetchContainer', () => { describe('concurrent mode', () => { function assertYieldsWereCleared(_scheduler) { - 
const actualYields = _scheduler.unstable_clearYields(); + const actualYields = _scheduler.unstable_clearLog(); if (actualYields.length !== 0) { throw new Error( 'Log of yielded values is not empty. ' + @@ -1100,7 +1105,7 @@ describe('ReactRelayRefetchContainer', () => { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushAllWithoutAsserting(); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } @@ -1108,14 +1113,24 @@ describe('ReactRelayRefetchContainer', () => { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushNumberOfYields(expectedYields.length); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } it('upon commit, it should pick up changes in data that happened before comitting', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !ReactTestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const Scheduler = require('scheduler'); const YieldChild = props => { - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; }; const YieldyUserComponent = ({user, relay}) => { diff --git a/packages/react-relay/__tests__/RelayResolverInterface-test.js b/packages/react-relay/__tests__/RelayResolverInterface-test.js new file mode 100644 index 0000000000000..5abeb2aab4fe7 --- /dev/null +++ b/packages/react-relay/__tests__/RelayResolverInterface-test.js @@ -0,0 +1,446 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {RelayResolverInterfaceTestAnimalLegsFragment$key} from './__generated__/RelayResolverInterfaceTestAnimalLegsFragment.graphql'; +import type {RelayResolverInterfaceTestWeakAnimalColorFragment$key} from './__generated__/RelayResolverInterfaceTestWeakAnimalColorFragment.graphql'; + +const React = require('react'); +const {useFragment} = require('react-relay'); +const {RelayEnvironmentProvider, useClientQuery} = require('react-relay'); +const TestRenderer = require('react-test-renderer'); +const {RecordSource} = require('relay-runtime'); +const {RelayFeatureFlags} = require('relay-runtime'); +const RelayNetwork = require('relay-runtime/network/RelayNetwork'); +const {graphql} = require('relay-runtime/query/GraphQLTag'); +const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js'); +const RelayModernEnvironment = require('relay-runtime/store/RelayModernEnvironment'); +const { + disallowConsoleErrors, + disallowWarnings, +} = require('relay-test-utils-internal'); + +disallowWarnings(); +disallowConsoleErrors(); + +beforeEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; +}); + +afterEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; +}); + +function EnvironmentWrapper({ + children, + environment, +}: { + children: React.Node, + environment: RelayModernEnvironment, +}) { + return ( + + {children} + + ); +} + +let environment; +let store; +beforeEach(() => { + store = new LiveResolverStore( + new RecordSource({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + chicken: {__ref: 'greeneggsandham'}, + }, + greeneggsandham: { + __id: 'greeneggsandham', + __typename: 'Chicken', + legs: '2', + greeting: 'Hello, greeneggsandham!', + }, + }), + {}, + ); + environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); +}); + +function AnimalLegsComponent(props: 
{ + animal: ?RelayResolverInterfaceTestAnimalLegsFragment$key, +}) { + const animal = useFragment( + graphql` + fragment RelayResolverInterfaceTestAnimalLegsFragment on IAnimal { + legs + } + `, + props.animal, + ); + return animal?.legs; +} + +test('should read the legs of a cat', () => { + function CatLegsRootComponent() { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestCatLegsQuery { + cat { + ...RelayResolverInterfaceTestAnimalLegsFragment + } + } + `, + {}, + ); + + return ; + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('4'); +}); + +test('should read the legs of a fish', () => { + function FishLegsRootComponent() { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestFishLegsQuery { + fish { + ...RelayResolverInterfaceTestAnimalLegsFragment + } + } + `, + {}, + ); + + return ; + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('0'); +}); + +test('should read the legs of a chicken (client schema extension type)', () => { + function ChickenLegsRootComponent() { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestChickenLegsQuery { + chicken { + ...RelayResolverInterfaceTestAnimalLegsFragment + } + } + `, + {}, + ); + + return ; + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('2'); +}); + +function WeakAnimalColorFragmentComponent(props: { + animal: ?RelayResolverInterfaceTestWeakAnimalColorFragment$key, +}) { + const animal = useFragment( + graphql` + fragment RelayResolverInterfaceTestWeakAnimalColorFragment on IWeakAnimal { + color + } + `, + props.animal, + ); + return animal?.color; +} + +test('should read the color of a red octopus (weak model type)', () => { + function RedOctopusColorRootComponent() { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestRedOctopusColorQuery { + red_octopus { + 
...RelayResolverInterfaceTestWeakAnimalColorFragment + } + } + `, + {}, + ); + + return ; + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('red'); +}); + +function AnimalGreetingQueryComponent(props: { + request: {ofType: string, returnValidID: boolean}, +}) { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestAnimalGreetingQuery( + $request: AnimalRequest! + ) { + animal(request: $request) { + greeting + } + } + `, + {request: props.request}, + ); + if (data.animal == null) { + return 'NULL'; + } + return data.animal.greeting; +} + +function WeakAnimalGreetingQueryComponent(props: {request: {ofType: string}}) { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestWeakAnimalGreetingQuery( + $request: WeakAnimalRequest! + ) { + weak_animal(request: $request) { + greeting + } + } + `, + {request: props.request}, + ); + if (data.weak_animal == null) { + return 'NULL'; + } + return data.weak_animal.greeting; +} + +describe.each([ + { + inputAnimalType: 'Fish', + id: '12redblue', + }, + { + inputAnimalType: 'Cat', + id: '1234567890', + }, +])( + 'resolvers can read resolver on an interface where all implementors are strong model types: %s', + ({inputAnimalType, id}) => { + test(`should read the greeting of a ${inputAnimalType}`, () => { + const animalRenderer = TestRenderer.create( + + + , + ); + + expect(animalRenderer.toJSON()).toEqual(`Hello, ${id}!`); + }); + + test(`should return null for nonexistent ${inputAnimalType}`, () => { + const nullRenderer = TestRenderer.create( + + + , + ); + expect(nullRenderer.toJSON()).toEqual('NULL'); + }); + }, +); + +describe.each([ + { + inputAnimalType: 'RedOctopus', + name: 'Shiny', + }, + { + inputAnimalType: 'PurpleOctopus', + name: 'Glowing', + }, +])( + 'resolvers can read resolver on an interface where all implementors are weak model types: %s', + ({inputAnimalType, name}) => { + test(`should read the greeting of a 
${inputAnimalType}`, () => { + const animalRenderer = TestRenderer.create( + + + , + ); + + expect(animalRenderer.toJSON()).toEqual(`Hello, ${name}!`); + }); + }, +); + +describe.each([ + { + animalType: 'RedOctopus', + color: 'red', + }, + { + animalType: 'PurpleOctopus', + color: 'purple', + }, +])( + 'resolvers can return an interface where all implementors are weak model types: %s', + ({animalType, color}) => { + function WeakAnimalColorQueryComponent(props: {request: {ofType: string}}) { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestWeakAnimalColorQuery( + $request: WeakAnimalRequest! + ) { + weak_animal(request: $request) { + ...RelayResolverInterfaceTestWeakAnimalColorFragment + } + } + `, + {request: props.request}, + ); + return ; + } + + test(`should read the color of a ${animalType}`, () => { + const animalRenderer = TestRenderer.create( + + + , + ); + expect(animalRenderer.toJSON()).toEqual(color); + }); + }, +); + +test('resolvers can return a list of interfaces where all implementors are strong model types', () => { + function AnimalsLegsQueryComponent(props: { + requests: Array<{ofType: string, returnValidID: boolean}>, + }) { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestAnimalsLegsQuery( + $requests: [AnimalRequest!]! + ) { + animals(requests: $requests) { + id + ...RelayResolverInterfaceTestAnimalLegsFragment + } + } + `, + {requests: props.requests}, + ); + + return data.animals?.map((animal, index) => { + if (animal == null) { + return 'NULL'; + } + return ; + }); + } + + const animalRenderer = TestRenderer.create( + + + , + ); + expect(animalRenderer.toJSON()).toEqual(['0', 'NULL', '4', 'NULL']); +}); + +function AnimalLegsQueryComponent(props: { + request: {ofType: string, returnValidID: boolean}, +}) { + const data = useClientQuery( + graphql` + query RelayResolverInterfaceTestAnimalLegsQuery( + $request: AnimalRequest! 
+ ) { + animal(request: $request) { + ...RelayResolverInterfaceTestAnimalLegsFragment + } + } + `, + {request: props.request}, + ); + if (data.animal == null) { + return 'NULL'; + } + + return ; +} + +describe.each([ + { + inputAnimalType: 'Fish', + expectedLegs: '0', + }, + { + inputAnimalType: 'Cat', + expectedLegs: '4', + }, +])( + 'resolvers can return an interface where all implementors are strong model types: %s', + ({inputAnimalType, expectedLegs}) => { + test(`should read the legs of a ${inputAnimalType}`, () => { + const animalRenderer = TestRenderer.create( + + + , + ); + + expect(animalRenderer.toJSON()).toEqual(expectedLegs); + }); + + test(`should return null for nonexistent ${inputAnimalType}`, () => { + const nullRenderer = TestRenderer.create( + + + , + ); + expect(nullRenderer.toJSON()).toEqual('NULL'); + }); + }, +); diff --git a/packages/react-relay/__tests__/RelayResolverModel-test.js b/packages/react-relay/__tests__/RelayResolverModel-test.js index aad8600fbed9c..b7a4e89dd6ec5 100644 --- a/packages/react-relay/__tests__/RelayResolverModel-test.js +++ b/packages/react-relay/__tests__/RelayResolverModel-test.js @@ -16,24 +16,32 @@ import type {RelayResolverModelTestFragment$key} from './__generated__/RelayReso import type {RelayResolverModelTestInterfaceFragment$key} from './__generated__/RelayResolverModelTestInterfaceFragment.graphql'; import type {RelayResolverModelTestWithPluralFragment$key} from './__generated__/RelayResolverModelTestWithPluralFragment.graphql'; +const invariant = require('invariant'); const React = require('react'); const { RelayEnvironmentProvider, useClientQuery, - useFragment: useFragment_LEGACY, - useLazyLoadQuery: useLazyLoadQuery_LEGACY, + useFragment, } = require('react-relay'); -const useFragment_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE'); -const useLazyLoadQuery_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE'); const TestRenderer = 
require('react-test-renderer'); const {RelayFeatureFlags} = require('relay-runtime'); const RelayNetwork = require('relay-runtime/network/RelayNetwork'); const {graphql} = require('relay-runtime/query/GraphQLTag'); const { addTodo, + changeDescription, completeTodo, + removeTodo, resetStore, } = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); +const { + chargeBattery, + resetModels, + setIsHuman, +} = require('relay-runtime/store/__tests__/resolvers/MutableModel'); +const { + LiveColorSubscriptions, +} = require('relay-runtime/store/__tests__/resolvers/TodoDescription'); const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js'); const RelayModernEnvironment = require('relay-runtime/store/RelayModernEnvironment'); const RelayRecordSource = require('relay-runtime/store/RelayRecordSource'); @@ -52,26 +60,14 @@ function logFn(event: LogEvent): void { beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; logEvents = []; resetStore(logFn); }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); -function createEnvironment() { - return new RelayModernEnvironment({ - network: RelayNetwork.create(jest.fn()), - store: new LiveResolverStore(RelayRecordSource.create(), { - log: logFn, - }), - log: logFn, - }); -} - function EnvironmentWrapper({ children, environment, @@ -86,112 +82,54 @@ function EnvironmentWrapper({ ); } -describe.each([ - ['React Cache', useLazyLoadQuery_REACT_CACHE, useFragment_REACT_CACHE], - ['Legacy', useLazyLoadQuery_LEGACY, useFragment_LEGACY], -])('Hook implementation: %s', (_hookName, useLazyLoadQuery, useFragment) => { - const usingReactCache = useLazyLoadQuery === useLazyLoadQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not 
yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - let environment; - beforeEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = usingReactCache; - environment = createEnvironment(); - }); - - function TodoComponent(props: { - fragmentKey: ?RelayResolverModelTestFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolverModelTestFragment on TodoModel { - id - fancy_description { - text - color - } - } - `, - props.fragmentKey, - ); - if (data == null) { - return null; - } - - // TODO: The `__relay_model_instance` will be hidden from the - // users and impossible to select. - return `${data.fancy_description?.text ?? 'unknown'} - ${ - data.fancy_description?.color ?? 'unknown' - }`; - } - - function TodoRootComponent(props: {todoID: string}) { - const data = useClientQuery( - graphql` - query RelayResolverModelTestTodoQuery($id: ID!) { - todo_model(todoID: $id) { - ...RelayResolverModelTestFragment - } - } - `, - {id: props.todoID}, - ); - if (data?.todo_model == null) { - return null; - } - - return ; - } - - test('should read title of the model', () => { - addTodo('Test todo'); - - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('Test todo - red'); - }); +describe.each([['New', useFragment]])( + 'Hook implementation: %s', + (_hookName, useFragment) => { + let environment; + let store; + beforeEach(() => { + store = new LiveResolverStore(RelayRecordSource.create(), { + log: logFn, + }); + environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + log: logFn, + }); + }); - test('should render `null` model.', () => { - function TodoNullComponent() { - const data = useClientQuery( + function TodoComponent(props: { + fragmentKey: ?RelayResolverModelTestFragment$key, + }) { + const data = useFragment( graphql` - query RelayResolverModelTestTodoNullQuery { - todo_model_null { - id + fragment RelayResolverModelTestFragment on 
TodoModel { + id + fancy_description { + text + color } } `, - {}, + props.fragmentKey, ); - if (data?.todo_model_null == null) { + if (data == null) { return null; } - return data?.todo_model_null.id; + // TODO: The `__relay_model_instance` will be hidden from the + // users and impossible to select. + return `${data.fancy_description?.text ?? 'unknown'} - ${ + data.fancy_description?.color ?? 'unknown' + }`; } - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual(null); - }); - - test('read plural resolver field', () => { - function TodoComponentWithPluralResolverComponent(props: {todoID: string}) { + + function TodoRootComponent(props: {todoID: string}) { const data = useClientQuery( graphql` - query RelayResolverModelTestTodoWithPluralFieldQuery($id: ID!) { + query RelayResolverModelTestTodoQuery($id: ID!) { todo_model(todoID: $id) { - ...RelayResolverModelTestWithPluralFragment + ...RelayResolverModelTestFragment } } `, @@ -201,155 +139,545 @@ describe.each([ return null; } - return ( - - ); + return ; } - function TodoComponentWithPluralDescription(props: { - fragmentKey: ?RelayResolverModelTestWithPluralFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolverModelTestWithPluralFragment on TodoModel { - many_fancy_descriptions { - text - color + test('should read title of the model', () => { + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Test todo - red'); + }); + + test('should render `null` model.', () => { + function TodoNullComponent() { + const data = useClientQuery( + graphql` + query RelayResolverModelTestTodoNullQuery { + todo_model_null { + id + } } - } - `, - props.fragmentKey, + `, + {}, + ); + if (data?.todo_model_null == null) { + return null; + } + + return data?.todo_model_null.id; + } + const renderer = TestRenderer.create( + + + , ); - if (data == null) { - return null; + expect(renderer.toJSON()).toEqual(null); 
+ }); + + test('read plural resolver field', () => { + function TodoComponentWithPluralResolverComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestTodoWithPluralFieldQuery($id: ID!) { + todo_model(todoID: $id) { + ...RelayResolverModelTestWithPluralFragment + } + } + `, + {id: props.todoID}, + ); + if (data?.todo_model == null) { + return null; + } + + return ( + + ); } - return data.many_fancy_descriptions - ?.map( - item => `${item?.text ?? 'unknown'} - ${item?.color ?? 'unknown'}`, - ) - .join(','); - } + function TodoComponentWithPluralDescription(props: { + fragmentKey: ?RelayResolverModelTestWithPluralFragment$key, + }) { + const data = useFragment( + graphql` + fragment RelayResolverModelTestWithPluralFragment on TodoModel { + many_fancy_descriptions { + text + color + } + } + `, + props.fragmentKey, + ); + if (data == null) { + return null; + } + + return data.many_fancy_descriptions + ?.map( + item => `${item?.text ?? 'unknown'} - ${item?.color ?? 'unknown'}`, + ) + .join(','); + } - addTodo('Test todo'); + addTodo('Test todo'); - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('Test todo - red'); - }); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Test todo - red'); + }); - test('read live @weak resolver field', () => { - function TodoComponentWithPluralResolverComponent(props: {todoID: string}) { - const data = useClientQuery( - graphql` - query RelayResolverModelTestWeakLiveFieldQuery($id: ID!) { - live_todo_description(todoID: $id) { - text - color + test('read live @weak resolver field', () => { + function TodoComponentWithPluralResolverComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestWeakLiveFieldQuery($id: ID!) 
{ + live_todo_description(todoID: $id) { + text + color + } } - } - `, - {id: props.todoID}, + `, + {id: props.todoID}, + ); + if (data?.live_todo_description == null) { + return null; + } + + return `${data.live_todo_description?.text ?? 'unknown'} - ${ + data.live_todo_description?.color ?? 'unknown' + }`; + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , ); - if (data?.live_todo_description == null) { - return null; + expect(renderer.toJSON()).toEqual('Test todo - red'); + + TestRenderer.act(() => { + completeTodo('todo-1'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual('Test todo - green'); + }); + + test('should correctly invalidate subscriptions on live fields when updating @weak models', () => { + LiveColorSubscriptions.activeSubscriptions = []; + function TodoComponentWithPluralResolverComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestWeakLiveColorFieldQuery($id: ID!) { + live_todo_description(todoID: $id) { + text + live_color + } + } + `, + {id: props.todoID}, + ); + if (data?.live_todo_description == null) { + return null; + } + + return `${data.live_todo_description?.text ?? 'unknown'} - ${ + data.live_todo_description?.live_color ?? 'unknown' + }`; } + addTodo('Test todo'); + expect(LiveColorSubscriptions.activeSubscriptions.length).toBe(0); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Test todo - red'); + expect(LiveColorSubscriptions.activeSubscriptions.length).toBe(1); - return `${data.live_todo_description?.text ?? 'unknown'} - ${ - data.live_todo_description?.color ?? 
'unknown' - }`; - } + TestRenderer.act(() => { + completeTodo('todo-1'); + jest.runAllImmediates(); + }); + expect(LiveColorSubscriptions.activeSubscriptions.length).toBe(1); - addTodo('Test todo'); + expect(renderer.toJSON()).toEqual('Test todo - green'); - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('Test todo - red'); + TestRenderer.act(() => { + removeTodo('todo-1'); + jest.runAllImmediates(); + }); - TestRenderer.act(() => { - completeTodo('todo-1'); + expect(renderer.toJSON()).toEqual(null); + // Run GC to will remove "orphan" records and unsubscribe if they have live resolver subscriptions + store.scheduleGC(); jest.runAllImmediates(); + + expect(LiveColorSubscriptions.activeSubscriptions.length).toBe(0); }); - expect(renderer.toJSON()).toEqual('Test todo - green'); - }); - test('read interface field', () => { - function TodoComponentWithInterfaceComponent(props: {todoID: string}) { - const data = useClientQuery( - graphql` - query RelayResolverModelTestTodoWithInterfaceQuery($id: ID!) { - todo_model(todoID: $id) { - ...RelayResolverModelTestInterfaceFragment + test('read a field with arguments', () => { + function TodoComponentWithFieldWithArgumentsComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestFieldWithArgumentsQuery($id: ID!) 
{ + todo_model(todoID: $id) { + fancy_description { + text_with_prefix(prefix: "[x]") + } + } } - } - `, - {id: props.todoID}, + `, + {id: props.todoID}, + ); + return data?.todo_model?.fancy_description?.text_with_prefix; + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , ); - if (data?.todo_model == null) { - return null; + expect(renderer.toJSON()).toEqual('[x] Test todo'); + + TestRenderer.act(() => { + changeDescription('todo-1', 'Changed todo description text'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual('[x] Changed todo description text'); + }); + + // If a resolver that returns a weak model returns null, that should result in + // the edge beign null, not just the model field. + test('@weak model client edge returns null', () => { + function TodoComponentWithNullWeakClientEdge(props: {todoID: string}) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestNullWeakClientEdgeQuery($id: ID!) { + todo_model(todoID: $id) { + fancy_description_null { + text_with_prefix(prefix: "[x]") + } + } + } + `, + {id: props.todoID}, + ); + invariant( + data.todo_model != null, + 'Expected todo model to be defiend.', + ); + return data.todo_model.fancy_description_null == null + ? 'NULL!' + : 'NOT NULL!'; } + addTodo('Test todo'); - return ; - } + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('NULL!'); + }); - function TodoComponentWithInterface(props: { - fragmentKey: ?RelayResolverModelTestInterfaceFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolverModelTestInterfaceFragment on TodoModel { - fancy_description { - some_interface { - __typename - description + // Ensure we don't: + // 1. Wrap a suspense value coming from a @weak model resolver + // 2. 
Don't try to normalize a suspense sentinel as a model value + test('@weak model client edge suspends', () => { + function TodoComponentWithNullWeakClientEdge(props: {todoID: string}) { + useClientQuery( + graphql` + query RelayResolverModelTestSuspendedWeakClientEdgeQuery($id: ID!) { + todo_model(todoID: $id) { + fancy_description_suspends { + text_with_prefix(prefix: "[x]") + } } - some_client_type_with_interface { - client_interface { - __typename - description + } + `, + {id: props.todoID}, + ); + invariant(false, 'Expected to suspend.'); + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Loading...'); + }); + + test('null items in list of @weak models', () => { + function TodoComponentWithNullablePluralResolverComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestTodoWithNullablePluralFieldQuery( + $id: ID! + ) { + todo_model(todoID: $id) { + many_fancy_descriptions_but_some_are_null { + text } } } - } - `, - props.fragmentKey, + `, + {id: props.todoID}, + ); + + const fancyDescriptions = + data?.todo_model?.many_fancy_descriptions_but_some_are_null; + if (fancyDescriptions == null) { + return null; + } + + return fancyDescriptions + .map(item => + item == null ? 'ITEM IS NULL' : item.text ?? 'TEXT IS NULL', + ) + .join(', '); + } + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , ); - return JSON.stringify(data); - } - addTodo('Test todo'); - - const renderer = TestRenderer.create( - - - , - ); - // $FlowFixMe[incompatible-call] Yes, it is compatible... - const response = JSON.parse(renderer.toJSON() ?? 
'{}'); - jest.runAllImmediates(); - - // This incorrectly currently reads out just the typename from resolvers which - // return interface fields - expect(response.fancy_description?.some_interface).toEqual({ - __typename: 'ClientTypeImplementingClientInterface', - description: 'It was a magical place', + // TODO: T184433715 We currently break with the GraphQL spec and filter out null items in lists. + expect(renderer.toJSON()).toEqual('Test todo'); + }); + + test('read a field with its own root fragment', () => { + function TodoComponentWithFieldWithRootFragmentComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestFieldWithRootFragmentQuery($id: ID!) { + todo_model(todoID: $id) { + capitalized_id + } + } + `, + {id: props.todoID}, + ); + return data?.todo_model?.capitalized_id; + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('TODO-1'); + }); + + test('read a field with its own root fragment defined using legacy non-terse syntax', () => { + function TodoComponentWithFieldWithRootFragmentComponent(props: { + todoID: string, + }) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestFieldWithRootFragmentLegacyQuery( + $id: ID! + ) { + todo_model(todoID: $id) { + capitalized_id_legacy + } + } + `, + {id: props.todoID}, + ); + return data?.todo_model?.capitalized_id_legacy; + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('TODO-1'); }); - // However, for resolvers which return objects that contain interface fields, - // we correctly read out the data. 
- expect( - response?.fancy_description?.some_client_type_with_interface, - ).toEqual({ - client_interface: { + test('read interface field', () => { + function TodoComponentWithInterfaceComponent(props: {todoID: string}) { + const data = useClientQuery( + graphql` + query RelayResolverModelTestTodoWithInterfaceQuery($id: ID!) { + todo_model(todoID: $id) { + ...RelayResolverModelTestInterfaceFragment + } + } + `, + {id: props.todoID}, + ); + if (data?.todo_model == null) { + return null; + } + + return ; + } + + function TodoComponentWithInterface(props: { + fragmentKey: ?RelayResolverModelTestInterfaceFragment$key, + }) { + const data = useFragment( + graphql` + fragment RelayResolverModelTestInterfaceFragment on TodoModel { + fancy_description { + some_interface { + __typename + description + } + some_client_type_with_interface { + client_interface { + __typename + description + } + } + } + } + `, + props.fragmentKey, + ); + return JSON.stringify(data); + } + + addTodo('Test todo'); + + const renderer = TestRenderer.create( + + + , + ); + // $FlowFixMe[incompatible-call] Yes, it is compatible... + const response = JSON.parse(renderer.toJSON() ?? '{}'); + jest.runAllImmediates(); + + // This incorrectly currently reads out just the typename from resolvers which + // return interface fields + expect(response.fancy_description?.some_interface).toEqual({ __typename: 'ClientTypeImplementingClientInterface', description: 'It was a magical place', - }, + }); + + // However, for resolvers which return objects that contain interface fields, + // we correctly read out the data. 
+ expect( + response?.fancy_description?.some_client_type_with_interface, + ).toEqual({ + client_interface: { + __typename: 'ClientTypeImplementingClientInterface', + description: 'It was a magical place', + }, + }); }); - }); -}); + + const getMutableEntityQuery = graphql` + query RelayResolverModelTestGetMutableEntityQuery { + mutable_entity + } + `; + test('should not mutate complex resolver values', () => { + resetModels(); + // Do not deep freeze + jest.mock('relay-runtime/util/deepFreeze'); + + TestRenderer.act(() => { + setIsHuman(true); + }); + + function GetMutableEntity() { + const data = useClientQuery(getMutableEntityQuery, {}); + if (data.mutable_entity == null) { + return null; + } + return `${data.mutable_entity.type}:${data.mutable_entity.props.battery}`; + } + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('human:0'); + + TestRenderer.act(() => { + setIsHuman(false); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual('robot:0'); + + TestRenderer.act(() => { + chargeBattery(); + setIsHuman(true); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual('human:0'); + + TestRenderer.act(() => { + renderer.unmount(); + }); + jest.unmock('relay-runtime/util/deepFreeze'); + }); + + test('should not freeze complex resolver values', () => { + resetModels(); + TestRenderer.act(() => { + setIsHuman(false); + }); + function GetMutableEntity() { + const data = useClientQuery(getMutableEntityQuery, {}); + if (data.mutable_entity == null) { + return null; + } + return `${data.mutable_entity.type}:${data.mutable_entity.props.battery}`; + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('robot:0'); + + expect(() => { + chargeBattery(); + }).not.toThrow(); + + TestRenderer.act(() => { + renderer.unmount(); + }); + }); + }, +); diff --git a/packages/react-relay/__tests__/RelayResolverNullableModelClientEdge-test.js 
b/packages/react-relay/__tests__/RelayResolverNullableModelClientEdge-test.js new file mode 100644 index 0000000000000..aa191c5f06613 --- /dev/null +++ b/packages/react-relay/__tests__/RelayResolverNullableModelClientEdge-test.js @@ -0,0 +1,552 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +import type {DataID, LogEvent} from 'relay-runtime'; +import type {RelayMockEnvironment} from 'relay-test-utils'; + +const invariant = require('invariant'); +const React = require('react'); +const {RelayEnvironmentProvider, useClientQuery} = require('react-relay'); +const TestRenderer = require('react-test-renderer'); +const { + __internal, + Environment, + Network, + RecordSource, + RelayFeatureFlags, + createOperationDescriptor, + graphql, +} = require('relay-runtime'); +const { + addTodo, +} = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); +const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore'); +const {createMockEnvironment} = require('relay-test-utils'); + +/** + * CLIENT EDGE TO PLURAL LIVE STRONG CLIENT OBJECT + */ + +/** + * @RelayResolver Query.edge_to_plural_live_objects_some_exist: [TodoModel] + */ +export function edge_to_plural_live_objects_some_exist(): $ReadOnlyArray<{ + id: DataID, +}> { + return [{id: 'todo-1'}, {id: 'THERE_IS_NO_TODO_WITH_THIS_ID'}]; +} + +/** + * @RelayResolver Query.edge_to_plural_live_objects_none_exist: [TodoModel] + */ +export function edge_to_plural_live_objects_none_exist(): $ReadOnlyArray<{ + id: DataID, +}> { + return [{id: 'NO_TODO_1'}, {id: 'NO_TODO_2'}]; +} + +/** + * CLIENT EDGE TO LIVE STRONG CLIENT OBJECT + */ + +/** + * @RelayResolver Query.edge_to_live_object_does_not_exist: TodoModel + */ +export function edge_to_live_object_does_not_exist(): {id: 
DataID} { + return {id: 'THERE_IS_NO_TODO_WITH_THIS_ID'}; +} + +/** + * CLIENT EDGE TO WEAK CLIENT OBJECT + */ + +/** + * @RelayResolver WeakModel + * @weak + */ +export type WeakModel = { + firstName: string, + lastName: string, +}; + +/** + * @RelayResolver Query.edge_to_null_weak_model: WeakModel + */ +export function edge_to_null_weak_model(): ?WeakModel { + return null; +} + +/** + * @RelayResolver WeakModel.first_name: String + */ +export function first_name(model: WeakModel): string { + return model.firstName; +} + +/** + * CLIENT EDGE TO STRONG CLIENT OBJECT + */ + +type StrongModelType = ?{ + id: string, + name: string, +}; + +/** + * @RelayResolver StrongModel + */ +export function StrongModel(id: string): StrongModelType { + return null; +} + +/** + * @RelayResolver StrongModel.name: String + */ +export function name(model: StrongModelType): ?string { + return model?.name; +} + +/** + * @RelayResolver Query.edge_to_strong_model_does_not_exist: StrongModel + */ +export function edge_to_strong_model_does_not_exist(): {id: DataID} { + return {id: 'THERE_IS_NO_STRONG_MODEL_WITH_THIS_ID'}; +} + +/** + * CLIENT EDGE TO SERVER OBJECT + */ + +/** + * @RelayResolver Query.edge_to_server_object_does_not_exist: Comment + */ +export function edge_to_server_object_does_not_exist(): {id: DataID} { + return {id: 'THERE_IS_NO_COMMENT_WITH_THIS_ID'}; +} + +/** + * ERROR CASES + */ + +const ERROR_ID = 'error'; +const ERROR_MESSAGE = `IDs containing ${ERROR_ID} will cause an error to be thrown`; + +type ErrorModelType = ?{ + id: string, +}; + +/** + * @RelayResolver ErrorModel + */ +export function ErrorModel(id: string): ErrorModelType { + if (!id.includes(ERROR_ID)) { + return {id}; + } + throw new Error(ERROR_MESSAGE); +} + +/** + * @RelayResolver Query.edge_to_model_that_throws: ErrorModel + */ +export function edge_to_model_that_throws(): {id: DataID} { + return {id: ERROR_ID}; +} + +/** + * @RelayResolver Query.edge_to_plural_models_that_throw: [ErrorModel] + */ 
+export function edge_to_plural_models_that_throw(): $ReadOnlyArray<{ + id: DataID, +}> { + return [{id: `${ERROR_ID}-1`}, {id: `${ERROR_ID}-2`}]; +} + +/** + * @RelayResolver Query.edge_to_plural_models_some_throw: [ErrorModel] + */ +export function edge_to_plural_models_some_throw(): $ReadOnlyArray<{ + id: DataID, +}> { + return [{id: ERROR_ID}, {id: 'a valid id!'}]; +} + +beforeEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; +}); + +afterEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; +}); + +const logEvents: Array = []; +function logFn(event: LogEvent): void { + logEvents.push(event); +} + +function createEnvironment() { + return new Environment({ + network: Network.create(jest.fn()), + store: new LiveResolverStore(RecordSource.create(), { + log: logFn, + }), + log: logFn, + }); +} + +function EnvironmentWrapper({ + children, + environment, +}: { + children: React.Node, + environment: Environment | RelayMockEnvironment, +}) { + return ( + + {children} + + ); +} + +let environment; +beforeEach(() => { + environment = createEnvironment(); +}); +test('client edge to plural IDs, none have corresponding live object', () => { + function TodoNullComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query { + edge_to_plural_live_objects_none_exist { + id + description + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + expect(data.edge_to_plural_live_objects_none_exist).toHaveLength(2); + return data.edge_to_plural_live_objects_none_exist + ?.map(item => + item + ? `${item.id ?? 'unknown'} - ${item.description ?? 
'unknown'}` + : 'unknown', + ) + .join(','); + } + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('unknown,unknown'); +}); + +test('client edge to plural IDs, some with no corresponding live object', () => { + function TodoNullComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query { + edge_to_plural_live_objects_some_exist { + id + description + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + expect(data.edge_to_plural_live_objects_some_exist).toHaveLength(2); + return data.edge_to_plural_live_objects_some_exist + ?.map(item => + item + ? `${item.id ?? 'unknown'} - ${item.description ?? 'unknown'}` + : 'unknown', + ) + .join(','); + } + + addTodo('Test todo'); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('todo-1 - Test todo,unknown'); +}); + +test('client edge to ID with no corresponding live object', () => { + function TodoNullComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_LiveModel_Query { + edge_to_live_object_does_not_exist { + id + fancy_description { + text + } + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + + switch (data.edge_to_live_object_does_not_exist) { + case null: + return 'Todo was null'; + case undefined: + return 'Todo was undefined'; + default: + return 'Todo was not null or undefined'; + } + } + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Todo was null'); +}); + +test('client edge to ID with no corresponding weak object', () => { + function NullWeakModelComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_WeakModel_Query { + edge_to_null_weak_model { + first_name + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + + 
switch (data.edge_to_null_weak_model) { + case null: + return 'Weak model was null'; + case undefined: + return 'Weak model was undefined'; + default: + return 'Weak model was not null or undefined'; + } + } + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Weak model was null'); +}); + +test('client edge to ID with no corresponding strong object', () => { + function NullStrongModelComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_StrongModel_Query { + edge_to_strong_model_does_not_exist { + name + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + + switch (data.edge_to_strong_model_does_not_exist) { + case null: + return 'strong model was null'; + case undefined: + return 'strong model was undefined'; + default: + return 'strong model was not null or undefined'; + } + } + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('strong model was null'); +}); + +test('client edge to server ID with no corresponding server object', () => { + function NullServerObjectComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_ServerObject_Query { + edge_to_server_object_does_not_exist @waterfall { + name + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + + switch (data.edge_to_server_object_does_not_exist) { + case null: + return 'server object was null'; + case undefined: + return 'server object was undefined'; + default: + return 'server object was not null or undefined'; + } + } + const mock_environment = createMockEnvironment(); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + mock_environment.mock.resolveMostRecentOperation({data: {node: null}}); + jest.runAllImmediates(); + }); + // TODO T169274655 should this be 'server object was 
null'? + expect(renderer.toJSON()).toEqual('server object was undefined'); +}); + +test('client edge to server ID with no corresponding server object (read only id)', () => { + function NullServerObjectComponent() { + const data = useClientQuery( + graphql` + query RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query { + edge_to_server_object_does_not_exist @waterfall { + id + } + } + `, + {}, + ); + + invariant(data != null, 'Query response should be nonnull'); + + switch (data.edge_to_server_object_does_not_exist) { + case null: + return 'server object was null'; + case undefined: + return 'server object was undefined'; + default: + return 'server object was not null or undefined'; + } + } + const mock_environment = createMockEnvironment(); + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('Loading...'); + TestRenderer.act(() => { + mock_environment.mock.resolveMostRecentOperation({data: {node: null}}); + jest.runAllImmediates(); + }); + // TODO T169274655 should this be 'server object was null'? 
+ expect(renderer.toJSON()).toEqual('server object was undefined'); +}); + +test('Errors thrown when reading the model a client edge points to are caught as resolver errors', () => { + const operation = createOperationDescriptor( + graphql` + query RelayResolverNullableModelClientEdgeTest_ErrorModel_Query { + edge_to_model_that_throws { + __typename + } + } + `, + {}, + ); + const snapshot = environment.lookup(operation.fragment); + expect(snapshot.relayResolverErrors).toEqual([ + { + error: Error(ERROR_MESSAGE), + field: { + owner: 'RelayResolverNullableModelClientEdgeTest_ErrorModel_Query', + path: 'edge_to_model_that_throws.__relay_model_instance', + }, + }, + ]); + const data: $FlowExpectedError = snapshot.data; + expect(data.edge_to_model_that_throws).toBe(null); +}); + +test('Errors thrown when reading plural client edge are caught as resolver errors', () => { + const operation = createOperationDescriptor( + graphql` + query RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query { + edge_to_plural_models_that_throw { + __typename + } + } + `, + {}, + ); + const snapshot = environment.lookup(operation.fragment); + expect(snapshot.relayResolverErrors).toEqual([ + { + error: Error(ERROR_MESSAGE), + field: { + owner: + 'RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query', + path: 'edge_to_plural_models_that_throw.__relay_model_instance', + }, + }, + { + error: Error(ERROR_MESSAGE), + field: { + owner: + 'RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query', + path: 'edge_to_plural_models_that_throw.__relay_model_instance', + }, + }, + ]); + const data: $FlowExpectedError = snapshot.data; + expect(data.edge_to_plural_models_that_throw).toStrictEqual([null, null]); +}); + +test('Errors thrown when reading plural client edge are caught as resolver errors and valid data is returned', () => { + const operation = createOperationDescriptor( + graphql` + query RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query { + 
edge_to_plural_models_some_throw { + id + } + } + `, + {}, + ); + const snapshot = environment.lookup(operation.fragment); + expect(snapshot.relayResolverErrors).toEqual([ + { + error: Error(ERROR_MESSAGE), + field: { + owner: + 'RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query', + path: 'edge_to_plural_models_some_throw.__relay_model_instance', + }, + }, + ]); + const data: $FlowExpectedError = snapshot.data; + expect(data.edge_to_plural_models_some_throw).toStrictEqual([ + null, + {id: 'a valid id!'}, + ]); +}); diff --git a/packages/react-relay/__tests__/RelayResolvers-withOutputType-test.js b/packages/react-relay/__tests__/RelayResolvers-withOutputType-test.js index 6230ad2ae370b..889c500f71563 100644 --- a/packages/react-relay/__tests__/RelayResolvers-withOutputType-test.js +++ b/packages/react-relay/__tests__/RelayResolvers-withOutputType-test.js @@ -18,14 +18,8 @@ import type {RelayResolversWithOutputTypeTestTextStyleComponentFragment$key} fro import type {RelayResolversWithOutputTypeTestTodoCompleteFragment$key} from './__generated__/RelayResolversWithOutputTypeTestTodoCompleteFragment.graphql'; const React = require('react'); -const { - RelayEnvironmentProvider, - useClientQuery, - useFragment: useFragment_LEGACY, - useLazyLoadQuery: useLazyLoadQuery_LEGACY, -} = require('react-relay'); -const useFragment_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE'); -const useLazyLoadQuery_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE'); +const {RelayEnvironmentProvider, useClientQuery} = require('react-relay'); +const useFragment = require('react-relay/relay-hooks/useFragment'); const TestRenderer = require('react-test-renderer'); const {RelayFeatureFlags} = require('relay-runtime'); const RelayNetwork = require('relay-runtime/network/RelayNetwork'); @@ -55,14 +49,12 @@ function logFn(event: LogEvent): void { beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = 
true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; logEvents = []; resetStore(logFn); }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); function createEnvironment() { @@ -89,633 +81,679 @@ function EnvironmentWrapper({ ); } -describe.each([ - ['React Cache', useLazyLoadQuery_REACT_CACHE, useFragment_REACT_CACHE], - ['Legacy', useLazyLoadQuery_LEGACY, useFragment_LEGACY], -])('Hook implementation: %s', (_hookName, useLazyLoadQuery, useFragment) => { - const usingReactCache = useLazyLoadQuery === useLazyLoadQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - let environment; - beforeEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = usingReactCache; - environment = createEnvironment(); - }); +let environment; +beforeEach(() => { + environment = createEnvironment(); +}); - function TodoComponent(props: { - fragmentKey: ?RelayResolversWithOutputTypeTestFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolversWithOutputTypeTestFragment on Todo { - text { - content - style { - ...RelayResolversWithOutputTypeTestTextStyleComponentFragment - } +function TodoComponent(props: { + fragmentKey: ?RelayResolversWithOutputTypeTestFragment$key, +}) { + const data = useFragment( + graphql` + fragment RelayResolversWithOutputTypeTestFragment on Todo { + text { + content + style { + ...RelayResolversWithOutputTypeTestTextStyleComponentFragment } - ...RelayResolversWithOutputTypeTestTodoCompleteFragment } - `, - props.fragmentKey, - ); - if (data == null) { - return null; - } - - return ( - <> - {data.text?.content ?? 
'no text'} - - - - ); + ...RelayResolversWithOutputTypeTestTodoCompleteFragment + } + `, + props.fragmentKey, + ); + if (data == null) { + return null; } - function TodoCompleteComponent(props: { - fragmentKey: ?RelayResolversWithOutputTypeTestTodoCompleteFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolversWithOutputTypeTestTodoCompleteFragment on Todo { - complete - } - `, - props.fragmentKey, - ); - let status = 'unknown'; - if (data?.complete != null) { - status = data?.complete ? 'is completed' : 'is not completed'; - } - return status; - } + return ( + <> + {data.text?.content ?? 'no text'} + + + + ); +} - function TodoTextStyleComponent(props: { - fragmentKey: ?RelayResolversWithOutputTypeTestTextStyleComponentFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolversWithOutputTypeTestTextStyleComponentFragment on TodoTextStyle { - font_style - color { - ...RelayResolversWithOutputTypeTestTextColorComponentFragment - } - } - `, - props.fragmentKey, - ); - if (data == null) { - return 'unknown style'; - } - return ( - <> - {`style: ${data.font_style ?? 'unknown font style'}`} - - - ); +function TodoCompleteComponent(props: { + fragmentKey: ?RelayResolversWithOutputTypeTestTodoCompleteFragment$key, +}) { + const data = useFragment( + graphql` + fragment RelayResolversWithOutputTypeTestTodoCompleteFragment on Todo { + complete + } + `, + props.fragmentKey, + ); + let status = 'unknown'; + if (data?.complete != null) { + status = data?.complete ? 
'is completed' : 'is not completed'; } + return status; +} - function TodoTextColorComponent(props: { - fragmentKey: ?RelayResolversWithOutputTypeTestTextColorComponentFragment$key, - }) { - const data = useFragment( - graphql` - fragment RelayResolversWithOutputTypeTestTextColorComponentFragment on TodoTextColor { - human_readable_color +function TodoTextStyleComponent(props: { + fragmentKey: ?RelayResolversWithOutputTypeTestTextStyleComponentFragment$key, +}) { + const data = useFragment( + graphql` + fragment RelayResolversWithOutputTypeTestTextStyleComponentFragment on TodoTextStyle { + font_style + color { + ...RelayResolversWithOutputTypeTestTextColorComponentFragment } - `, - props.fragmentKey, - ); - return `color: ${data?.human_readable_color ?? 'unknown color'}`; + } + `, + props.fragmentKey, + ); + if (data == null) { + return 'unknown style'; } + return ( + <> + {`style: ${data.font_style ?? 'unknown font style'}`} + + + ); +} - function TodoListComponent() { - const data = useClientQuery( - graphql` - query RelayResolversWithOutputTypeTestExceptionalProjectQuery { - todos(first: 10) { - edges { - node { - ...RelayResolversWithOutputTypeTestFragment - } +function TodoTextColorComponent(props: { + fragmentKey: ?RelayResolversWithOutputTypeTestTextColorComponentFragment$key, +}) { + const data = useFragment( + graphql` + fragment RelayResolversWithOutputTypeTestTextColorComponentFragment on TodoTextColor { + human_readable_color + } + `, + props.fragmentKey, + ); + return `color: ${data?.human_readable_color ?? 
'unknown color'}`; +} + +function TodoListComponent() { + const data = useClientQuery( + graphql` + query RelayResolversWithOutputTypeTestExceptionalProjectQuery { + todos(first: 10) { + edges { + node { + ...RelayResolversWithOutputTypeTestFragment } } } - `, - {}, - ); - if (data.todos?.edges?.length === 0) { - return 'No Items'; - } + } + `, + {}, + ); + if (data.todos?.edges?.length === 0) { + return 'No Items'; + } + + return data.todos?.edges?.map((edge, index) => { + return ; + }); +} - return data.todos?.edges?.map((edge, index) => { - return ; - }); +function TodoRootComponent(props: {todoID: string}) { + const data = useClientQuery( + graphql` + query RelayResolversWithOutputTypeTestTodoQuery($id: ID!) { + todo(todoID: $id) { + ...RelayResolversWithOutputTypeTestFragment + } + } + `, + {id: props.todoID}, + ); + if (data?.todo == null) { + return null; } - function TodoRootComponent(props: {todoID: string}) { - const data = useClientQuery( - graphql` - query RelayResolversWithOutputTypeTestTodoQuery($id: ID!) { - todo(todoID: $id) { + return ; +} + +function TodoRootWithBlockedComponent(props: {todoID: string}) { + const data = useClientQuery( + graphql` + query RelayResolversWithOutputTypeTestTodoWithBlockedQuery($id: ID!) { + todo(todoID: $id) { + blocked_by { ...RelayResolversWithOutputTypeTestFragment } } - `, - {id: props.todoID}, - ); - if (data?.todo == null) { - return null; - } - - return ; + } + `, + {id: props.todoID}, + ); + if (data?.todo == null) { + return null; } + return data?.todo.blocked_by?.map((blocking_todo, index) => { + return ; + }); +} - function TodoRootWithBlockedComponent(props: {todoID: string}) { - const data = useClientQuery( - graphql` - query RelayResolversWithOutputTypeTestTodoWithBlockedQuery($id: ID!) 
{ - todo(todoID: $id) { - blocked_by { - ...RelayResolversWithOutputTypeTestFragment - } - } +function ManyTodosComponent(props: {todos: $ReadOnlyArray}) { + const data = useClientQuery( + graphql` + query RelayResolversWithOutputTypeTestManyTodosQuery($todos: [ID]!) { + many_todos(todo_ids: $todos) { + ...RelayResolversWithOutputTypeTestFragment } - `, - {id: props.todoID}, - ); - if (data?.todo == null) { - return null; - } - return data?.todo.blocked_by?.map((blocking_todo, index) => { - return ; - }); + } + `, + { + todos: props.todos, + }, + ); + if (data.many_todos?.length === 0) { + return 'No Items'; } - function ManyTodosComponent(props: {todos: $ReadOnlyArray}) { - const data = useClientQuery( - graphql` - query RelayResolversWithOutputTypeTestManyTodosQuery($todos: [ID]!) { - many_todos(todo_ids: $todos) { - ...RelayResolversWithOutputTypeTestFragment - } - } - `, - { - todos: props.todos, - }, - ); - if (data.many_todos?.length === 0) { - return 'No Items'; - } + return data.many_todos?.map((todo, index) => { + return ; + }); +} - return data.many_todos?.map((todo, index) => { - return ; - }); +function ManyLiveTodosComponent() { + const data = useClientQuery( + graphql` + query RelayResolversWithOutputTypeTestManyLiveTodosQuery { + many_live_todos { + ...RelayResolversWithOutputTypeTestFragment + } + } + `, + {}, + ); + if (data.many_live_todos?.length === 0) { + return 'No Items'; } - test('should render empty state', () => { - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual('No Items'); + return data.many_live_todos?.map((todo, index) => { + return ; }); +} - test('add new item to the list', () => { - const renderer = TestRenderer.create( - - - , - ); +test('should render empty state', () => { + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual('No Items'); +}); - TestRenderer.act(() => { - addTodo('My first todo'); - jest.runAllImmediates(); - }); - - 
expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - TestRenderer.act(() => { - addTodo('My second todo'); - jest.runAllImmediates(); - }); - - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - }); +test('add new item to the list', () => { + const renderer = TestRenderer.create( + + + , + ); - test('complete todo', () => { - const renderer = TestRenderer.create( - - - , - ); - TestRenderer.act(() => { - addTodo('My first todo'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - TestRenderer.act(() => { - completeTodo('todo-1'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - ]); + TestRenderer.act(() => { + addTodo('My first todo'); + jest.runAllImmediates(); }); - test('complete todo and add one more', () => { - const renderer = TestRenderer.create( - - - , - ); - TestRenderer.act(() => { - addTodo('My first todo'); - completeTodo('todo-1'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - ]); - TestRenderer.act(() => { - addTodo('My second todo'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - 'My second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); - test('query single todo item (item is missing)', () => { - const renderer = 
TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toBe(null); + TestRenderer.act(() => { + addTodo('My second todo'); + jest.runAllImmediates(); }); - test('query single todo item (item is present) and complete it', () => { - addTodo('My first todo'); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); +}); - const renderer = TestRenderer.create( - - - , - ); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - TestRenderer.act(() => { - completeTodo('todo-1'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - ]); +test('complete todo', () => { + const renderer = TestRenderer.create( + + + , + ); + TestRenderer.act(() => { + addTodo('My first todo'); + jest.runAllImmediates(); }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + TestRenderer.act(() => { + completeTodo('todo-1'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + ]); +}); - test('render both list and item component', () => { +test('complete todo and add one more', () => { + const renderer = TestRenderer.create( + + + , + ); + TestRenderer.act(() => { addTodo('My first todo'); + completeTodo('todo-1'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + ]); + TestRenderer.act(() => { + addTodo('My second todo'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + 'My 
second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); +}); - const renderer = TestRenderer.create( - - - - , - ); +test('query single todo item (item is missing)', () => { + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toBe(null); +}); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - TestRenderer.act(() => { - addTodo('Second todo'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'Second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - // complete the first item - TestRenderer.act(() => { - completeTodo('todo-1'); - jest.runAllImmediates(); - }); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - 'Second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - ]); +test('query single todo item (item is present) and complete it', () => { + addTodo('My first todo'); + + const renderer = TestRenderer.create( + + + , + ); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + TestRenderer.act(() => { + completeTodo('todo-1'); + jest.runAllImmediates(); }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + ]); +}); - test('removes item', () => { - addTodo('My first todo'); +test('render both list and item component', () => { + addTodo('My first todo'); + + const renderer = TestRenderer.create( + + + + , + ); + + 
expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + TestRenderer.act(() => { addTodo('Second todo'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + // complete the first item + TestRenderer.act(() => { completeTodo('todo-1'); + jest.runAllImmediates(); + }); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + 'Second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + ]); +}); - const renderer = TestRenderer.create( - - - - , - ); +test('removes item', () => { + addTodo('My first todo'); + addTodo('Second todo'); + completeTodo('todo-1'); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - 'Second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My first todo', - 'is completed', - 'style: normal', - 'color: color is green', - ]); - - TestRenderer.act(() => { - removeTodo('todo-1'); - jest.runAllImmediates(); - }); - - expect(renderer.toJSON()).toEqual([ - 'Second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); + const renderer = TestRenderer.create( + + + + , + ); + + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is green', + 'Second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My first todo', + 'is completed', + 'style: normal', + 'color: color is 
green', + ]); + + TestRenderer.act(() => { + removeTodo('todo-1'); + jest.runAllImmediates(); }); - test('renders after GC', () => { - addTodo('My first todo'); + expect(renderer.toJSON()).toEqual([ + 'Second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); +}); - const renderer = TestRenderer.create( - - - , - ); +test('renders after GC', () => { + addTodo('My first todo'); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - (environment.getStore(): $FlowFixMe).__gc(); - jest.runAllTimers(); - - expect(environment.getStore().getSource().toJSON()).toEqual({ - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'todos(first:10)': { - __ref: 'client:root:todos(first:10)', - }, + const renderer = TestRenderer.create( + + + , + ); + + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + (environment.getStore(): $FlowFixMe).__gc(); + jest.runAllTimers(); + + expect(environment.getStore().getSource().toJSON()).toEqual({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'todos(first:10)': { + __ref: 'client:root:todos(first:10)', + }, + }, + 'client:root:todos(first:10)': { + __id: 'client:root:todos(first:10)', + __resolverError: null, + __resolverLiveStateDirty: false, + __resolverLiveStateSubscription: expect.anything(), + __resolverLiveStateValue: { + read: expect.anything(), + subscribe: expect.anything(), }, - 'client:root:todos(first:10)': { - __id: 'client:root:todos(first:10)', - __resolverError: null, - __resolverLiveStateDirty: false, - __resolverLiveStateSubscription: expect.anything(), - __resolverLiveStateValue: { - read: expect.anything(), - subscribe: expect.anything(), - }, - __resolverOutputTypeRecordIDs: new Set([ - 'client:TodoConnection:client:root:todos(first:10)', - 'client:TodoConnection:client:root:todos(first:10):edges:0', - 
'client:TodoConnection:client:root:todos(first:10):edges:0:node', - 'client:TodoConnection:client:root:todos(first:10):pageInfo', - ]), - __resolverSnapshot: undefined, - __resolverValue: 'client:TodoConnection:client:root:todos(first:10)', - __typename: '__RELAY_RESOLVER__', + __resolverOutputTypeRecordIDs: new Set([ + 'client:TodoConnection:client:root:todos(first:10)', + 'client:TodoConnection:client:root:todos(first:10):edges:0', + 'client:TodoConnection:client:root:todos(first:10):edges:0:node', + 'client:TodoConnection:client:root:todos(first:10):pageInfo', + ]), + __resolverSnapshot: undefined, + __resolverValue: 'client:TodoConnection:client:root:todos(first:10)', + __typename: '__RELAY_RESOLVER__', + }, + 'client:TodoConnection:client:root:todos(first:10)': { + __id: 'client:TodoConnection:client:root:todos(first:10)', + __typename: 'TodoConnection', + count: 1, + edges: { + __refs: ['client:TodoConnection:client:root:todos(first:10):edges:0'], }, - 'client:TodoConnection:client:root:todos(first:10)': { - __id: 'client:TodoConnection:client:root:todos(first:10)', - __typename: 'TodoConnection', - count: 1, - edges: { - __refs: ['client:TodoConnection:client:root:todos(first:10):edges:0'], - }, - pageInfo: { - __ref: 'client:TodoConnection:client:root:todos(first:10):pageInfo', - }, + pageInfo: { + __ref: 'client:TodoConnection:client:root:todos(first:10):pageInfo', }, - 'client:TodoConnection:client:root:todos(first:10):edges:0': { - __id: 'client:TodoConnection:client:root:todos(first:10):edges:0', - __typename: 'TodoEdge', - cursor: null, - node: { - __ref: - 'client:TodoConnection:client:root:todos(first:10):edges:0:node', - }, + }, + 'client:TodoConnection:client:root:todos(first:10):edges:0': { + __id: 'client:TodoConnection:client:root:todos(first:10):edges:0', + __typename: 'TodoEdge', + cursor: null, + node: { + __ref: 'client:TodoConnection:client:root:todos(first:10):edges:0:node', }, - 
'client:TodoConnection:client:root:todos(first:10):edges:0:node': { - __id: 'client:TodoConnection:client:root:todos(first:10):edges:0:node', - __typename: 'Todo', - complete: { - __ref: - 'client:TodoConnection:client:root:todos(first:10):edges:0:node:complete', - }, - self: { - __ref: - 'client:TodoConnection:client:root:todos(first:10):edges:0:node:self', - }, - text: { - __ref: - 'client:TodoConnection:client:root:todos(first:10):edges:0:node:text', - }, - todo_id: 'todo-1', + }, + 'client:TodoConnection:client:root:todos(first:10):edges:0:node': { + __id: 'client:TodoConnection:client:root:todos(first:10):edges:0:node', + __typename: 'Todo', + complete: { + __ref: + 'client:TodoConnection:client:root:todos(first:10):edges:0:node:complete', }, - 'client:TodoConnection:client:root:todos(first:10):pageInfo': { - __id: 'client:TodoConnection:client:root:todos(first:10):pageInfo', - __typename: 'TodoConnectionPageInfo', - endCursor: null, - hasNextPage: false, - hasPreviousPage: false, - startCursor: null, + self: { + __ref: + 'client:TodoConnection:client:root:todos(first:10):edges:0:node:self', }, - }); - - expect(() => { - renderer.update( - - - , - ); - }).not.toThrow(); + text: { + __ref: + 'client:TodoConnection:client:root:todos(first:10):edges:0:node:text', + }, + todo_id: 'todo-1', + }, + 'client:TodoConnection:client:root:todos(first:10):pageInfo': { + __id: 'client:TodoConnection:client:root:todos(first:10):pageInfo', + __typename: 'TodoConnectionPageInfo', + endCursor: null, + hasNextPage: false, + hasPreviousPage: false, + startCursor: null, + }, }); - test('render with recursive resolvers (with blocked_by)', () => { - addTodo('My first todo'); - addTodo('My second todo'); - addTodo('My 3rd todo'); - - const renderer = TestRenderer.create( - + expect(() => { + renderer.update( + - , ); + }).not.toThrow(); +}); + +test('render with recursive resolvers (with blocked_by)', () => { + addTodo('My first todo'); + addTodo('My second todo'); + addTodo('My 3rd 
todo'); + + const renderer = TestRenderer.create( + + + + , + ); - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My 3rd todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); - - TestRenderer.act(() => { - blockedBy('todo-1', 'todo-2'); - jest.runAllImmediates(); - }); - - expect(renderer.toJSON()).toEqual([ - 'My first todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My 3rd todo', - 'is not completed', - 'style: bold', - 'color: color is red', - 'My second todo', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My 3rd todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + TestRenderer.act(() => { + blockedBy('todo-1', 'todo-2'); + jest.runAllImmediates(); }); - test('rendering lists with nulls', () => { - addTodo('Todo 1'); - addTodo('Todo 2'); - addTodo('Todo 3'); + expect(renderer.toJSON()).toEqual([ + 'My first todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My 3rd todo', + 'is not completed', + 'style: bold', + 'color: color is red', + 'My second todo', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); +}); - const renderer = TestRenderer.create( - - - , - ); +// TODO: T184433715 We currently break with the GraphQL spec here and filter out null values. 
+test('rendering lists with nulls', () => { + addTodo('Todo 1'); + addTodo('Todo 2'); + addTodo('Todo 3'); - expect(renderer.toJSON()).toEqual([ - 'Todo 1', - 'is not completed', - 'style: bold', - 'color: color is red', - 'Todo 2', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); + const renderer = TestRenderer.create( + + + , + ); - renderer.update( - - - , - ); - expect(renderer.toJSON()).toEqual([ - 'Todo 1', - 'is not completed', - 'style: bold', - 'color: color is red', - 'Todo 2', - 'is not completed', - 'style: bold', - 'color: color is red', - 'Todo 3', - 'is not completed', - 'style: bold', - 'color: color is red', - ]); + expect(renderer.toJSON()).toEqual([ + 'Todo 1', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Todo 2', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + renderer.update( + + + , + ); + expect(renderer.toJSON()).toEqual([ + 'Todo 1', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Todo 2', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Todo 3', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); +}); + +test('rendering live list', () => { + addTodo('Todo 1'); + addTodo('Todo 2'); + addTodo('Todo 3'); + + const renderer = TestRenderer.create( + + + , + ); + + expect(renderer.toJSON()).toEqual([ + 'Todo 1', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Todo 2', + 'is not completed', + 'style: bold', + 'color: color is red', + 'Todo 3', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); + + TestRenderer.act(() => { + removeTodo('todo-1'); + removeTodo('todo-2'); + jest.runAllImmediates(); }); + + expect(renderer.toJSON()).toEqual([ + 'Todo 3', + 'is not completed', + 'style: bold', + 'color: color is red', + ]); }); diff --git 
a/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js b/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js new file mode 100644 index 0000000000000..b042b1e398240 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js @@ -0,0 +1,129 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<9151cadaf4abeb5c370580eca29aa399>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType } from "./RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql"; +export type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$variables = {| + id: string, +|}; +export type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data = {| + +node: ?{| + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType, + |}, +|}; +export type 
ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist = {| + response: ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data, + variables: ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + 
"params": { + "cacheID": "3b1b145feebb57a5f456912d1a263861", + "id": null, + "metadata": {}, + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist", + "operationKind": "query", + "text": "query ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist\n id\n }\n}\n\nfragment RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist on Comment {\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "6e6fc82ab5969e84d7c748516d16686b"; +} + +module.exports = ((node/*: any*/)/*: Query< + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$variables, + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js b/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js new file mode 100644 index 0000000000000..bb0b4fa8e6d18 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js @@ -0,0 +1,143 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<95edd3dfa81abb8b0e4eb82a16139e95>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType } from "./RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql"; +export type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$variables = {| + id: string, +|}; +export type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data = {| + +node: ?{| + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType, + |}, +|}; +export type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist = {| + response: ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data, + variables: ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": 
"LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "Comment", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "661ce80f22fca05217a32fb3b9639774", + "id": null, + "metadata": {}, + "name": "ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist", + "operationKind": "query", + "text": "query ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist\n id\n }\n}\n\nfragment RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist on Comment {\n name\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = 
"3990dc068bf228226a21832b04bbd39a"; +} + +module.exports = ((node/*: any*/)/*: Query< + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$variables, + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/ClientEdgesTest4Query.graphql.js b/packages/react-relay/__tests__/__generated__/ClientEdgesTest4Query.graphql.js index 1e5dd9f2b4631..1e68d1401a156 100644 --- a/packages/react-relay/__tests__/__generated__/ClientEdgesTest4Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/ClientEdgesTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8b4d0c7a6aa69c38d1679dea5b1b1582>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,19 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { DataID } from "relay-runtime"; import {client_object as userClientObjectResolverType} from "../../../relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver.js"; // Type assertion validating that `userClientObjectResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(userClientObjectResolverType: ( args: {| - id: string, + return_null: boolean, |}, -) => ?{| - +id: DataID, -|}); +) => ?User__client_object$normalization); +import type { User__client_object$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/User__client_object$normalization.graphql"; export type ClientEdgesTest4Query$variables = {| - id: string, + return_null: boolean, |}; export type ClientEdgesTest4Query$data = {| +me: ?{| @@ -50,14 +48,14 @@ var v0 = [ { "defaultValue": null, "kind": "LocalArgument", - "name": "id" + "name": "return_null" } ], v1 = [ { "kind": "Variable", - "name": "id", - "variableName": "id" + "name": "return_null", + "variableName": "return_null" } ], v2 = { @@ -100,6 +98,7 @@ return { "field": { "kind": "ClientEdgeToClientObject", "concreteType": "ClientObject", + "modelResolvers": null, "backingField": { "alias": null, "args": (v1/*: any*/), @@ -107,7 +106,13 @@ return { "kind": "RelayResolver", "name": "client_object", "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver').client_object, - "path": "me.client_object" + "path": "me.client_object", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": "ClientObject", + "plural": false, + "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/User__client_object$normalization.graphql') + } }, "linkedField": (v2/*: any*/) }, @@ -143,7 +148,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, "linkedField": (v2/*: any*/) }, @@ -171,7 +176,7 @@ return { })(); if (__DEV__) { - (node/*: any*/).hash = "acb202133f9bd8681e05bbbba9508ae6"; + (node/*: any*/).hash = "0d0d4be86097cc63c772eac5a3a43409"; } module.exports = ((node/*: any*/)/*: Query< diff --git a/packages/react-relay/__tests__/__generated__/ClientOnlyQueriesTest2Query.graphql.js 
b/packages/react-relay/__tests__/__generated__/ClientOnlyQueriesTest2Query.graphql.js index b1bb2bc8701fc..13551588e898c 100644 --- a/packages/react-relay/__tests__/__generated__/ClientOnlyQueriesTest2Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/ClientOnlyQueriesTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {hello as queryHelloResolverType} from "../../../relay-runtime/store/__te args: {| world: string, |}, -) => mixed); +) => ?string); export type ClientOnlyQueriesTest2Query$variables = {||}; export type ClientOnlyQueriesTest2Query$data = {| - +hello: ?$Call<((...empty[]) => R) => R, typeof queryHelloResolverType>, + +hello: ?string, |}; export type ClientOnlyQueriesTest2Query = {| response: ClientOnlyQueriesTest2Query$data, @@ -84,7 +84,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": "hello(world:\"World\")", - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/ErrorModel____relay_model_instance.graphql.js b/packages/react-relay/__tests__/__generated__/ErrorModel____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..e82eb3eb475b2 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/ErrorModel____relay_model_instance.graphql.js @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { ErrorModel__id$data } from "./ErrorModel__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {ErrorModel as errorModelRelayModelInstanceResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `errorModelRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(errorModelRelayModelInstanceResolverType: ( + id: ErrorModel__id$data['id'], +) => mixed); +declare export opaque type ErrorModel____relay_model_instance$fragmentType: FragmentType; +export type ErrorModel____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: ErrorModel____relay_model_instance$fragmentType, +|}; +export type ErrorModel____relay_model_instance$key = { + +$data?: ErrorModel____relay_model_instance$data, + +$fragmentSpreads: ErrorModel____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ErrorModel____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ErrorModel__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./ErrorModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').ErrorModel, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "ErrorModel", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + ErrorModel____relay_model_instance$fragmentType, + ErrorModel____relay_model_instance$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/ErrorModel__id.graphql.js b/packages/react-relay/__tests__/__generated__/ErrorModel__id.graphql.js new file mode 100644 index 0000000000000..df0a4a6041649 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/ErrorModel__id.graphql.js @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type ErrorModel__id$fragmentType: FragmentType; +export type ErrorModel__id$data = {| + +id: string, + +$fragmentType: ErrorModel__id$fragmentType, +|}; +export type ErrorModel__id$key = { + +$data?: ErrorModel__id$data, + +$fragmentSpreads: ErrorModel__id$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ErrorModel__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "ErrorModel", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + ErrorModel__id$fragmentType, + ErrorModel__id$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/ExampleWithOutputTypeTestQuery.graphql.js b/packages/react-relay/__tests__/__generated__/ExampleWithOutputTypeTestQuery.graphql.js index 087ace1e92d97..d9f46d44216b3 100644 --- a/packages/react-relay/__tests__/__generated__/ExampleWithOutputTypeTestQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/ExampleWithOutputTypeTestQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<03326d860aa33d678191fbe7962934db>> + * @generated SignedSource<<60733524045b7557f96d427d5ba67f87>> * @flow * @lightSyntaxTransform * @nogrep @@ -66,6 +66,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientObject", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -75,6 +76,7 @@ return { "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/ExampleClientObjectResolver').example_client_object, "path": "example_client_object", "normalizationInfo": { + "kind": "OutputType", "concreteType": "ClientObject", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__example_client_object$normalization.graphql') diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest10Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest10Query.graphql.js index 9ff0e5c2cb47b..7161b47f63727 100644 --- 
a/packages/react-relay/__tests__/__generated__/LiveResolversTest10Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest10Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<09e146b6eb189c5878eb9a254887b032>> + * @generated SignedSource<<8e876ed09b510787ed37e67348d41e68>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type LiveResolversTest10Query$variables = {||}; export type LiveResolversTest10Query$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, +me: ?{| +name: ?string, |}, @@ -145,7 +145,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest11Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest11Query.graphql.js index 8fe7ee105c557..f000f744fc078 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest11Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest11Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<43bbde08b0ba2e413ba6fa33df5a2b27>> + * @generated SignedSource<<2cffd1a80dede9d71d063af70dcaa2a8>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); export type LiveResolversTest11Query$variables = {||}; export type LiveResolversTest11Query$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, + +counter_no_fragment: ?number, |}; export type LiveResolversTest11Query = {| response: LiveResolversTest11Query$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest12Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest12Query.graphql.js index 8c6e8a38a00a5..5d8ed7e09821b 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest12Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest12Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment_with_arg as queryCounterNoFragmentWithArgResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterNoFragmentWithArg.js"; // Type assertion validating that `queryCounterNoFragmentWithArgResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -26,12 +26,12 @@ import {counter_no_fragment_with_arg as queryCounterNoFragmentWithArgResolverTyp args: {| prefix: string, |}, -) => LiveState); +) => LiveState); export type LiveResolversTest12Query$variables = {| prefix: string, |}; export type LiveResolversTest12Query$data = {| - +counter_no_fragment_with_arg: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentWithArgResolverType>["read"]>, + +counter_no_fragment_with_arg: ?string, |}; export type LiveResolversTest12Query = {| response: LiveResolversTest12Query$data, @@ -94,7 +94,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest13Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest13Query.graphql.js index d5c05876d0ac8..32d16b50cb43a 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest13Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest13Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1943cb4f897a7adfb802b75e83840da5>> + * @generated SignedSource<<1479afc61623ff39f26c6b2ebc95dd0d>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,8 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { DataID } from "relay-runtime"; +import type { LiveState, DataID } from "relay-runtime"; import {live_constant_client_edge as queryLiveConstantClientEdgeResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveConstantClientEdgeResolver.js"; // Type assertion validating that `queryLiveConstantClientEdgeResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest14Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest14Query.graphql.js index d818de93e12a6..61abace846eec 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest14Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest14Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<664be45a433955c4ca45dc99535fc6a6>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); export type LiveResolversTest14Query$variables = {||}; export type LiveResolversTest14Query$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, + +counter_no_fragment: ?number, +me: ?{| +__typename: "User", |}, @@ -115,7 +115,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest15Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest15Query.graphql.js index 248c86b93ee5c..22a521258f196 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest15Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest15Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,8 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { DataID } from "relay-runtime"; +import type { LiveState, DataID } from "relay-runtime"; import {live_user_resolver_always_suspend as queryLiveUserResolverAlwaysSuspendResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveUserAlwaysSuspendResolver.js"; // Type assertion validating that `queryLiveUserResolverAlwaysSuspendResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest16Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest16Query.graphql.js index ab1528dce6589..fa392cdcb4e59 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest16Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest16Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<30278339f9062038e770e9c949c0b72b>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {live_resolver_with_bad_return_value as queryLiveResolverWithBadReturnValueResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryLiveResolverWithBadReturnValue.js"; // Type assertion validating that `queryLiveResolverWithBadReturnValueResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryLiveResolverWithBadReturnValueResolverType: () => LiveState); +(queryLiveResolverWithBadReturnValueResolverType: () => LiveState); export type LiveResolversTest16Query$variables = {||}; export type LiveResolversTest16Query$data = {| - +live_resolver_with_bad_return_value: ?$Call<$Call<((...empty[]) => R) => R, typeof queryLiveResolverWithBadReturnValueResolverType>["read"]>, + +live_resolver_with_bad_return_value: ?string, |}; export type LiveResolversTest16Query = {| response: LiveResolversTest16Query$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest17Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest17Query.graphql.js index 44081a05abad5..9d98a4d1761e6 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest17Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest17Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4423dde7a22f9042be77f264d49bd26f>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -21,10 +21,10 @@ import type { ClientRequest, ClientQuery } from 'relay-runtime'; import {non_live_resolver_with_live_return_value as queryNonLiveResolverWithLiveReturnValueResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryNonLiveResolverWithLiveReturnValue.js"; // Type assertion validating that `queryNonLiveResolverWithLiveReturnValueResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryNonLiveResolverWithLiveReturnValueResolverType: () => mixed); +(queryNonLiveResolverWithLiveReturnValueResolverType: () => ?string); export type LiveResolversTest17Query$variables = {||}; export type LiveResolversTest17Query$data = {| - +non_live_resolver_with_live_return_value: ?$Call<((...empty[]) => R) => R, typeof queryNonLiveResolverWithLiveReturnValueResolverType>, + +non_live_resolver_with_live_return_value: ?string, |}; export type LiveResolversTest17Query = {| response: LiveResolversTest17Query$data, @@ -72,7 +72,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest18Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest18Query.graphql.js index 7b14f3966c9a6..45292aa1353a6 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest18Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest18Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<1f6387b6c380c0346d62c5e930c07fc7>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {live_resolver_throws as queryLiveResolverThrowsResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrows.js"; // Type assertion validating that `queryLiveResolverThrowsResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryLiveResolverThrowsResolverType: () => LiveState); +(queryLiveResolverThrowsResolverType: () => LiveState); export type LiveResolversTest18Query$variables = {||}; export type LiveResolversTest18Query$data = {| - +live_resolver_throws: ?$Call<$Call<((...empty[]) => R) => R, typeof queryLiveResolverThrowsResolverType>["read"]>, + +live_resolver_throws: ?ReturnType["read"]>, |}; export type LiveResolversTest18Query = {| response: LiveResolversTest18Query$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest19Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest19Query.graphql.js index 52f00f07975a4..5a7cd8654c8e0 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest19Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest19Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<3f1ab95503021f37760ce3e491b69877>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {live_resolver_return_undefined as queryLiveResolverReturnUndefinedResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryLiveResolverReturnsUndefined.js"; // Type assertion validating that `queryLiveResolverReturnUndefinedResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryLiveResolverReturnUndefinedResolverType: () => LiveState); +(queryLiveResolverReturnUndefinedResolverType: () => LiveState); export type LiveResolversTest19Query$variables = {||}; export type LiveResolversTest19Query$data = {| - +live_resolver_return_undefined: ?$Call<$Call<((...empty[]) => R) => R, typeof queryLiveResolverReturnUndefinedResolverType>["read"]>, + +live_resolver_return_undefined: ?ReturnType["read"]>, |}; export type LiveResolversTest19Query = {| response: LiveResolversTest19Query$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest1Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest1Query.graphql.js index c2adb16e26e2a..98373b1719320 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest1Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest1Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9672a014e484fff8696f9f4665f57524>> + * @generated SignedSource<<458292d69f28e72ba6081f953d31393b>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type LiveResolversTest1Query$variables = {||}; export type LiveResolversTest1Query$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, |}; export type LiveResolversTest1Query = {| response: LiveResolversTest1Query$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest2Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest2Query.graphql.js index a92395d5070f3..1b6d3a4bc78de 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest2Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<435445042a64bbf8d7c260445e31477d>> + * @generated SignedSource<<4bd1d847ded8134986f5fde3a9ef8428>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type LiveResolversTest2Query$variables = {||}; export type LiveResolversTest2Query$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, |}; export type LiveResolversTest2Query = {| response: LiveResolversTest2Query$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest3Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest3Query.graphql.js index 99e3aca1d207b..b15503c69a357 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest3Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest3Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3b8a1c48f183b9db0808ac916027a762>> + * @generated SignedSource<<50c1ac6d807f995864839d14f9e7601b>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,10 +24,10 @@ import {counter_plus_one as queryCounterPlusOneResolverType} from "../../../rela // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterPlusOneResolverType: ( rootKey: CounterPlusOneResolver$key, -) => mixed); +) => ?number); export type LiveResolversTest3Query$variables = {||}; export type LiveResolversTest3Query$data = {| - +counter_plus_one: ?$Call<((...empty[]) => R) => R, typeof queryCounterPlusOneResolverType>, + +counter_plus_one: ?number, |}; export type LiveResolversTest3Query = {| response: LiveResolversTest3Query$data, @@ -113,7 +113,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Query", @@ -121,7 +121,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest4Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest4Query.graphql.js index 211b9e8e26ade..6cf3e0bfb2591 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest4Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<68b5ecdb85b95668183936789164933a>> + * @generated SignedSource<<481692e24a62f0976368a66f7dfa7770>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LivePingPongResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LivePingPongResolver.graphql"; import {ping as queryPingResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LivePingPongResolver.js"; // Type assertion validating that `queryPingResolverType` resolver is correctly implemented. 
// A type error here indicates that the type signature of the resolver module is incorrect. (queryPingResolverType: ( rootKey: LivePingPongResolver$key, -) => LiveState); +) => LiveState); export type LiveResolversTest4Query$variables = {||}; export type LiveResolversTest4Query$data = {| - +ping: ?$Call<$Call<((...empty[]) => R) => R, typeof queryPingResolverType>["read"]>, + +ping: ?string, |}; export type LiveResolversTest4Query = {| response: LiveResolversTest4Query$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest5Fragment.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest5Fragment.graphql.js index e37794e6e889e..d038a8d614d66 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest5Fragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest5Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4b834dd16f59d8b359cb004472f35e6f>> + * @generated SignedSource<<13cf98ec079b3f82c005c0ae50a8b326>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { CounterSuspendsWhenOdd$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/CounterSuspendsWhenOdd.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {counter_suspends_when_odd as queryCounterSuspendsWhenOddResolverType} from "../../../relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOdd.js"; // Type assertion validating that `queryCounterSuspendsWhenOddResolverType` resolver is correctly implemented. 
// A type error here indicates that the type signature of the resolver module is incorrect. (queryCounterSuspendsWhenOddResolverType: ( rootKey: CounterSuspendsWhenOdd$key, -) => LiveState); +) => LiveState); declare export opaque type LiveResolversTest5Fragment$fragmentType: FragmentType; export type LiveResolversTest5Fragment$data = {| - +counter_suspends_when_odd: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterSuspendsWhenOddResolverType>["read"]>, + +counter_suspends_when_odd: ?number, +$fragmentType: LiveResolversTest5Fragment$fragmentType, |}; export type LiveResolversTest5Fragment$key = { diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest5Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest5Query.graphql.js index 421d5b8e2fb06..f615e35b39a1b 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest5Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest5Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<555657342c11fea2591931ac5632f137>> * @flow * @lightSyntaxTransform * @nogrep @@ -93,7 +93,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest6Fragment.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest6Fragment.graphql.js index f7439c73c3e63..045956330662e 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest6Fragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest6Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<78640a08149f7e97835048ff5ea2e664>> + * @generated SignedSource<<96215027532c2a33fd53ac1f1cb27f9a>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import 
{user_name_and_counter_suspends_when_odd as queryUserNameAndCounterSuspen // A type error here indicates that the type signature of the resolver module is incorrect. (queryUserNameAndCounterSuspendsWhenOddResolverType: ( rootKey: UserNameAndCounterSuspendsWhenOdd$key, -) => mixed); +) => ?string); declare export opaque type LiveResolversTest6Fragment$fragmentType: FragmentType; export type LiveResolversTest6Fragment$data = {| - +user_name_and_counter_suspends_when_odd: ?$Call<((...empty[]) => R) => R, typeof queryUserNameAndCounterSuspendsWhenOddResolverType>, + +user_name_and_counter_suspends_when_odd: ?string, +$fragmentType: LiveResolversTest6Fragment$fragmentType, |}; export type LiveResolversTest6Fragment$key = { diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest6Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest6Query.graphql.js index 4dac1b3174be5..d89daa26018ff 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest6Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest6Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<328545432bb4a27ae857c424abf680e5>> + * @generated SignedSource<<9dcf6c9ce34df52a5fbef93ae2d23fc7>> * @flow * @lightSyntaxTransform * @nogrep @@ -120,7 +120,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Query", @@ -128,7 +128,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest7Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest7Query.graphql.js index 7851184e3e89a..d85c32377a477 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest7Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest7Query.graphql.js @@ -6,7 
+6,7 @@ * * @oncall relay * - * @generated SignedSource<<516dc929f97fad9c780c5541582feb7a>> + * @generated SignedSource<<4e3e2c65c514f6b248c2734bdf354de6>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { UserProfilePictureUriSuspendsWhenTheCounterIsOdd$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql"; import {user_profile_picture_uri_suspends_when_the_counter_is_odd as userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType} from "../../../relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js"; // Type assertion validating that `userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType` resolver is correctly implemented. 
@@ -28,7 +28,7 @@ import {user_profile_picture_uri_suspends_when_the_counter_is_odd as userUserPro args: {| scale: ?number, |}, -) => LiveState); +) => LiveState); export type LiveResolversTest7Query$variables = {| id: string, scale: number, @@ -36,7 +36,7 @@ export type LiveResolversTest7Query$variables = {| export type LiveResolversTest7Query$data = {| +node: ?{| +name?: ?string, - +user_profile_picture_uri_suspends_when_the_counter_is_odd?: ?$Call<$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType>["read"]>, + +user_profile_picture_uri_suspends_when_the_counter_is_odd?: ?string, |}, |}; export type LiveResolversTest7Query = {| @@ -100,7 +100,7 @@ return { (v2/*: any*/), { "alias": null, - "args": null, + "args": [], "fragment": { "args": (v3/*: any*/), "kind": "FragmentSpread", @@ -166,7 +166,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "name": "user_profile_picture_uri_with_scale", @@ -198,7 +198,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", @@ -206,7 +206,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest8Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest8Query.graphql.js index 93b45e73ad0fc..be9e9bdc6760b 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest8Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest8Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<67a36241918af609fd8c418891066e31>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,21 +18,21 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from 
"relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { ResolverThatThrows$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/ResolverThatThrows.graphql"; import {resolver_that_throws as userResolverThatThrowsResolverType} from "../../../relay-runtime/store/__tests__/resolvers/ResolverThatThrows.js"; // Type assertion validating that `userResolverThatThrowsResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (userResolverThatThrowsResolverType: ( rootKey: ResolverThatThrows$key, -) => LiveState); +) => LiveState); export type LiveResolversTest8Query$variables = {| id: string, |}; export type LiveResolversTest8Query$data = {| +node: ?{| +name?: ?string, - +resolver_that_throws?: ?$Call<$Call<((...empty[]) => R) => R, typeof userResolverThatThrowsResolverType>["read"]>, + +resolver_that_throws?: ?ReturnType["read"]>, |}, |}; export type LiveResolversTest8Query = {| @@ -150,7 +150,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTest9Query.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTest9Query.graphql.js index 8f42ff0990dab..2ae63248ff406 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTest9Query.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTest9Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8ae5e76947e74633ba85751b4400e334>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from 
"relay-runtime"; import type { UserProfilePictureUriSuspendsWhenTheCounterIsOdd$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql"; import {user_profile_picture_uri_suspends_when_the_counter_is_odd as userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType} from "../../../relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js"; // Type assertion validating that `userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType` resolver is correctly implemented. @@ -28,14 +28,14 @@ import {user_profile_picture_uri_suspends_when_the_counter_is_odd as userUserPro args: {| scale: ?number, |}, -) => LiveState); +) => LiveState); export type LiveResolversTest9Query$variables = {| id: string, scale: number, |}; export type LiveResolversTest9Query$data = {| +node: ?{| - +profile_picture_uri?: ?$Call<$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriSuspendsWhenTheCounterIsOddResolverType>["read"]>, + +profile_picture_uri?: ?string, |}, |}; export type LiveResolversTest9Query = {| @@ -91,7 +91,7 @@ return { "selections": [ { "alias": "profile_picture_uri", - "args": null, + "args": [], "fragment": { "args": (v2/*: any*/), "kind": "FragmentSpread", @@ -162,7 +162,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "name": "user_profile_picture_uri_with_scale", @@ -194,7 +194,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", @@ -202,7 +202,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestCounterUserFragment.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestCounterUserFragment.graphql.js index 
f8f1281f0ba97..45f388e2c4b5d 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestCounterUserFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestCounterUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6cd42d0b858656e3aa500b1570f46aaf>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,15 +18,14 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {counter_suspends_when_odd as userCounterSuspendsWhenOddResolverType} from "../../../relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOddOnUser.js"; // Type assertion validating that `userCounterSuspendsWhenOddResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(userCounterSuspendsWhenOddResolverType: () => LiveState); +(userCounterSuspendsWhenOddResolverType: () => LiveState); declare export opaque type LiveResolversTestCounterUserFragment$fragmentType: FragmentType; export type LiveResolversTestCounterUserFragment$data = {| - +counter_suspends_when_odd: ?$Call<$Call<((...empty[]) => R) => R, typeof userCounterSuspendsWhenOddResolverType>["read"]>, + +counter_suspends_when_odd: ?number, +$fragmentType: LiveResolversTestCounterUserFragment$fragmentType, |}; export type LiveResolversTestCounterUserFragment$key = { diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestLiveResolverSuspenseQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestLiveResolverSuspenseQuery.graphql.js index 73fb97d42e523..effbd0722a5a9 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestLiveResolverSuspenseQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestLiveResolverSuspenseQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<9c526503810e8356431d34d4bb60779a>> * @flow * @lightSyntaxTransform * @nogrep @@ -108,7 +108,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestNestedQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestNestedQuery.graphql.js index a2ec429067389..00d1f36f371a7 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestNestedQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestNestedQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6702d893d5893fde158d0a6977a74203>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ConcreteRequest, Query } from 
'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { InnerResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/InnerResolver.graphql"; import type { OuterResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/OuterResolver.graphql"; import {inner as queryInnerResolverType} from "../../../relay-runtime/store/__tests__/resolvers/InnerResolver.js"; @@ -26,17 +26,17 @@ import {inner as queryInnerResolverType} from "../../../relay-runtime/store/__te // A type error here indicates that the type signature of the resolver module is incorrect. (queryInnerResolverType: ( rootKey: InnerResolver$key, -) => LiveState); +) => LiveState); import {outer as queryOuterResolverType} from "../../../relay-runtime/store/__tests__/resolvers/OuterResolver.js"; // Type assertion validating that `queryOuterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryOuterResolverType: ( rootKey: OuterResolver$key, -) => mixed); +) => ?number); export type LiveResolversTestNestedQuery$variables = {||}; export type LiveResolversTestNestedQuery$data = {| - +inner: ?$Call<$Call<((...empty[]) => R) => R, typeof queryInnerResolverType>["read"]>, - +outer: ?$Call<((...empty[]) => R) => R, typeof queryOuterResolverType>, + +inner: ?number, + +outer: ?number, |}; export type LiveResolversTestNestedQuery = {| response: LiveResolversTestNestedQuery$data, @@ -82,7 +82,7 @@ var v0 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }; return { "fragment": { @@ -140,7 +140,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ] diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestOptimisticUpdateQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestOptimisticUpdateQuery.graphql.js index cfd14fd25e633..576a8491a1c1c 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestOptimisticUpdateQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestOptimisticUpdateQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. 
// A type error here indicates that the type signature of the resolver module is incorrect. (queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type LiveResolversTestOptimisticUpdateQuery$variables = {||}; export type LiveResolversTestOptimisticUpdateQuery$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, |}; export type LiveResolversTestOptimisticUpdateQuery = {| response: LiveResolversTestOptimisticUpdateQuery$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCCounterQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCCounterQuery.graphql.js index 69647115ab17f..e9ccf74ef1efc 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCCounterQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCCounterQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0fc098b6dbf06245d08c96a100f66728>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); export type LiveResolversTestWithGCCounterQuery$variables = {||}; export type LiveResolversTestWithGCCounterQuery$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, + +counter_no_fragment: ?number, |}; export type LiveResolversTestWithGCCounterQuery = {| response: LiveResolversTestWithGCCounterQuery$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCQuery.graphql.js index 71f6d262fd715..cae7b7547469d 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithGCQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<681933700db2255151299a7a8c82c6bd>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterWithPossibleMissingFragmentDataResolverFragment$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/LiveCounterWithPossibleMissingFragmentDataResolverFragment.graphql"; import {live_counter_with_possible_missing_fragment_data as queryLiveCounterWithPossibleMissingFragmentDataResolverType} from "../../../relay-runtime/store/__tests__/resolvers/LiveCounterWithPossibleMissingFragmentDataResolver.js"; // Type assertion validating that 
`queryLiveCounterWithPossibleMissingFragmentDataResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (queryLiveCounterWithPossibleMissingFragmentDataResolverType: ( rootKey: LiveCounterWithPossibleMissingFragmentDataResolverFragment$key, -) => LiveState); +) => LiveState); export type LiveResolversTestWithGCQuery$variables = {||}; export type LiveResolversTestWithGCQuery$data = {| - +live_counter_with_possible_missing_fragment_data: ?$Call<$Call<((...empty[]) => R) => R, typeof queryLiveCounterWithPossibleMissingFragmentDataResolverType>["read"]>, + +live_counter_with_possible_missing_fragment_data: ?number, |}; export type LiveResolversTestWithGCQuery = {| response: LiveResolversTestWithGCQuery$data, @@ -96,7 +96,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithProvidedVariablesQuery.graphql.js b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithProvidedVariablesQuery.graphql.js index 58e1eb49bf55b..47c97eeeddf8f 100644 --- a/packages/react-relay/__tests__/__generated__/LiveResolversTestWithProvidedVariablesQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/LiveResolversTestWithProvidedVariablesQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8d6c5ac2a09022435358e381b8fde2ae>> + * @generated SignedSource<<9af76fa2cc8b4009ca1a3937a6f65434>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,26 +24,24 @@ import {hello_world_with_provided_variable as queryHelloWorldWithProvidedVariabl // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryHelloWorldWithProvidedVariableResolverType: ( rootKey: HelloWorldResolverWithProvidedVariable$key, -) => mixed); +) => ?string); export type LiveResolversTestWithProvidedVariablesQuery$variables = {||}; export type LiveResolversTestWithProvidedVariablesQuery$data = {| - +hello_world_with_provided_variable: ?$Call<((...empty[]) => R) => R, typeof queryHelloWorldWithProvidedVariableResolverType>, + +hello_world_with_provided_variable: ?string, |}; export type LiveResolversTestWithProvidedVariablesQuery = {| response: LiveResolversTestWithProvidedVariablesQuery$data, variables: LiveResolversTestWithProvidedVariablesQuery$variables, |}; -type ProvidedVariablesType = {| - +__relay_internal__pv__HelloWorldProviderjs: {| +({ + "__relay_internal__pv__HelloWorldProviderrelayprovider": require('./../../../relay-runtime/store/__tests__/resolvers/HelloWorldProvider.relayprovider') +}: {| + +__relay_internal__pv__HelloWorldProviderrelayprovider: {| +get: () => string, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__HelloWorldProviderjs": require('./../../../relay-runtime/store/__tests__/resolvers/HelloWorldProvider') -}; - var node/*: ClientRequest*/ = { "fragment": { "argumentDefinitions": [], @@ -74,7 +72,7 @@ var node/*: ClientRequest*/ = { { "defaultValue": null, "kind": "LocalArgument", - "name": "__relay_internal__pv__HelloWorldProviderjs" + "name": "__relay_internal__pv__HelloWorldProviderrelayprovider" } ], "kind": "Operation", @@ -95,13 +93,13 @@ var node/*: ClientRequest*/ = { { "kind": "Variable", "name": "world", - "variableName": "__relay_internal__pv__HelloWorldProviderjs" + "variableName": "__relay_internal__pv__HelloWorldProviderrelayprovider" } ], "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } @@ -111,7 +109,7 @@ var node/*: ClientRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + 
"isOutputType": true } ] }, @@ -122,7 +120,9 @@ var node/*: ClientRequest*/ = { "name": "LiveResolversTestWithProvidedVariablesQuery", "operationKind": "query", "text": null, - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__HelloWorldProviderrelayprovider": require('./../../../relay-runtime/store/__tests__/resolvers/HelloWorldProvider.relayprovider') + } } }; diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest1Query_me__client_node.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest1Query_me__client_node.graphql.js index 587d40d97d031..2c3e1178f1834 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest1Query_me__client_node.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest1Query_me__client_node.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8b7e2ebba0da9d4dc59a9c5001ca2112>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ClientEdgesTest1Query_me__client_node.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ClientEdgesTest1Query_me__client_node", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest2Query_me__client_node.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest2Query_me__client_node.graphql.js index e805db54a739c..9608d9d6f6816 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest2Query_me__client_node.graphql.js +++ 
b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest2Query_me__client_node.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<27b1fed880ee37fc241ddb4b04ac60c3>> + * @generated SignedSource<<9172cd1c8f829c7dce78fbd22a72f555>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ClientEdgesTest2Query_me__client_node.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ClientEdgesTest2Query_me__client_node", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest3Query_me__client_node.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest3Query_me__client_node.graphql.js index 059f6b7e86d0f..ca0ab94c18acc 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest3Query_me__client_node.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientEdgesTest3Query_me__client_node.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<368f2734b28b10a0e4ba2453a1e1407c>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ClientEdgesTest3Query_me__client_node.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ClientEdgesTest3Query_me__client_node", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user.graphql.js 
b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user.graphql.js index 983580e521c9a..425a860018594 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<38119d2859c697aa13c27778fa54d347>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ClientOnlyQueriesTest3Query_hello_user", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge.graphql.js index 62cab892da755..afd5df10827f7 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": 
"RefetchableClientEdgeQuery_LiveResolversTest13Query_live_constant_client_edge", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend.graphql.js index 53ea975b62a9f..c28ee47b333a9 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<2e5f3c3a15e68521b574ca5f72684bf6>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_LiveResolversTest15Query_live_user_resolver_always_suspend", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge.graphql.js index 91bbc3e4ca20e..f8a4ca3fe4b21 100644 --- a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<294905380e1d78aa11cb855086aa97bc>> + * 
@generated SignedSource<<1bbbb7efcf5b7be9c316b87d91fff210>> * @flow * @lightSyntaxTransform * @nogrep @@ -45,7 +45,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_QueryResourceClientEdgesTest2Query_me__client_edge", diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js new file mode 100644 index 0000000000000..eb3c4898a1870 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql.js @@ -0,0 +1,73 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ReaderFragment, RefetchableFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType: FragmentType; +type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$variables = any; +export type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data = {| + +id: string, + +$fragmentType: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType, +|}; +export type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$key = { + +$data?: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data, + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('./ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "Comment", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "6e6fc82ab5969e84d7c748516d16686b"; +} + +module.exports = ((node/*: any*/)/*: RefetchableFragment< + RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$fragmentType, + RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$data, + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist$variables, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js new file mode 100644 index 0000000000000..55d3325a38ba2 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql.js @@ -0,0 +1,81 @@ 
+/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<011095a988ef3a992d94871df730045c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ReaderFragment, RefetchableFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType: FragmentType; +type ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$variables = any; +export type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data = {| + +id: string, + +name: ?string, + +$fragmentType: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType, +|}; +export type RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$key = { + +$data?: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data, + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('./ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "Comment", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "3990dc068bf228226a21832b04bbd39a"; +} + +module.exports = ((node/*: any*/)/*: RefetchableFragment< + RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$fragmentType, + RefetchableClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$data, + ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist$variables, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalGreetingQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalGreetingQuery.graphql.js new file mode 100644 index 0000000000000..a2c27547f26f5 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalGreetingQuery.graphql.js @@ -0,0 +1,329 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<178d8ce18788fe4e15689bfb64c2512c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { Cat____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql"; +import type { Fish____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql"; +import {greeting as iAnimalGreetingResolverType} from "../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js"; +// Type assertion validating that `iAnimalGreetingResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(iAnimalGreetingResolverType: ( + model: Cat____relay_model_instance$data['__relay_model_instance'] | Fish____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import {animal as queryAnimalResolverType} from "../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js"; +// Type assertion validating that `queryAnimalResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryAnimalResolverType: ( + args: {| + request: AnimalRequest, + |}, +) => ?{| + +__typename: "Cat" | "Chicken" | "Fish", + +id: DataID, +|}); +export type AnimalRequest = {| + ofType: string, + returnValidID: boolean, +|}; +export type RelayResolverInterfaceTestAnimalGreetingQuery$variables = {| + request: AnimalRequest, +|}; +export type RelayResolverInterfaceTestAnimalGreetingQuery$data = {| + +animal: ?{| + +greeting: ?string, + |}, +|}; +export type RelayResolverInterfaceTestAnimalGreetingQuery = {| + response: RelayResolverInterfaceTestAnimalGreetingQuery$data, + variables: RelayResolverInterfaceTestAnimalGreetingQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "request" + } +], +v1 = [ + { + "kind": "Variable", + "name": "request", + "variableName": "request" + } +], +v2 = { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "greeting", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v4 = [ + (v3/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestAnimalGreetingQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Cat": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat__id" + }, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').Cat, 'id', true), + "path": 
"animal.__relay_model_instance" + }, + "Fish": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish__id" + }, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').Fish, 'id', true), + "path": "animal.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers').animal, + "path": "animal" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animal", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers').greeting, '__relay_model_instance', true), + "path": "animal.greeting" + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers').greeting, '__relay_model_instance', true), + "path": "animal.greeting" + } + ], + "type": "Fish", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverInterfaceTestAnimalGreetingQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "animal", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animal", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v4/*: any*/), + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": 
(v4/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + }, + (v3/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "f2e014879af0e8c3a48b9048970e4c13", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestAnimalGreetingQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "d35e1f7778da7bfc45d34903bd3ca96c"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestAnimalGreetingQuery$variables, + RelayResolverInterfaceTestAnimalGreetingQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsFragment.graphql.js new file mode 100644 index 0000000000000..a319d7ec5c660 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsFragment.graphql.js @@ -0,0 +1,111 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<438b9cfa39ef26639040bed14b5b7274>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayResolverInterfaceTestAnimalLegsFragment$fragmentType: FragmentType; +export type RelayResolverInterfaceTestAnimalLegsFragment$data = {| + +legs: ?number, + +$fragmentType: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, +|}; +export type RelayResolverInterfaceTestAnimalLegsFragment$key = { + +$data?: RelayResolverInterfaceTestAnimalLegsFragment$data, + +$fragmentSpreads: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayResolverInterfaceTestAnimalLegsFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "legs", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').legs, '__relay_model_instance', true), + "path": "legs" + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": 
"FragmentSpread", + "name": "Fish____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "legs", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').legs, '__relay_model_instance', true), + "path": "legs" + } + ], + "type": "Fish", + "abstractKey": null + } + ] + } + ], + "type": "IAnimal", + "abstractKey": "__isIAnimal" +}; + +if (__DEV__) { + (node/*: any*/).hash = "30ed1535c07e810fa0c28b4a7be25a81"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + RelayResolverInterfaceTestAnimalLegsFragment$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsQuery.graphql.js new file mode 100644 index 0000000000000..133ee4b6554bd --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalLegsQuery.graphql.js @@ -0,0 +1,292 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<105f73cb11426e65f57f8fc1eeed8992>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { RelayResolverInterfaceTestAnimalLegsFragment$fragmentType } from "./RelayResolverInterfaceTestAnimalLegsFragment.graphql"; +import {animal as queryAnimalResolverType} from "../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js"; +// Type assertion validating that `queryAnimalResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryAnimalResolverType: ( + args: {| + request: AnimalRequest, + |}, +) => ?{| + +__typename: "Cat" | "Chicken" | "Fish", + +id: DataID, +|}); +export type AnimalRequest = {| + ofType: string, + returnValidID: boolean, +|}; +export type RelayResolverInterfaceTestAnimalLegsQuery$variables = {| + request: AnimalRequest, +|}; +export type RelayResolverInterfaceTestAnimalLegsQuery$data = {| + +animal: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestAnimalLegsQuery = {| + response: RelayResolverInterfaceTestAnimalLegsQuery$data, + variables: RelayResolverInterfaceTestAnimalLegsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "request" + } +], +v1 = [ + { + "kind": "Variable", + "name": "request", + "variableName": "request" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v3 = [ + (v2/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": 
"RelayResolverInterfaceTestAnimalLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Cat": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat__id" + }, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').Cat, 'id', true), + "path": "animal.__relay_model_instance" + }, + "Fish": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish__id" + }, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').Fish, 'id', true), + "path": "animal.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "animal", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers').animal, + "path": "animal" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animal", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestAnimalLegsFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverInterfaceTestAnimalLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + 
"name": "animal", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animal", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v3/*: any*/), + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v3/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + }, + (v2/*: any*/) + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIAnimal": [ + "Cat", + "Chicken", + "Fish" + ] + } + }, + "params": { + "cacheID": "f240e73f777c2a51a0622d85c36fbcb9", + "id": 
null, + "metadata": {}, + "name": "RelayResolverInterfaceTestAnimalLegsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "30ac22964e5c349d28a0d6c199793b8f"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestAnimalLegsQuery$variables, + RelayResolverInterfaceTestAnimalLegsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalsLegsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalsLegsQuery.graphql.js new file mode 100644 index 0000000000000..d5f14f43ba314 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestAnimalsLegsQuery.graphql.js @@ -0,0 +1,294 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<83df13c334d14559d2e8bc4c2cfa660d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { RelayResolverInterfaceTestAnimalLegsFragment$fragmentType } from "./RelayResolverInterfaceTestAnimalLegsFragment.graphql"; +import {animals as queryAnimalsResolverType} from "../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js"; +// Type assertion validating that `queryAnimalsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryAnimalsResolverType: ( + args: {| + requests: $ReadOnlyArray, + |}, +) => ?$ReadOnlyArray); +export type AnimalRequest = {| + ofType: string, + returnValidID: boolean, +|}; +export type RelayResolverInterfaceTestAnimalsLegsQuery$variables = {| + requests: $ReadOnlyArray, +|}; +export type RelayResolverInterfaceTestAnimalsLegsQuery$data = {| + +animals: ?$ReadOnlyArray, +|}; +export type RelayResolverInterfaceTestAnimalsLegsQuery = {| + response: RelayResolverInterfaceTestAnimalsLegsQuery$data, + variables: RelayResolverInterfaceTestAnimalsLegsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "requests" + } +], +v1 = [ + { + "kind": "Variable", + "name": "requests", + "variableName": "requests" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v3 = [ + (v2/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestAnimalsLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": { + "Cat": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat__id" + }, + "kind": "RelayResolver", + "name": "animals", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').Cat, 'id', true), + "path": "animals.__relay_model_instance" + }, + "Fish": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish__id" + }, + "kind": "RelayResolver", + "name": "animals", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').Fish, 'id', true), + "path": "animals.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "animals", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers').animals, + "path": "animals" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animals", + "plural": true, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestAnimalLegsFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverInterfaceTestAnimalsLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "animals", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "animals", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": 
"__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v3/*: any*/), + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v3/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIAnimal": [ + "Cat", + "Chicken", + "Fish" + ] + } + }, + "params": { + "cacheID": "26afdfc336da8d51fb00e39cc6026d8d", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestAnimalsLegsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "04bfa405c479917657a2d15f6b8b3780"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestAnimalsLegsQuery$variables, + RelayResolverInterfaceTestAnimalsLegsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestCatLegsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestCatLegsQuery.graphql.js new file mode 100644 index 0000000000000..a2ead79fbe62c --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestCatLegsQuery.graphql.js @@ -0,0 +1,254 @@ +/** + * Copyright (c) Meta 
Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { RelayResolverInterfaceTestAnimalLegsFragment$fragmentType } from "./RelayResolverInterfaceTestAnimalLegsFragment.graphql"; +import {cat as queryCatResolverType} from "../../../relay-runtime/store/__tests__/resolvers/CatResolvers.js"; +// Type assertion validating that `queryCatResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryCatResolverType: () => ?{| + +id: DataID, +|}); +export type RelayResolverInterfaceTestCatLegsQuery$variables = {||}; +export type RelayResolverInterfaceTestCatLegsQuery$data = {| + +cat: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestCatLegsQuery = {| + response: RelayResolverInterfaceTestCatLegsQuery$data, + variables: RelayResolverInterfaceTestCatLegsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v1 = [ + (v0/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestCatLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "Cat", + "modelResolvers": { + "Cat": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat__id" + }, + "kind": "RelayResolver", + "name": "cat", + 
"resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').Cat, 'id', true), + "path": "cat.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "cat", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/CatResolvers').cat, + "path": "cat" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Cat", + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestAnimalLegsFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverInterfaceTestCatLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "cat", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Cat", + "kind": "LinkedField", + "name": "cat", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Cat", + "abstractKey": null + }, + 
"kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + } + ], + "type": "IAnimal", + "abstractKey": "__isIAnimal" + }, + (v0/*: any*/) + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIAnimal": [ + "Cat", + "Chicken", + "Fish" + ] + } + }, + "params": { + "cacheID": "c6aa3ef8cb6b1c33d9507d8ebdefa8bd", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestCatLegsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "bfa07f8bbeb86466f11b1e960da49893"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestCatLegsQuery$variables, + RelayResolverInterfaceTestCatLegsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestChickenLegsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestChickenLegsQuery.graphql.js new file mode 100644 index 0000000000000..bbfa0e8effe90 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestChickenLegsQuery.graphql.js @@ -0,0 +1,216 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<85b3c0d307881b88115daa1d9131ec6d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { RelayResolverInterfaceTestAnimalLegsFragment$fragmentType } from "./RelayResolverInterfaceTestAnimalLegsFragment.graphql"; +export type RelayResolverInterfaceTestChickenLegsQuery$variables = {||}; +export type RelayResolverInterfaceTestChickenLegsQuery$data = {| + +chicken: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestChickenLegsQuery = {| + response: RelayResolverInterfaceTestChickenLegsQuery$data, + variables: RelayResolverInterfaceTestChickenLegsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v1 = [ + (v0/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayResolverInterfaceTestChickenLegsQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Chicken", + "kind": "LinkedField", + "name": "chicken", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestAnimalLegsFragment" + } + ], + "storageKey": null + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverInterfaceTestChickenLegsQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": 
"Chicken", + "kind": "LinkedField", + "name": "chicken", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + } + ], + "type": "IAnimal", + "abstractKey": "__isIAnimal" + }, + (v0/*: any*/) + ], + "storageKey": null + } + ] + } + ], + "clientAbstractTypes": { + "__isIAnimal": [ + "Cat", + "Chicken", + "Fish" + ] + } + }, + "params": { + "cacheID": "86041f6438414769ce790da9623ff157", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestChickenLegsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = 
"faf70b0c71846f082798a4d7ad8b760b"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestChickenLegsQuery$variables, + RelayResolverInterfaceTestChickenLegsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestFishLegsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestFishLegsQuery.graphql.js new file mode 100644 index 0000000000000..42cd2636ebade --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestFishLegsQuery.graphql.js @@ -0,0 +1,254 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { RelayResolverInterfaceTestAnimalLegsFragment$fragmentType } from "./RelayResolverInterfaceTestAnimalLegsFragment.graphql"; +import {fish as queryFishResolverType} from "../../../relay-runtime/store/__tests__/resolvers/FishResolvers.js"; +// Type assertion validating that `queryFishResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryFishResolverType: () => ?{| + +id: DataID, +|}); +export type RelayResolverInterfaceTestFishLegsQuery$variables = {||}; +export type RelayResolverInterfaceTestFishLegsQuery$data = {| + +fish: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestAnimalLegsFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestFishLegsQuery = {| + response: RelayResolverInterfaceTestFishLegsQuery$data, + variables: RelayResolverInterfaceTestFishLegsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v1 = [ + (v0/*: any*/) +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestFishLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "Fish", + "modelResolvers": { + "Fish": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish__id" + }, + "kind": "RelayResolver", + "name": "fish", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').Fish, 'id', true), + "path": "fish.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "fish", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/FishResolvers').fish, + "path": "fish" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Fish", + "kind": "LinkedField", + "name": "fish", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestAnimalLegsFragment" + } + ], + "storageKey": null + } + } + ], + 
"type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverInterfaceTestFishLegsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "fish", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Fish", + "kind": "LinkedField", + "name": "fish", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "legs", + "storageKey": null + } + ], + "type": "Chicken", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Cat", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Cat", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "legs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v1/*: any*/), + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "Fish", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Fish", + "abstractKey": null + } + ], + "type": "IAnimal", + 
"abstractKey": "__isIAnimal" + }, + (v0/*: any*/) + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIAnimal": [ + "Cat", + "Chicken", + "Fish" + ] + } + }, + "params": { + "cacheID": "4eb710efdd47984f9682d5255b5560b2", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestFishLegsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "91fa8cc8364ed0222107f376f8a072f9"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestFishLegsQuery$variables, + RelayResolverInterfaceTestFishLegsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestRedOctopusColorQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestRedOctopusColorQuery.graphql.js new file mode 100644 index 0000000000000..a83ec64ed8f8e --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestRedOctopusColorQuery.graphql.js @@ -0,0 +1,198 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType } from "./RelayResolverInterfaceTestWeakAnimalColorFragment.graphql"; +import {red_octopus as queryRedOctopusResolverType} from "../../../relay-runtime/store/__tests__/resolvers/RedOctopusResolvers.js"; +// Type assertion validating that `queryRedOctopusResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryRedOctopusResolverType: () => ?RedOctopus); +import type { RedOctopus } from "../../../relay-runtime/store/__tests__/resolvers/RedOctopusResolvers.js"; +export type RelayResolverInterfaceTestRedOctopusColorQuery$variables = {||}; +export type RelayResolverInterfaceTestRedOctopusColorQuery$data = {| + +red_octopus: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestRedOctopusColorQuery = {| + response: RelayResolverInterfaceTestRedOctopusColorQuery$data, + variables: RelayResolverInterfaceTestRedOctopusColorQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestRedOctopusColorQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "RedOctopus", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "red_octopus", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/RedOctopusResolvers').red_octopus, + "path": "red_octopus", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "RedOctopus", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "RedOctopus", + "kind": "LinkedField", + "name": "red_octopus", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestWeakAnimalColorFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": 
"RelayResolverInterfaceTestRedOctopusColorQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "red_octopus", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "RedOctopus", + "kind": "LinkedField", + "name": "red_octopus", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "name": "color", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "PurpleOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "color", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "RedOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "RedOctopus", + "abstractKey": null + } + ], + "type": "IWeakAnimal", + "abstractKey": "__isIWeakAnimal" + } + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIWeakAnimal": [ + "PurpleOctopus", + "RedOctopus" + ] + } + }, + "params": { + "cacheID": "fe03faa56132552c947fd62dd0e8ce24", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestRedOctopusColorQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "ffbde5e537add11a8fa22b95fcd6c23c"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestRedOctopusColorQuery$variables, + RelayResolverInterfaceTestRedOctopusColorQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorFragment.graphql.js 
b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorFragment.graphql.js new file mode 100644 index 0000000000000..d8e1408342e0a --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorFragment.graphql.js @@ -0,0 +1,97 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<07373ffe396325694b75d4819686d994>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType: FragmentType; +export type RelayResolverInterfaceTestWeakAnimalColorFragment$data = {| + +color: ?string, + +$fragmentType: RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType, +|}; +export type RelayResolverInterfaceTestWeakAnimalColorFragment$key = { + +$data?: RelayResolverInterfaceTestWeakAnimalColorFragment$data, + +$fragmentSpreads: RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayResolverInterfaceTestWeakAnimalColorFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "PurpleOctopus____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "color", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/PurpleOctopusResolvers').color, '__relay_model_instance', true), + "path": "color" + } + ], + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "RedOctopus____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "color", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/RedOctopusResolvers').color, '__relay_model_instance', true), + "path": "color" + } + ], + "type": "RedOctopus", + "abstractKey": null + } + ] + } + ], + "type": "IWeakAnimal", + "abstractKey": "__isIWeakAnimal" +}; + +if (__DEV__) { + (node/*: any*/).hash = "3f50d51aac998df03ff67ac9a677b0c5"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType, + RelayResolverInterfaceTestWeakAnimalColorFragment$data, +>*/); diff --git 
a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorQuery.graphql.js new file mode 100644 index 0000000000000..cca00789b8756 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalColorQuery.graphql.js @@ -0,0 +1,222 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<13ecc0c25db33887559a2d0b0ff5bd5e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType } from "./RelayResolverInterfaceTestWeakAnimalColorFragment.graphql"; +import {weak_animal as queryWeakAnimalResolverType} from "../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js"; +// Type assertion validating that `queryWeakAnimalResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryWeakAnimalResolverType: ( + args: {| + request: WeakAnimalRequest, + |}, +) => ?Query__weak_animal$normalization); +import type { Query__weak_animal$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql"; +export type WeakAnimalRequest = {| + ofType: string, +|}; +export type RelayResolverInterfaceTestWeakAnimalColorQuery$variables = {| + request: WeakAnimalRequest, +|}; +export type RelayResolverInterfaceTestWeakAnimalColorQuery$data = {| + +weak_animal: ?{| + +$fragmentSpreads: RelayResolverInterfaceTestWeakAnimalColorFragment$fragmentType, + |}, +|}; +export type RelayResolverInterfaceTestWeakAnimalColorQuery = {| + response: RelayResolverInterfaceTestWeakAnimalColorQuery$data, + variables: RelayResolverInterfaceTestWeakAnimalColorQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "request" + } +], +v1 = [ + { + "kind": "Variable", + "name": "request", + "variableName": "request" + } +], +v2 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestWeakAnimalColorQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": null, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "weak_animal", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers').weak_animal, + "path": "weak_animal", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": null, + "plural": false, + "normalizationNode": 
require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql') + } + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "weak_animal", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolverInterfaceTestWeakAnimalColorFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverInterfaceTestWeakAnimalColorQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "weak_animal", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "weak_animal", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "color", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v2/*: any*/), + "type": "PurpleOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "color", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v2/*: any*/), + "type": "RedOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "RedOctopus", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "clientAbstractTypes": { + "__isIWeakAnimal": [ + "PurpleOctopus", + "RedOctopus" + ] + } 
+ }, + "params": { + "cacheID": "fc56803374dcf9a2294d2787a7390568", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestWeakAnimalColorQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "b042ac45639bb10096cef34450b19b77"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestWeakAnimalColorQuery$variables, + RelayResolverInterfaceTestWeakAnimalColorQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalGreetingQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalGreetingQuery.graphql.js new file mode 100644 index 0000000000000..cbd9eb57b8333 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverInterfaceTestWeakAnimalGreetingQuery.graphql.js @@ -0,0 +1,258 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<0dda61bd5d4cfe5e4bfd63029690812c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { PurpleOctopus____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql"; +import type { RedOctopus____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql"; +import {greeting as iWeakAnimalGreetingResolverType} from "../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js"; +// Type assertion validating that `iWeakAnimalGreetingResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(iWeakAnimalGreetingResolverType: ( + model: PurpleOctopus____relay_model_instance$data['__relay_model_instance'] | RedOctopus____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import {weak_animal as queryWeakAnimalResolverType} from "../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js"; +// Type assertion validating that `queryWeakAnimalResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryWeakAnimalResolverType: ( + args: {| + request: WeakAnimalRequest, + |}, +) => ?Query__weak_animal$normalization); +import type { Query__weak_animal$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql"; +export type WeakAnimalRequest = {| + ofType: string, +|}; +export type RelayResolverInterfaceTestWeakAnimalGreetingQuery$variables = {| + request: WeakAnimalRequest, +|}; +export type RelayResolverInterfaceTestWeakAnimalGreetingQuery$data = {| + +weak_animal: ?{| + +greeting: ?string, + |}, +|}; +export type RelayResolverInterfaceTestWeakAnimalGreetingQuery = {| + response: RelayResolverInterfaceTestWeakAnimalGreetingQuery$data, + variables: RelayResolverInterfaceTestWeakAnimalGreetingQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "request" + } +], +v1 = [ + { + "kind": "Variable", + "name": "request", + "variableName": "request" + } +], +v2 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverInterfaceTestWeakAnimalGreetingQuery", + 
"selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": null, + "modelResolvers": null, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "weak_animal", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers').weak_animal, + "path": "weak_animal", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": null, + "plural": false, + "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql') + } + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "weak_animal", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "PurpleOctopus____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers').greeting, '__relay_model_instance', true), + "path": "weak_animal.greeting" + } + ], + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "RedOctopus____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "greeting", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql'), 
require('./../../../relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers').greeting, '__relay_model_instance', true), + "path": "weak_animal.greeting" + } + ], + "type": "RedOctopus", + "abstractKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverInterfaceTestWeakAnimalGreetingQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "weak_animal", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "weak_animal", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v2/*: any*/), + "type": "PurpleOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "name": "greeting", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": (v2/*: any*/), + "type": "RedOctopus", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "RedOctopus", + "abstractKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "5394312c92d4f62346e896c932509875", + "id": null, + "metadata": {}, + "name": "RelayResolverInterfaceTestWeakAnimalGreetingQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = 
"f871e0ce246bae6f8a3048e352bf8bd2"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverInterfaceTestWeakAnimalGreetingQuery$variables, + RelayResolverInterfaceTestWeakAnimalGreetingQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithArgumentsQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithArgumentsQuery.graphql.js new file mode 100644 index 0000000000000..f6e27346ffa4d --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithArgumentsQuery.graphql.js @@ -0,0 +1,307 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<9fe98e9b83fbd827f850f1e30deb73bd>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {text_with_prefix as todoDescriptionTextWithPrefixResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextWithPrefixResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextWithPrefixResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], + args: {| + prefix: string, + |}, +) => ?string); +import {fancy_description as todoModelFancyDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelFancyDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoModelFancyDescriptionResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?TodoDescription); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverModelTestFieldWithArgumentsQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestFieldWithArgumentsQuery$data = {| + +todo_model: ?{| + +fancy_description: ?{| + +text_with_prefix: ?string, + |}, + |}, +|}; +export type RelayResolverModelTestFieldWithArgumentsQuery = {| + response: RelayResolverModelTestFieldWithArgumentsQuery$data, + variables: RelayResolverModelTestFieldWithArgumentsQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = [ + { + "kind": "Literal", + "name": "prefix", + 
"value": "[x]" + } +], +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestFieldWithArgumentsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + "path": "todo_model" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "fancy_description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), 
require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', true), + "path": "todo_model.fancy_description", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v2/*: any*/), + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "text_with_prefix", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text_with_prefix, '__relay_model_instance', true), + "path": "todo_model.fancy_description.text_with_prefix" + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestFieldWithArgumentsQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "fancy_description", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": 
{ + "kind": "InlineFragment", + "selections": [ + (v3/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description", + "plural": false, + "selections": [ + { + "name": "text_with_prefix", + "args": (v2/*: any*/), + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": "text_with_prefix(prefix:\"[x]\")", + "isOutputType": true + } + ], + "storageKey": null + } + }, + (v3/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "b10312a5cde1a4ef8e38b4b474a45635", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestFieldWithArgumentsQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "d0daaf4fcd4eaf3bce605aa4775fff4a"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestFieldWithArgumentsQuery$variables, + RelayResolverModelTestFieldWithArgumentsQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentLegacyQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentLegacyQuery.graphql.js new file mode 100644 index 0000000000000..96c4ff70f516b --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentLegacyQuery.graphql.js @@ -0,0 +1,204 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoModelCapitalizedIDLegacy$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedIDLegacy.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {capitalized_id_legacy as todoModelCapitalizedIdLegacyResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelCapitalizedIdLegacyResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelCapitalizedIdLegacyResolverType: ( + rootKey: TodoModelCapitalizedIDLegacy$key, +) => ?string); +export type RelayResolverModelTestFieldWithRootFragmentLegacyQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestFieldWithRootFragmentLegacyQuery$data = {| + +todo_model: ?{| + +capitalized_id_legacy: ?string, + |}, +|}; +export type RelayResolverModelTestFieldWithRootFragmentLegacyQuery = {| + response: RelayResolverModelTestFieldWithRootFragmentLegacyQuery$data, + variables: RelayResolverModelTestFieldWithRootFragmentLegacyQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestFieldWithRootFragmentLegacyQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + 
"path": "todo_model" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModelCapitalizedIDLegacy" + }, + "kind": "RelayResolver", + "name": "capitalized_id_legacy", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').capitalized_id_legacy, + "path": "todo_model.capitalized_id_legacy" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestFieldWithRootFragmentLegacyQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "name": "capitalized_id_legacy", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v2/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + (v2/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "f3f20658365422d00657b857562271d6", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestFieldWithRootFragmentLegacyQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "6f12681d0454d3c2f6c9ecf245085a85"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestFieldWithRootFragmentLegacyQuery$variables, + 
RelayResolverModelTestFieldWithRootFragmentLegacyQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentQuery.graphql.js new file mode 100644 index 0000000000000..0bae041300b8e --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFieldWithRootFragmentQuery.graphql.js @@ -0,0 +1,204 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoModelCapitalizedID$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedID.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {capitalized_id as todoModelCapitalizedIdResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelCapitalizedIdResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelCapitalizedIdResolverType: ( + rootKey: TodoModelCapitalizedID$key, +) => ?string); +export type RelayResolverModelTestFieldWithRootFragmentQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestFieldWithRootFragmentQuery$data = {| + +todo_model: ?{| + +capitalized_id: ?string, + |}, +|}; +export type RelayResolverModelTestFieldWithRootFragmentQuery = {| + response: RelayResolverModelTestFieldWithRootFragmentQuery$data, + variables: RelayResolverModelTestFieldWithRootFragmentQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestFieldWithRootFragmentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + "path": "todo_model" + }, + "linkedField": { + "alias": 
null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModelCapitalizedID" + }, + "kind": "RelayResolver", + "name": "capitalized_id", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').capitalized_id, + "path": "todo_model.capitalized_id" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestFieldWithRootFragmentQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "name": "capitalized_id", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v2/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + (v2/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "2fb513931cd5c8f2e4c1ec5a01ed6081", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestFieldWithRootFragmentQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "63d4f2846eb83fcbc57a5ce5abea136b"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestFieldWithRootFragmentQuery$variables, + RelayResolverModelTestFieldWithRootFragmentQuery$data, +>*/); diff --git 
a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFragment.graphql.js index ad8a1f2abe372..18a56e4cc98b1 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6e65673ce9a8489063228677cb3a8ba1>> + * @generated SignedSource<<9ae3d9fb138363a3b1b1424a3e460aec>> * @flow * @lightSyntaxTransform * @nogrep @@ -20,7 +20,6 @@ import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; -import type { TodoModel__fancy_description$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql"; import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; import type { FragmentType } from "relay-runtime"; import {color as todoDescriptionColorResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; @@ -28,7 +27,7 @@ import {color as todoDescriptionColorResolverType} from "../../../relay-runtime/ // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoDescriptionColorResolverType: ( __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], -) => mixed); +) => ?mixed); import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; // Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -44,7 +43,7 @@ import {fancy_description as todoModelFancyDescriptionResolverType} from "../../ declare export opaque type RelayResolverModelTestFragment$fragmentType: FragmentType; export type RelayResolverModelTestFragment$data = {| +fancy_description: ?{| - +color: ?$Call<((...empty[]) => R) => R, typeof todoDescriptionColorResolverType>, + +color: ?ReturnType, +text: ?string, |}, +id: string, @@ -74,6 +73,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoDescription", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -84,12 +84,12 @@ return { }, "kind": "RelayResolver", "name": "fancy_description", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapper(require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', false), '__relay_model_instance', false), + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', true), "path": "fancy_description", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "TodoDescription", - 
"plural": false, - "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql') + "plural": false } }, "linkedField": { @@ -106,8 +106,8 @@ return { "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": "text", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', false), - "path": "text" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', true), + "path": "fancy_description.text" }, { "alias": null, @@ -115,8 +115,8 @@ return { "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": "color", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', false), - "path": "color" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', true), + "path": "fancy_description.color" } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestGetMutableEntityQuery.graphql.js 
b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestGetMutableEntityQuery.graphql.js new file mode 100644 index 0000000000000..3450f5930dad5 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestGetMutableEntityQuery.graphql.js @@ -0,0 +1,99 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<37153b011eadba493360d4d9d22f7598>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState } from "relay-runtime"; +import {mutable_entity as queryMutableEntityResolverType} from "../../../relay-runtime/store/__tests__/resolvers/MutableModel.js"; +// Type assertion validating that `queryMutableEntityResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryMutableEntityResolverType: () => LiveState); +export type RelayResolverModelTestGetMutableEntityQuery$variables = {||}; +export type RelayResolverModelTestGetMutableEntityQuery$data = {| + +mutable_entity: ?ReturnType["read"]>, +|}; +export type RelayResolverModelTestGetMutableEntityQuery = {| + response: RelayResolverModelTestGetMutableEntityQuery$data, + variables: RelayResolverModelTestGetMutableEntityQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayResolverModelTestGetMutableEntityQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "mutable_entity", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/MutableModel').mutable_entity, + "path": "mutable_entity" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverModelTestGetMutableEntityQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "mutable_entity", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "aa33fbf58d2c2c1640de7da7280d2f2e", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestGetMutableEntityQuery", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "bd6186904ff5b69591c6929ee7f72aa4"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestGetMutableEntityQuery$variables, + RelayResolverModelTestGetMutableEntityQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestInterfaceFragment.graphql.js 
b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestInterfaceFragment.graphql.js index 67eebd4faf045..1a67f464cdb65 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestInterfaceFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestInterfaceFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<2e452547adee3384123090730327ab6b>> + * @generated SignedSource<<7b2beed5937c4289acdd306ff8af83e4>> * @flow * @lightSyntaxTransform * @nogrep @@ -22,7 +22,6 @@ import type { TodoDescription____relay_model_instance$data } from "./../../../re import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; import type { TodoDescription__some_client_type_with_interface$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription__some_client_type_with_interface$normalization.graphql"; import type { TodoDescription__some_interface$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription__some_interface$normalization.graphql"; -import type { TodoModel__fancy_description$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql"; import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; import type { FragmentType } from "relay-runtime"; import {some_client_type_with_interface as todoDescriptionSomeClientTypeWithInterfaceResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; @@ -46,13 +45,13 @@ import {fancy_description as todoModelFancyDescriptionResolverType} from "../../ declare export opaque type RelayResolverModelTestInterfaceFragment$fragmentType: FragmentType; export type 
RelayResolverModelTestInterfaceFragment$data = {| +fancy_description: ?{| - +some_client_type_with_interface: {| + +some_client_type_with_interface: ?{| +client_interface: {| +__typename: string, +description: ?string, |}, |}, - +some_interface: {| + +some_interface: ?{| +__typename: string, +description: ?string, |}, @@ -99,6 +98,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoDescription", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -109,12 +109,12 @@ return { }, "kind": "RelayResolver", "name": "fancy_description", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapper(require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', false), '__relay_model_instance', false), + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', true), "path": "fancy_description", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "TodoDescription", - "plural": false, - "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql') + "plural": false } }, "linkedField": { @@ -128,15 +128,17 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": null, + "modelResolvers": null, "backingField": { "alias": null, "args": null, "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": "some_interface", - "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').some_interface, '__relay_model_instance', false), - "path": "some_interface", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').some_interface, '__relay_model_instance', true), + "path": "fancy_description.some_interface", "normalizationInfo": { + "kind": "OutputType", "concreteType": null, "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription__some_interface$normalization.graphql') @@ -156,15 +158,17 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "ClientTypeWithNestedClientInterface", + "modelResolvers": null, "backingField": { "alias": null, "args": null, "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": "some_client_type_with_interface", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').some_client_type_with_interface, '__relay_model_instance', false), - "path": "some_client_type_with_interface", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').some_client_type_with_interface, '__relay_model_instance', true), + "path": 
"fancy_description.some_client_type_with_interface", "normalizationInfo": { + "kind": "OutputType", "concreteType": "ClientTypeWithNestedClientInterface", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription__some_client_type_with_interface$normalization.graphql') diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestNullWeakClientEdgeQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestNullWeakClientEdgeQuery.graphql.js new file mode 100644 index 0000000000000..09c307a7f3e4e --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestNullWeakClientEdgeQuery.graphql.js @@ -0,0 +1,307 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<3b405f2f5d45372047ed472a6ba2a316>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {text_with_prefix as todoDescriptionTextWithPrefixResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextWithPrefixResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextWithPrefixResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], + args: {| + prefix: string, + |}, +) => ?string); +import {fancy_description_null as todoModelFancyDescriptionNullResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelFancyDescriptionNullResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelFancyDescriptionNullResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?TodoDescription); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverModelTestNullWeakClientEdgeQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestNullWeakClientEdgeQuery$data = {| + +todo_model: ?{| + +fancy_description_null: ?{| + +text_with_prefix: ?string, + |}, + |}, +|}; +export type RelayResolverModelTestNullWeakClientEdgeQuery = {| + response: RelayResolverModelTestNullWeakClientEdgeQuery$data, + variables: RelayResolverModelTestNullWeakClientEdgeQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = [ + { + "kind": "Literal", + "name": "prefix", + "value": "[x]" + } +], +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestNullWeakClientEdgeQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, + "backingField": { + 
"alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + "path": "todo_model" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "fancy_description_null", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description_null, '__relay_model_instance', true), + "path": "todo_model.fancy_description_null", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description_null", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v2/*: any*/), + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "text_with_prefix", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text_with_prefix, '__relay_model_instance', true), + "path": 
"todo_model.fancy_description_null.text_with_prefix" + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestNullWeakClientEdgeQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "fancy_description_null", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v3/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description_null", + "plural": false, + "selections": [ + { + "name": "text_with_prefix", + "args": (v2/*: any*/), + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": "text_with_prefix(prefix:\"[x]\")", + "isOutputType": true + } + ], + "storageKey": null + } + }, + (v3/*: any*/) + ], + "storageKey": null + } + } + 
] + }, + "params": { + "cacheID": "0b15928ebb522ddec3b6c25a60175167", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestNullWeakClientEdgeQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "0c8b4fe8a8cff6b95781f5ad618fb7f0"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestNullWeakClientEdgeQuery$variables, + RelayResolverModelTestNullWeakClientEdgeQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestSuspendedWeakClientEdgeQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestSuspendedWeakClientEdgeQuery.graphql.js new file mode 100644 index 0000000000000..3a94d80037486 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestSuspendedWeakClientEdgeQuery.graphql.js @@ -0,0 +1,307 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<430ac2bc4f448133a1efd70b6d1760ff>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState, DataID } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {text_with_prefix as todoDescriptionTextWithPrefixResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextWithPrefixResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextWithPrefixResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], + args: {| + prefix: string, + |}, +) => ?string); +import {fancy_description_suspends as todoModelFancyDescriptionSuspendsResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelFancyDescriptionSuspendsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelFancyDescriptionSuspendsResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => LiveState); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverModelTestSuspendedWeakClientEdgeQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestSuspendedWeakClientEdgeQuery$data = {| + +todo_model: ?{| + +fancy_description_suspends: ?{| + +text_with_prefix: ?string, + |}, + |}, +|}; +export type RelayResolverModelTestSuspendedWeakClientEdgeQuery = {| + response: RelayResolverModelTestSuspendedWeakClientEdgeQuery$data, + variables: RelayResolverModelTestSuspendedWeakClientEdgeQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = [ + { + "kind": "Literal", + "name": "prefix", + "value": "[x]" + } +], +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestSuspendedWeakClientEdgeQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } 
+ }, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + "path": "todo_model" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayLiveResolver", + "name": "fancy_description_suspends", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description_suspends, '__relay_model_instance', true), + "path": "todo_model.fancy_description_suspends", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description_suspends", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v2/*: any*/), + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "text_with_prefix", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text_with_prefix, 
'__relay_model_instance', true), + "path": "todo_model.fancy_description_suspends.text_with_prefix" + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestSuspendedWeakClientEdgeQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "fancy_description_suspends", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v3/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description_suspends", + "plural": false, + "selections": [ + { + "name": "text_with_prefix", + "args": (v2/*: any*/), + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": "text_with_prefix(prefix:\"[x]\")", + "isOutputType": true + } + ], + "storageKey": null 
+ } + }, + (v3/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "c2dfd6dff420802199109b2dc95c47d8", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestSuspendedWeakClientEdgeQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "c111f7607023b81dcb652418f91c61df"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestSuspendedWeakClientEdgeQuery$variables, + RelayResolverModelTestSuspendedWeakClientEdgeQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoNullQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoNullQuery.graphql.js index c5d7feb0ca0e9..050fe1ee948b5 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoNullQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoNullQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6bee1dc1952c6faf691f5addcb7012a1>> + * @generated SignedSource<<4daaee3d43416e3b43ccb215acf9fe4f>> * @flow * @lightSyntaxTransform * @nogrep @@ -68,6 +68,21 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model_null", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model_null.__relay_model_instance" + } + }, "backingField": { "alias": null, "args": null, diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoQuery.graphql.js 
b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoQuery.graphql.js index 427d93a2173b1..cdd6045368712 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<07884f090fba4c63975607fe481015b0>> * @flow * @lightSyntaxTransform * @nogrep @@ -92,6 +92,21 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, "backingField": { "alias": null, "args": (v1/*: any*/), diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithInterfaceQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithInterfaceQuery.graphql.js index c2534ecfda1c6..722df43d8324f 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithInterfaceQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithInterfaceQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -108,6 +108,21 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + 
"kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, "backingField": { "alias": null, "args": (v1/*: any*/), diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithNullablePluralFieldQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithNullablePluralFieldQuery.graphql.js new file mode 100644 index 0000000000000..b7a4e2904f8c6 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithNullablePluralFieldQuery.graphql.js @@ -0,0 +1,297 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<52f419e56f3be155c39f69eb0b45b876>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {todo_model as queryTodoModelResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel.js"; +// Type assertion validating that `queryTodoModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryTodoModelResolverType: ( + args: {| + todoID: string, + |}, +) => ?{| + +id: DataID, +|}); +import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import {many_fancy_descriptions_but_some_are_null as todoModelManyFancyDescriptionsButSomeAreNullResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelManyFancyDescriptionsButSomeAreNullResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelManyFancyDescriptionsButSomeAreNullResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?$ReadOnlyArray); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverModelTestTodoWithNullablePluralFieldQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestTodoWithNullablePluralFieldQuery$data = {| + +todo_model: ?{| + +many_fancy_descriptions_but_some_are_null: ?$ReadOnlyArray, + |}, +|}; +export type RelayResolverModelTestTodoWithNullablePluralFieldQuery = {| + response: RelayResolverModelTestTodoWithNullablePluralFieldQuery$data, + variables: RelayResolverModelTestTodoWithNullablePluralFieldQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestTodoWithNullablePluralFieldQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + 
"args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "todo_model", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodoModel').todo_model, + "path": "todo_model" + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "many_fancy_descriptions_but_some_are_null", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').many_fancy_descriptions_but_some_are_null, '__relay_model_instance', true), + "path": "todo_model.many_fancy_descriptions_but_some_are_null", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": true + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "many_fancy_descriptions_but_some_are_null", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "text", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, 
'__relay_model_instance', true), + "path": "todo_model.many_fancy_descriptions_but_some_are_null.text" + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestTodoWithNullablePluralFieldQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "todo_model", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "todo_model", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "many_fancy_descriptions_but_some_are_null", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v2/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "many_fancy_descriptions_but_some_are_null", + "plural": true, + "selections": [ + { + "name": "text", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + }, + 
(v2/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "39e6653a694196b5a328171cb82b3478", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestTodoWithNullablePluralFieldQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "d015c8ed1077211af8500b2437094101"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestTodoWithNullablePluralFieldQuery$variables, + RelayResolverModelTestTodoWithNullablePluralFieldQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithPluralFieldQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithPluralFieldQuery.graphql.js index a8ab8ebec57a6..f0e334ce1f266 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithPluralFieldQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestTodoWithPluralFieldQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<43becbbdb2d6673ea915d42969d9e7c1>> * @flow * @lightSyntaxTransform * @nogrep @@ -92,6 +92,21 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": [], + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "todo_model", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "todo_model.__relay_model_instance" + } + }, "backingField": { "alias": null, "args": (v1/*: any*/), diff --git 
a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveColorFieldQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveColorFieldQuery.graphql.js new file mode 100644 index 0000000000000..e137504865340 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveColorFieldQuery.graphql.js @@ -0,0 +1,218 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<9cfecd71716f026151b9a16052a81992>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import {live_todo_description as queryLiveTodoDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `queryLiveTodoDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryLiveTodoDescriptionResolverType: ( + args: {| + todoID: string, + |}, +) => LiveState); +import {live_color as todoDescriptionLiveColorResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionLiveColorResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoDescriptionLiveColorResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], +) => LiveState); +import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverModelTestWeakLiveColorFieldQuery$variables = {| + id: string, +|}; +export type RelayResolverModelTestWeakLiveColorFieldQuery$data = {| + +live_todo_description: ?{| + +live_color: ?ReturnType["read"]>, + +text: ?string, + |}, +|}; +export type RelayResolverModelTestWeakLiveColorFieldQuery = {| + response: RelayResolverModelTestWeakLiveColorFieldQuery$data, + variables: RelayResolverModelTestWeakLiveColorFieldQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "todoID", + "variableName": "id" + } +], +v2 = { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" +}, +v3 = { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverModelTestWeakLiveColorFieldQuery", + "selections": [ + { + "kind": 
"ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayLiveResolver", + "name": "live_todo_description", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').live_todo_description, + "path": "live_todo_description", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "live_todo_description", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": (v2/*: any*/), + "kind": "RelayResolver", + "name": "text", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', true), + "path": "live_todo_description.text" + }, + { + "alias": null, + "args": null, + "fragment": (v2/*: any*/), + "kind": "RelayLiveResolver", + "name": "live_color", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').live_color, '__relay_model_instance', true), + "path": "live_todo_description.live_color" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResolverModelTestWeakLiveColorFieldQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + 
"name": "live_todo_description", + "args": (v1/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": (v1/*: any*/), + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "live_todo_description", + "plural": false, + "selections": [ + { + "name": "text", + "args": null, + "fragment": (v3/*: any*/), + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + { + "name": "live_color", + "args": null, + "fragment": (v3/*: any*/), + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "570571bca08d9088ff64f650056a75de", + "id": null, + "metadata": {}, + "name": "RelayResolverModelTestWeakLiveColorFieldQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "b69347659fd3f00f15e9eae940b57f60"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverModelTestWeakLiveColorFieldQuery$variables, + RelayResolverModelTestWeakLiveColorFieldQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveFieldQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveFieldQuery.graphql.js index 8e32de786ed99..a46807efa835d 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveFieldQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWeakLiveFieldQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3c8b15b5ebc0311c8409aa4b57597ea2>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } 
from "relay-runtime"; import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; import {live_todo_description as queryLiveTodoDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; // Type assertion validating that `queryLiveTodoDescriptionResolverType` resolver is correctly implemented. @@ -33,21 +33,20 @@ import {color as todoDescriptionColorResolverType} from "../../../relay-runtime/ // A type error here indicates that the type signature of the resolver module is incorrect. (todoDescriptionColorResolverType: ( __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], -) => mixed); +) => ?mixed); import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; // Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoDescriptionTextResolverType: ( __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], ) => ?string); -import type { Query__live_todo_description$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__live_todo_description$normalization.graphql"; import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; export type RelayResolverModelTestWeakLiveFieldQuery$variables = {| id: string, |}; export type RelayResolverModelTestWeakLiveFieldQuery$data = {| +live_todo_description: ?{| - +color: ?$Call<((...empty[]) => R) => R, typeof todoDescriptionColorResolverType>, + +color: ?ReturnType, +text: ?string, |}, |}; @@ -103,18 +102,19 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoDescription", + "modelResolvers": null, "backingField": { "alias": null, "args": (v1/*: any*/), "fragment": null, "kind": "RelayLiveResolver", "name": "live_todo_description", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapperLive(require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').live_todo_description, '__relay_model_instance', false), + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').live_todo_description, "path": "live_todo_description", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "TodoDescription", - "plural": false, - "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__live_todo_description$normalization.graphql') + "plural": false } }, "linkedField": { @@ -131,8 +131,8 @@ return { "fragment": (v2/*: any*/), "kind": "RelayResolver", "name": "text", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), 
require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', false), - "path": "text" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', true), + "path": "live_todo_description.text" }, { "alias": null, @@ -140,8 +140,8 @@ return { "fragment": (v2/*: any*/), "kind": "RelayResolver", "name": "color", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', false), - "path": "color" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', true), + "path": "live_todo_description.color" } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWithPluralFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWithPluralFragment.graphql.js index 343506942bab9..1824b98c093ae 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWithPluralFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolverModelTestWithPluralFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<459a36b2d310d3035d012e95778606b6>> * @flow * @lightSyntaxTransform * @nogrep @@ -20,7 +20,6 @@ import type 
{ Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; -import type { TodoModel__many_fancy_descriptions$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__many_fancy_descriptions$normalization.graphql"; import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; import type { FragmentType } from "relay-runtime"; import {color as todoDescriptionColorResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; @@ -28,7 +27,7 @@ import {color as todoDescriptionColorResolverType} from "../../../relay-runtime/ // A type error here indicates that the type signature of the resolver module is incorrect. (todoDescriptionColorResolverType: ( __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], -) => mixed); +) => ?mixed); import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; // Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -40,11 +39,11 @@ import {many_fancy_descriptions as todoModelManyFancyDescriptionsResolverType} f // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoModelManyFancyDescriptionsResolverType: ( __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], -) => $ReadOnlyArray); +) => ?$ReadOnlyArray); declare export opaque type RelayResolverModelTestWithPluralFragment$fragmentType: FragmentType; export type RelayResolverModelTestWithPluralFragment$data = {| +many_fancy_descriptions: ?$ReadOnlyArray((...empty[]) => R) => R, typeof todoDescriptionColorResolverType>, + +color: ?ReturnType, +text: ?string, |}>, +$fragmentType: RelayResolverModelTestWithPluralFragment$fragmentType, @@ -73,6 +72,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoDescription", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -83,12 +83,12 @@ return { }, "kind": "RelayResolver", "name": "many_fancy_descriptions", - "resolverModule": require('relay-runtime/experimental').weakObjectWrapper(require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').many_fancy_descriptions, '__relay_model_instance', false), '__relay_model_instance', true), + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').many_fancy_descriptions, '__relay_model_instance', true), "path": "many_fancy_descriptions", "normalizationInfo": { + "kind": "WeakModel", "concreteType": "TodoDescription", - "plural": true, - "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__many_fancy_descriptions$normalization.graphql') + "plural": true } }, "linkedField": { @@ -105,8 +105,8 @@ return { "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": 
"text", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', false), - "path": "text" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', true), + "path": "many_fancy_descriptions.text" }, { "alias": null, @@ -114,8 +114,8 @@ return { "fragment": (v0/*: any*/), "kind": "RelayResolver", "name": "color", - "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', false), - "path": "color" + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').color, '__relay_model_instance', true), + "path": "many_fancy_descriptions.color" } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ErrorModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ErrorModel_Query.graphql.js new file mode 100644 index 0000000000000..5dab775ea6d74 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ErrorModel_Query.graphql.js @@ -0,0 
+1,157 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<2a1161fc02bf8e3fcf82a67a1fc6454e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {edge_to_model_that_throws as queryEdgeToModelThatThrowsResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToModelThatThrowsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToModelThatThrowsResolverType: () => ?{| + +id: DataID, +|}); +export type RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$data = {| + +edge_to_model_that_throws: ?{| + +__typename: "ErrorModel", + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_ErrorModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_ErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ErrorModel", + "modelResolvers": { + "ErrorModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": 
"ErrorModel__id" + }, + "kind": "RelayResolver", + "name": "edge_to_model_that_throws", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./ErrorModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').ErrorModel, 'id', true), + "path": "edge_to_model_that_throws.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_model_that_throws", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_model_that_throws, + "path": "edge_to_model_that_throws" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ErrorModel", + "kind": "LinkedField", + "name": "edge_to_model_that_throws", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_ErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_model_that_throws", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ErrorModel", + "kind": "LinkedField", + "name": "edge_to_model_that_throws", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "02171c2b441215a36b86b1d18bed6511", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_ErrorModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "ebe9a87123ffc5546caeba534e71db9b"; +} + +module.exports = 
((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_ErrorModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_LiveModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_LiveModel_Query.graphql.js new file mode 100644 index 0000000000000..7323e846ec41d --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_LiveModel_Query.graphql.js @@ -0,0 +1,279 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<157ead33394671c4571b24fad8112407>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoDescription____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {edge_to_live_object_does_not_exist as queryEdgeToLiveObjectDoesNotExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToLiveObjectDoesNotExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryEdgeToLiveObjectDoesNotExistResolverType: () => ?{| + +id: DataID, +|}); +import {text as todoDescriptionTextResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +// Type assertion validating that `todoDescriptionTextResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionTextResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import {fancy_description as todoModelFancyDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelFancyDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoModelFancyDescriptionResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?TodoDescription); +import type { TodoDescription } from "../../../relay-runtime/store/__tests__/resolvers/TodoDescription.js"; +export type RelayResolverNullableModelClientEdgeTest_LiveModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_LiveModel_Query$data = {| + +edge_to_live_object_does_not_exist: ?{| + +fancy_description: ?{| + +text: ?string, + |}, + +id: string, + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_LiveModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_LiveModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_LiveModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": 
"RelayResolverNullableModelClientEdgeTest_LiveModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "edge_to_live_object_does_not_exist", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "edge_to_live_object_does_not_exist.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_live_object_does_not_exist", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_live_object_does_not_exist, + "path": "edge_to_live_object_does_not_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_live_object_does_not_exist", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoDescription", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "fancy_description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').fancy_description, '__relay_model_instance', true), + "path": "edge_to_live_object_does_not_exist.fancy_description", + 
"normalizationInfo": { + "kind": "WeakModel", + "concreteType": "TodoDescription", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "text", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoDescription').text, '__relay_model_instance', true), + "path": "edge_to_live_object_does_not_exist.fancy_description.text" + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_LiveModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_live_object_does_not_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_live_object_does_not_exist", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "fancy_description", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": 
"RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoDescription", + "kind": "LinkedField", + "name": "fancy_description", + "plural": false, + "selections": [ + { + "name": "text", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "TodoDescription", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "ffc0723de59b6960e0ea5a5599142177", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_LiveModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "37156528a49b790efe6451531de61ea5"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_LiveModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_LiveModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query.graphql.js new file mode 100644 index 0000000000000..8d597eeafbe33 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query.graphql.js @@ -0,0 +1,157 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {edge_to_plural_models_that_throw as queryEdgeToPluralModelsThatThrowResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToPluralModelsThatThrowResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToPluralModelsThatThrowResolverType: () => ?$ReadOnlyArray); +export type RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$data = {| + +edge_to_plural_models_that_throw: ?$ReadOnlyArray, +|}; +export type RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ErrorModel", + "modelResolvers": { + "ErrorModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ErrorModel__id" + }, + "kind": "RelayResolver", + "name": "edge_to_plural_models_that_throw", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('./ErrorModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').ErrorModel, 'id', true), + "path": "edge_to_plural_models_that_throw.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_plural_models_that_throw", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_plural_models_that_throw, + "path": "edge_to_plural_models_that_throw" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ErrorModel", + "kind": "LinkedField", + "name": "edge_to_plural_models_that_throw", + "plural": true, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_plural_models_that_throw", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "ErrorModel", + "kind": "LinkedField", + "name": "edge_to_plural_models_that_throw", + "plural": true, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "c6c68a175aa2fcdcab9fc87f69489a4a", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "29e63ae02c6f32c751824079e556f81b"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + 
RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_PluralErrorModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query.graphql.js new file mode 100644 index 0000000000000..4fb21769380b1 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query.graphql.js @@ -0,0 +1,200 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<65d2574cb80396c6b3ddcd81e452b014>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {edge_to_plural_live_objects_none_exist as queryEdgeToPluralLiveObjectsNoneExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToPluralLiveObjectsNoneExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToPluralLiveObjectsNoneExistResolverType: () => ?$ReadOnlyArray); +import {description as todoModelDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelDescriptionResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(todoModelDescriptionResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?string); +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$data = {| + +edge_to_plural_live_objects_none_exist: ?$ReadOnlyArray, +|}; +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query = {| + response: RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "edge_to_plural_live_objects_none_exist", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "edge_to_plural_live_objects_none_exist.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_plural_live_objects_none_exist", + 
"resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_plural_live_objects_none_exist, + "path": "edge_to_plural_live_objects_none_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_plural_live_objects_none_exist", + "plural": true, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').description, '__relay_model_instance', true), + "path": "edge_to_plural_live_objects_none_exist.description" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_plural_live_objects_none_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_plural_live_objects_none_exist", + "plural": true, + "selections": [ + (v0/*: any*/), + { + "name": "description", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": 
"RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "2cf8306cf86529bf2fddf425e1816af4", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "99ff4eeb2e8eb3dfaed38852f3d2c70f"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$variables, + RelayResolverNullableModelClientEdgeTest_PluralLiveModelNoneExist_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query.graphql.js new file mode 100644 index 0000000000000..900e1918fb6a5 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query.graphql.js @@ -0,0 +1,200 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { TodoModel____relay_model_instance$data } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql"; +import {edge_to_plural_live_objects_some_exist as queryEdgeToPluralLiveObjectsSomeExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToPluralLiveObjectsSomeExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToPluralLiveObjectsSomeExistResolverType: () => ?$ReadOnlyArray); +import {description as todoModelDescriptionResolverType} from "../../../relay-runtime/store/__tests__/resolvers/TodoModel.js"; +// Type assertion validating that `todoModelDescriptionResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(todoModelDescriptionResolverType: ( + __relay_model_instance: TodoModel____relay_model_instance$data['__relay_model_instance'], +) => ?string); +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$data = {| + +edge_to_plural_live_objects_some_exist: ?$ReadOnlyArray, +|}; +export type RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "TodoModel", + "modelResolvers": { + "TodoModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel__id" + }, + "kind": "RelayLiveResolver", + "name": "edge_to_plural_live_objects_some_exist", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__id.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').TodoModel, 'id', true), + "path": "edge_to_plural_live_objects_some_exist.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_plural_live_objects_some_exist", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_plural_live_objects_some_exist, + "path": 
"edge_to_plural_live_objects_some_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_plural_live_objects_some_exist", + "plural": true, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "description", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql'), require('./../../../relay-runtime/store/__tests__/resolvers/TodoModel').description, '__relay_model_instance', true), + "path": "edge_to_plural_live_objects_some_exist.description" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_plural_live_objects_some_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoModel", + "kind": "LinkedField", + "name": "edge_to_plural_live_objects_some_exist", + "plural": true, + "selections": [ + (v0/*: any*/), + { + "name": "description", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "TodoModel", + "abstractKey": null + }, + "kind": 
"RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "21b0ec4e0d6526708b4a0de91391b7d7", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "abbb7292c9ca7ffab83aec05b278406b"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_PluralLiveModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query.graphql.js new file mode 100644 index 0000000000000..ed88a9d9fb439 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query.graphql.js @@ -0,0 +1,139 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<998e48aa0caad3542a13a6011a8c3c9f>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {edge_to_plural_models_some_throw as queryEdgeToPluralModelsSomeThrowResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToPluralModelsSomeThrowResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryEdgeToPluralModelsSomeThrowResolverType: () => ?$ReadOnlyArray); +export type RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$data = {| + +edge_to_plural_models_some_throw: ?$ReadOnlyArray, +|}; +export type RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "concreteType": "ErrorModel", + "kind": "LinkedField", + "name": "edge_to_plural_models_some_throw", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "ErrorModel", + "modelResolvers": { + "ErrorModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "ErrorModel__id" + }, + "kind": "RelayResolver", + "name": "edge_to_plural_models_some_throw", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./ErrorModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').ErrorModel, 'id', true), + "path": "edge_to_plural_models_some_throw.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_plural_models_some_throw", + "resolverModule": 
require('./../RelayResolverNullableModelClientEdge-test').edge_to_plural_models_some_throw, + "path": "edge_to_plural_models_some_throw" + }, + "linkedField": (v0/*: any*/) + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_plural_models_some_throw", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": (v0/*: any*/) + } + ] + }, + "params": { + "cacheID": "e2e77c3bba0a918fdab741702ded9248", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "4e34ce26b08774bc983ff3702de61c95"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_PluralSomeErrorModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query.graphql.js new file mode 100644 index 0000000000000..70c2ad1aa76c5 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query.graphql.js @@ -0,0 +1,117 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<2930d3741e8380d84e2c6551c5c44285>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {edge_to_server_object_does_not_exist as queryEdgeToServerObjectDoesNotExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToServerObjectDoesNotExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToServerObjectDoesNotExistResolverType: () => ?{| + +id: DataID, +|}); +export type RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$data = {| + +edge_to_server_object_does_not_exist: ?{| + +id: string, + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query = {| + response: RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query", + "selections": [ + { + "kind": "ClientEdgeToServerObject", + "operation": require('./ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query_edge_to_server_object_does_not_exist.graphql'), + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_server_object_does_not_exist", + "resolverModule": 
require('./../RelayResolverNullableModelClientEdge-test').edge_to_server_object_does_not_exist, + "path": "edge_to_server_object_does_not_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": "LinkedField", + "name": "edge_to_server_object_does_not_exist", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query", + "selections": [ + { + "name": "edge_to_server_object_does_not_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + }, + "params": { + "cacheID": "a0eacb513c2375b74c665f1f47407e41", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "6e6fc82ab5969e84d7c748516d16686b"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$variables, + RelayResolverNullableModelClientEdgeTest_ServerObjectReadOnlyId_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObject_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObject_Query.graphql.js new file mode 100644 index 0000000000000..aabfb729f6502 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_ServerObject_Query.graphql.js @@ -0,0 +1,117 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import {edge_to_server_object_does_not_exist as queryEdgeToServerObjectDoesNotExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToServerObjectDoesNotExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToServerObjectDoesNotExistResolverType: () => ?{| + +id: DataID, +|}); +export type RelayResolverNullableModelClientEdgeTest_ServerObject_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_ServerObject_Query$data = {| + +edge_to_server_object_does_not_exist: ?{| + +name: ?string, + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_ServerObject_Query = {| + response: RelayResolverNullableModelClientEdgeTest_ServerObject_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_ServerObject_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_ServerObject_Query", + "selections": [ + { + "kind": "ClientEdgeToServerObject", + "operation": require('./ClientEdgeQuery_RelayResolverNullableModelClientEdgeTest_ServerObject_Query_edge_to_server_object_does_not_exist.graphql'), + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_server_object_does_not_exist", + "resolverModule": 
require('./../RelayResolverNullableModelClientEdge-test').edge_to_server_object_does_not_exist, + "path": "edge_to_server_object_does_not_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Comment", + "kind": "LinkedField", + "name": "edge_to_server_object_does_not_exist", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_ServerObject_Query", + "selections": [ + { + "name": "edge_to_server_object_does_not_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + }, + "params": { + "cacheID": "3a648c6908a7a0fa12a1bc4e9b12d362", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_ServerObject_Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "3990dc068bf228226a21832b04bbd39a"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_ServerObject_Query$variables, + RelayResolverNullableModelClientEdgeTest_ServerObject_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_StrongModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_StrongModel_Query.graphql.js new file mode 100644 index 0000000000000..c1cd647330a23 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_StrongModel_Query.graphql.js @@ -0,0 +1,198 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { StrongModel____relay_model_instance$data } from "./StrongModel____relay_model_instance.graphql"; +import {edge_to_strong_model_does_not_exist as queryEdgeToStrongModelDoesNotExistResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToStrongModelDoesNotExistResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToStrongModelDoesNotExistResolverType: () => ?{| + +id: DataID, +|}); +import {name as strongModelNameResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `strongModelNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(strongModelNameResolverType: ( + __relay_model_instance: StrongModel____relay_model_instance$data['__relay_model_instance'], +) => ?string); +export type RelayResolverNullableModelClientEdgeTest_StrongModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_StrongModel_Query$data = {| + +edge_to_strong_model_does_not_exist: ?{| + +name: ?string, + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_StrongModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_StrongModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_StrongModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_StrongModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "StrongModel", + "modelResolvers": { + "StrongModel": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "StrongModel__id" + }, + "kind": "RelayResolver", + "name": "edge_to_strong_model_does_not_exist", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./StrongModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').StrongModel, 'id', true), + "path": "edge_to_strong_model_does_not_exist.__relay_model_instance" + } + }, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_strong_model_does_not_exist", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_strong_model_does_not_exist, + "path": "edge_to_strong_model_does_not_exist" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": 
"StrongModel", + "kind": "LinkedField", + "name": "edge_to_strong_model_does_not_exist", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "StrongModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./StrongModel____relay_model_instance.graphql'), require('./../RelayResolverNullableModelClientEdge-test').name, '__relay_model_instance', true), + "path": "edge_to_strong_model_does_not_exist.name" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_StrongModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_strong_model_does_not_exist", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "StrongModel", + "kind": "LinkedField", + "name": "edge_to_strong_model_does_not_exist", + "plural": false, + "selections": [ + { + "name": "name", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "__relay_model_instance", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "StrongModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ], + "type": "StrongModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + (v0/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "acb5dbbcdb10b8c23559acfa09470a62", + "id": null, + "metadata": {}, + "name": 
"RelayResolverNullableModelClientEdgeTest_StrongModel_Query", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "22ad42880cde32f3213c37be50f8dadd"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_StrongModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_StrongModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_WeakModel_Query.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_WeakModel_Query.graphql.js new file mode 100644 index 0000000000000..f31267247ecb6 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolverNullableModelClientEdgeTest_WeakModel_Query.graphql.js @@ -0,0 +1,169 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { WeakModel____relay_model_instance$data } from "./WeakModel____relay_model_instance.graphql"; +import {edge_to_null_weak_model as queryEdgeToNullWeakModelResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `queryEdgeToNullWeakModelResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryEdgeToNullWeakModelResolverType: () => ?WeakModel); +import {first_name as weakModelFirstNameResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `weakModelFirstNameResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(weakModelFirstNameResolverType: ( + __relay_model_instance: WeakModel____relay_model_instance$data['__relay_model_instance'], +) => ?string); +import type { WeakModel } from "../RelayResolverNullableModelClientEdge-test.js"; +export type RelayResolverNullableModelClientEdgeTest_WeakModel_Query$variables = {||}; +export type RelayResolverNullableModelClientEdgeTest_WeakModel_Query$data = {| + +edge_to_null_weak_model: ?{| + +first_name: ?string, + |}, +|}; +export type RelayResolverNullableModelClientEdgeTest_WeakModel_Query = {| + response: RelayResolverNullableModelClientEdgeTest_WeakModel_Query$data, + variables: RelayResolverNullableModelClientEdgeTest_WeakModel_Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolverNullableModelClientEdgeTest_WeakModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "WeakModel", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "edge_to_null_weak_model", + "resolverModule": require('./../RelayResolverNullableModelClientEdge-test').edge_to_null_weak_model, + "path": "edge_to_null_weak_model", + "normalizationInfo": { + "kind": "WeakModel", + "concreteType": "WeakModel", + "plural": false + } + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "WeakModel", + "kind": "LinkedField", + "name": "edge_to_null_weak_model", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "WeakModel____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "first_name", + "resolverModule": 
require('relay-runtime/experimental').resolverDataInjector(require('./WeakModel____relay_model_instance.graphql'), require('./../RelayResolverNullableModelClientEdge-test').first_name, '__relay_model_instance', true), + "path": "edge_to_null_weak_model.first_name" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolverNullableModelClientEdgeTest_WeakModel_Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "edge_to_null_weak_model", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "WeakModel", + "kind": "LinkedField", + "name": "edge_to_null_weak_model", + "plural": false, + "selections": [ + { + "name": "first_name", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ], + "type": "WeakModel", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "a25d6728f75a10c69d1e13cb6be2e84a", + "id": null, + "metadata": {}, + "name": "RelayResolverNullableModelClientEdgeTest_WeakModel_Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "e3560014bc230453c5e68fddb617537e"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolverNullableModelClientEdgeTest_WeakModel_Query$variables, + RelayResolverNullableModelClientEdgeTest_WeakModel_Query$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestExceptionalProjectQuery.graphql.js 
b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestExceptionalProjectQuery.graphql.js index 4eadbff76d568..75d268b6d3f4b 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestExceptionalProjectQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestExceptionalProjectQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<5451d709cb840278df5d822aba954bbe>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { RelayResolversWithOutputTypeTestFragment$fragmentType } from "./RelayResolversWithOutputTypeTestFragment.graphql"; import {todos as queryTodosResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodos.js"; // Type assertion validating that `queryTodosResolverType` resolver is correctly implemented. 
@@ -76,7 +76,7 @@ v1 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Todo", @@ -94,6 +94,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoConnection", + "modelResolvers": null, "backingField": { "alias": null, "args": (v0/*: any*/), @@ -103,6 +104,7 @@ return { "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodos').todos, "path": "todos", "normalizationInfo": { + "kind": "OutputType", "concreteType": "TodoConnection", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__todos$normalization.graphql') @@ -258,7 +260,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null @@ -276,7 +278,7 @@ return { "fragment": (v1/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestFragment.graphql.js index 2f50e477ef065..a1729857cba3d 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3667cd860cf304ea0aa637b1d9d43854>> + * @generated SignedSource<<0791a3689047698f85dc032c99248acd>> * @flow * @lightSyntaxTransform * @nogrep @@ -58,6 +58,7 @@ var node/*: ReaderFragment*/ = { { "kind": "ClientEdgeToClientObject", "concreteType": "TodoText", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -71,6 +72,7 @@ var node/*: ReaderFragment*/ = { "resolverModule": 
require('./../../../relay-runtime/store/__tests__/resolvers/TodoTextResolver').text, "path": "text", "normalizationInfo": { + "kind": "OutputType", "concreteType": "TodoText", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Todo__text$normalization.graphql') diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyLiveTodosQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyLiveTodosQuery.graphql.js new file mode 100644 index 0000000000000..475b51b1b11ad --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyLiveTodosQuery.graphql.js @@ -0,0 +1,253 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState } from "relay-runtime"; +import type { RelayResolversWithOutputTypeTestFragment$fragmentType } from "./RelayResolversWithOutputTypeTestFragment.graphql"; +import {many_live_todos as queryManyLiveTodosResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryManyLiveTodos.js"; +// Type assertion validating that `queryManyLiveTodosResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryManyLiveTodosResolverType: () => LiveState>); +import type { Query__many_live_todos$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__many_live_todos$normalization.graphql"; +export type RelayResolversWithOutputTypeTestManyLiveTodosQuery$variables = {||}; +export type RelayResolversWithOutputTypeTestManyLiveTodosQuery$data = {| + +many_live_todos: ?$ReadOnlyArray, +|}; +export type RelayResolversWithOutputTypeTestManyLiveTodosQuery = {| + response: RelayResolversWithOutputTypeTestManyLiveTodosQuery$data, + variables: RelayResolversWithOutputTypeTestManyLiveTodosQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = (function(){ +var v0 = { + "kind": "InlineFragment", + "selections": [ + { + "name": "self", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "todo_id", + "storageKey": null + } + ], + "type": "Todo", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "Todo", + "abstractKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayResolversWithOutputTypeTestManyLiveTodosQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "concreteType": "Todo", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "many_live_todos", + "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryManyLiveTodos').many_live_todos, + "path": "many_live_todos", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": "Todo", + "plural": true, + "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__many_live_todos$normalization.graphql') + } + }, + "linkedField": { + "alias": null, + 
"args": null, + "concreteType": "Todo", + "kind": "LinkedField", + "name": "many_live_todos", + "plural": true, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayResolversWithOutputTypeTestFragment" + } + ], + "storageKey": null + } + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayResolversWithOutputTypeTestManyLiveTodosQuery", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "many_live_todos", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "Todo", + "kind": "LinkedField", + "name": "many_live_todos", + "plural": true, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "text", + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "TodoText", + "kind": "LinkedField", + "name": "text", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "content", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "TodoTextStyle", + "kind": "LinkedField", + "name": "style", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "font_style", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "TodoTextColor", + "kind": "LinkedField", + "name": "color", + "plural": false, + "selections": [ + { + "name": "human_readable_color", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hex", + "storageKey": null + } + ], + "type": 
"TodoTextColor", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + }, + { + "name": "complete", + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "storageKey": null + } + } + ] + }, + "params": { + "cacheID": "b9aabe2c5911f0a5daf91a73d666b31c", + "id": null, + "metadata": {}, + "name": "RelayResolversWithOutputTypeTestManyLiveTodosQuery", + "operationKind": "query", + "text": null + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "f42ffca5f81738e839984490939acc31"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayResolversWithOutputTypeTestManyLiveTodosQuery$variables, + RelayResolversWithOutputTypeTestManyLiveTodosQuery$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyTodosQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyTodosQuery.graphql.js index 03f61946973c1..82789c84dd99f 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyTodosQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestManyTodosQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4312060e4773e8d091cd4f42e5bebf10>> + * @generated SignedSource<<0612be7579d4f6404f10ba4c2be3d727>> * @flow * @lightSyntaxTransform * @nogrep @@ -26,7 +26,7 @@ import {many_todos as queryManyTodosResolverType} from "../../../relay-runtime/s args: {| todo_ids: $ReadOnlyArray, |}, -) => $ReadOnlyArray); +) => ?$ReadOnlyArray); import type { Query__many_todos$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__many_todos$normalization.graphql"; export type RelayResolversWithOutputTypeTestManyTodosQuery$variables = {| 
todos: $ReadOnlyArray, @@ -79,7 +79,7 @@ v2 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Todo", @@ -97,6 +97,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "Todo", + "modelResolvers": null, "backingField": { "alias": null, "args": (v1/*: any*/), @@ -106,6 +107,7 @@ return { "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryManyTodos').many_todos, "path": "many_todos", "normalizationInfo": { + "kind": "OutputType", "concreteType": "Todo", "plural": true, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__many_todos$normalization.graphql') @@ -223,7 +225,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null @@ -241,7 +243,7 @@ return { "fragment": (v2/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTextColorComponentFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTextColorComponentFragment.graphql.js index c80beb8d26909..7f064e3669662 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTextColorComponentFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTextColorComponentFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {human_readable_color as todoTextColorHumanReadableColorResolverType} fro // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoTextColorHumanReadableColorResolverType: ( rootKey: TodoTextColorResolverFragment$key, -) => mixed); +) => ?string); declare export opaque type RelayResolversWithOutputTypeTestTextColorComponentFragment$fragmentType: FragmentType; export type RelayResolversWithOutputTypeTestTextColorComponentFragment$data = {| - +human_readable_color: ?$Call<((...empty[]) => R) => R, typeof todoTextColorHumanReadableColorResolverType>, + +human_readable_color: ?string, +$fragmentType: RelayResolversWithOutputTypeTestTextColorComponentFragment$fragmentType, |}; export type RelayResolversWithOutputTypeTestTextColorComponentFragment$key = { diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoCompleteFragment.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoCompleteFragment.graphql.js index c3faefaf54c6f..3f9a88efaa80c 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoCompleteFragment.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoCompleteFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4595575dd8109cc00a23aa8ffb50f49b>> + * @generated SignedSource<<9f6217969a01dabce4ce1b2c176f1d25>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {complete as todoCompleteResolverType} from "../../../relay-runtime/store // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoCompleteResolverType: ( rootKey: TodoCompleteResolverFragment$key, -) => mixed); +) => ?boolean); declare export opaque type RelayResolversWithOutputTypeTestTodoCompleteFragment$fragmentType: FragmentType; export type RelayResolversWithOutputTypeTestTodoCompleteFragment$data = {| - +complete: ?$Call<((...empty[]) => R) => R, typeof todoCompleteResolverType>, + +complete: ?boolean, +$fragmentType: RelayResolversWithOutputTypeTestTodoCompleteFragment$fragmentType, |}; export type RelayResolversWithOutputTypeTestTodoCompleteFragment$key = { diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoQuery.graphql.js index 2191099a97370..16f70fa06fd92 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4574da14e49da2cd2c7a51dfe63919b2>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { RelayResolversWithOutputTypeTestFragment$fragmentType } from "./RelayResolversWithOutputTypeTestFragment.graphql"; import {todo as queryTodoResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodo.js"; // Type assertion validating that `queryTodoResolverType` resolver is correctly implemented. 
@@ -80,7 +80,7 @@ v2 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Todo", @@ -98,6 +98,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "Todo", + "modelResolvers": null, "backingField": { "alias": null, "args": (v1/*: any*/), @@ -107,6 +108,7 @@ return { "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodo').todo, "path": "todo", "normalizationInfo": { + "kind": "OutputType", "concreteType": "Todo", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__todo$normalization.graphql') @@ -224,7 +226,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null @@ -242,7 +244,7 @@ return { "fragment": (v2/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoWithBlockedQuery.graphql.js b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoWithBlockedQuery.graphql.js index 4c800288d2a00..64fe4d4f10766 100644 --- a/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoWithBlockedQuery.graphql.js +++ b/packages/react-relay/__tests__/__generated__/RelayResolversWithOutputTypeTestTodoWithBlockedQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7f89eba01d5fcdaff6b15a832d0496b1>> + * @generated SignedSource<<23a2228e817b92c509d48fc7d96971e8>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,7 +18,7 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { 
RelayResolversWithOutputTypeTestFragment$fragmentType } from "./RelayResolversWithOutputTypeTestFragment.graphql"; import type { TodoBlockedByResolverFragment$key } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/TodoBlockedByResolverFragment.graphql"; import {todo as queryTodoResolverType} from "../../../relay-runtime/store/__tests__/resolvers/QueryTodo.js"; @@ -34,7 +34,7 @@ import {blocked_by as todoBlockedByResolverType} from "../../../relay-runtime/st // A type error here indicates that the type signature of the resolver module is incorrect. (todoBlockedByResolverType: ( rootKey: TodoBlockedByResolverFragment$key, -) => $ReadOnlyArray); +) => ?$ReadOnlyArray); import type { Query__todo$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__todo$normalization.graphql"; import type { Todo__blocked_by$normalization } from "./../../../relay-runtime/store/__tests__/resolvers/__generated__/Todo__blocked_by$normalization.graphql"; export type RelayResolversWithOutputTypeTestTodoWithBlockedQuery$variables = {| @@ -90,7 +90,7 @@ v2 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Todo", @@ -108,6 +108,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "Todo", + "modelResolvers": null, "backingField": { "alias": null, "args": (v1/*: any*/), @@ -117,6 +118,7 @@ return { "resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/QueryTodo').todo, "path": "todo", "normalizationInfo": { + "kind": "OutputType", "concreteType": "Todo", "plural": false, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Query__todo$normalization.graphql') @@ -133,6 +135,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "Todo", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -144,8 +147,9 @@ return { "kind": "RelayResolver", "name": "blocked_by", 
"resolverModule": require('./../../../relay-runtime/store/__tests__/resolvers/TodoBlockedByResolver').blocked_by, - "path": "blocked_by", + "path": "todo.blocked_by", "normalizationInfo": { + "kind": "OutputType", "concreteType": "Todo", "plural": true, "normalizationNode": require('./../../../relay-runtime/store/__tests__/resolvers/__generated__/Todo__blocked_by$normalization.graphql') @@ -285,7 +289,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null @@ -303,7 +307,7 @@ return { "fragment": (v2/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "storageKey": null diff --git a/packages/react-relay/__tests__/__generated__/StrongModel____relay_model_instance.graphql.js b/packages/react-relay/__tests__/__generated__/StrongModel____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..624bf37720bc3 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/StrongModel____relay_model_instance.graphql.js @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<7c5d632e9655b03baf5b566251e7b947>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { StrongModel__id$data } from "./StrongModel__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {StrongModel as strongModelRelayModelInstanceResolverType} from "../RelayResolverNullableModelClientEdge-test.js"; +// Type assertion validating that `strongModelRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(strongModelRelayModelInstanceResolverType: ( + id: StrongModel__id$data['id'], +) => mixed); +declare export opaque type StrongModel____relay_model_instance$fragmentType: FragmentType; +export type StrongModel____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: StrongModel____relay_model_instance$fragmentType, +|}; +export type StrongModel____relay_model_instance$key = { + +$data?: StrongModel____relay_model_instance$data, + +$fragmentSpreads: StrongModel____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "StrongModel____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "StrongModel__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./StrongModel__id.graphql'), require('./../RelayResolverNullableModelClientEdge-test').StrongModel, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "StrongModel", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + StrongModel____relay_model_instance$fragmentType, + StrongModel____relay_model_instance$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/StrongModel__id.graphql.js b/packages/react-relay/__tests__/__generated__/StrongModel__id.graphql.js new file mode 100644 index 0000000000000..6b4b5ab2e0943 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/StrongModel__id.graphql.js @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type StrongModel__id$fragmentType: FragmentType; +export type StrongModel__id$data = {| + +id: string, + +$fragmentType: StrongModel__id$fragmentType, +|}; +export type StrongModel__id$key = { + +$data?: StrongModel__id$data, + +$fragmentSpreads: StrongModel__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "StrongModel__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "StrongModel", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + StrongModel__id$fragmentType, + StrongModel__id$data, +>*/); diff --git a/packages/react-relay/__tests__/__generated__/WeakModel____relay_model_instance.graphql.js b/packages/react-relay/__tests__/__generated__/WeakModel____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..a240584215bb8 --- /dev/null +++ b/packages/react-relay/__tests__/__generated__/WeakModel____relay_model_instance.graphql.js @@ -0,0 +1,61 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { WeakModel } from "../RelayResolverNullableModelClientEdge-test.js"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type WeakModel____relay_model_instance$fragmentType: FragmentType; +export type WeakModel____relay_model_instance$data = {| + +__relay_model_instance: WeakModel, + +$fragmentType: WeakModel____relay_model_instance$fragmentType, +|}; +export type WeakModel____relay_model_instance$key = { + +$data?: WeakModel____relay_model_instance$data, + +$fragmentSpreads: WeakModel____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "WeakModel____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "WeakModel", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + WeakModel____relay_model_instance$fragmentType, + WeakModel____relay_model_instance$data, +>*/); diff --git a/packages/react-relay/__tests__/mockScheduler.js b/packages/react-relay/__tests__/mockScheduler.js new file mode 100644 index 0000000000000..a38660d759e1b --- /dev/null +++ b/packages/react-relay/__tests__/mockScheduler.js @@ -0,0 +1,36 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow + * @format + * @oncall relay + */ + +const invariant = require('invariant'); +const Scheduler = require('scheduler/unstable_mock'); + +// The version of scheduler we get internally is always the latest. However, the +// version we get on GitHub is a transitive dependency from `react-test-renderer`. +// +// Some methods in the unstable_mock have been renamed between these two +// versions. This mock file provides a centralized place to reconcile those +// differences so that the same tests can work both internally and on GitHub. + +if (Scheduler.log == null) { + invariant( + Scheduler.unstable_yieldValue != null, + 'Expected to find one of log or unstable_yieldValue', + ); + Scheduler.log = Scheduler.unstable_yieldValue; +} +if (Scheduler.unstable_clearLog == null) { + invariant( + Scheduler.unstable_clearYields != null, + 'Expected to find one of unstable_clearLog or unstable_clearYields', + ); + Scheduler.unstable_clearLog = Scheduler.unstable_clearYields; +} +module.exports = Scheduler; diff --git a/packages/react-relay/buildReactRelayContainer.js b/packages/react-relay/buildReactRelayContainer.js index daf90bc7ca603..5974eadbddd2c 100644 --- a/packages/react-relay/buildReactRelayContainer.js +++ b/packages/react-relay/buildReactRelayContainer.js @@ -52,10 +52,13 @@ function buildReactRelayContainer>( const Container = createContainerWithFragments(ComponentClass, fragments); Container.displayName = containerName; - function forwardRef( + function ForwardRef( props: any, - ref: ((null | any) => mixed) | {current: null | any, ...}, + ref: + | ((null | React$ElementRef) => mixed) + | {-current: null | React$ElementRef, ...}, ) { + // $FlowFixMe[react-rule-hook] const context = useContext(ReactRelayContext); invariant( context != null, @@ -64,6 +67,7 @@ function buildReactRelayContainer>( containerName, containerName, ); + // $FlowFixMe[react-rule-hook] const queryRendererContext = useContext(ReactRelayQueryRendererContext); return ( @@ -77,8 +81,8 @@ 
function buildReactRelayContainer>( /> ); } - forwardRef.displayName = containerName; - const ForwardContainer = React.forwardRef(forwardRef); + ForwardRef.displayName = containerName; + const ForwardContainer = React.forwardRef(ForwardRef); if (__DEV__) { // Used by RelayModernTestUtils diff --git a/packages/react-relay/getRootVariablesForFragments.js b/packages/react-relay/getRootVariablesForFragments.js index 36a70ff176a5e..50e84507bcf10 100644 --- a/packages/react-relay/getRootVariablesForFragments.js +++ b/packages/react-relay/getRootVariablesForFragments.js @@ -25,6 +25,7 @@ function getRootVariablesForFragments( // should all point to the same owner Object.keys(fragments).forEach(key => { const fragmentNode = fragments[key]; + // $FlowFixMe[invalid-computed-prop] const fragmentRef = props[key]; const selector = getSelector(fragmentNode, fragmentRef); const fragmentOwnerVariables = diff --git a/packages/react-relay/hooks.js b/packages/react-relay/hooks.js index 9a22f49fbe0ed..41e42d556c739 100644 --- a/packages/react-relay/hooks.js +++ b/packages/react-relay/hooks.js @@ -42,7 +42,7 @@ export type { RefetchFn, RefetchFnDynamic, Options as RefetchOptions, -} from './relay-hooks/useRefetchableFragmentNode'; +} from './relay-hooks/legacy/useRefetchableFragmentNode'; export type { DataID, DeclarativeMutationConfig, diff --git a/packages/react-relay/index.js b/packages/react-relay/index.js index e69c520508c49..814ba78867c92 100644 --- a/packages/react-relay/index.js +++ b/packages/react-relay/index.js @@ -59,7 +59,7 @@ export type { RefetchFn, RefetchFnDynamic, Options as RefetchOptions, -} from './relay-hooks/useRefetchableFragmentNode'; +} from './relay-hooks/legacy/useRefetchableFragmentNode'; export type { DataID, DeclarativeMutationConfig, diff --git a/packages/react-relay/multi-actor/__tests__/ActorChange-test.js b/packages/react-relay/multi-actor/__tests__/ActorChange-test.js index 07f120bffb576..144cc098f0fee 100644 --- 
a/packages/react-relay/multi-actor/__tests__/ActorChange-test.js +++ b/packages/react-relay/multi-actor/__tests__/ActorChange-test.js @@ -131,7 +131,7 @@ type Props = $ReadOnly<{ function ActorMessage(props: Props) { const data = useFragment(fragment, props.myFragment); - const [commit] = useMutation<$FlowFixMe>(mutation); + const [commit] = useMutation(mutation); // We're calling this hook only to verify that it won't throw. // `useRelayActorEnvironment` should be able to have access to `getEnvironmentForActor` function @@ -146,7 +146,9 @@ function ActorMessage(props: Props) { onClick={() => commit({ variables: { - feedbackID: 'feedback:1234', + input: { + feedbackId: 'feedback:1234', + }, }, }) } @@ -170,7 +172,7 @@ describe('ActorChange', () => { createNetworkForActor: actorIdentifier => Network.create((...args) => fetchFnForActor(actorIdentifier, ...args)), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); environment = multiActorEnvironment.forActor( getActorIdentifier('actor:1234'), diff --git a/packages/react-relay/multi-actor/__tests__/ActorChangeWithDefer-test.js b/packages/react-relay/multi-actor/__tests__/ActorChangeWithDefer-test.js index d5274ffc97d2b..a7170ab872a70 100644 --- a/packages/react-relay/multi-actor/__tests__/ActorChangeWithDefer-test.js +++ b/packages/react-relay/multi-actor/__tests__/ActorChangeWithDefer-test.js @@ -152,7 +152,7 @@ describe('ActorChange with @defer', () => { createNetworkForActor: actorIdentifier => Network.create((...args) => fetchFnForActor(...args)), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); environment = multiActorEnvironment.forActor( getActorIdentifier('actor:1234'), diff --git a/packages/react-relay/multi-actor/__tests__/ActorChangeWithMutation-test.js b/packages/react-relay/multi-actor/__tests__/ActorChangeWithMutation-test.js index cddec48250bfb..a3f0ea1415592 100644 --- 
a/packages/react-relay/multi-actor/__tests__/ActorChangeWithMutation-test.js +++ b/packages/react-relay/multi-actor/__tests__/ActorChangeWithMutation-test.js @@ -13,7 +13,6 @@ import type {ActorIdentifier} from '../../../relay-runtime/multi-actor-environment/ActorIdentifier'; import type {ActorChangeWithMutationTestFragment$key} from './__generated__/ActorChangeWithMutationTestFragment.graphql'; -import type {ActorChangeWithMutationTestMutation} from './__generated__/ActorChangeWithMutationTestMutation.graphql'; import type { IActorEnvironment, IMultiActorEnvironment, @@ -144,7 +143,7 @@ type Props = $ReadOnly<{ function ActorComponent(props: Props) { const data = useFragment(fragment, props.fragmentKey); - const [commit] = useMutation(mutation); + const [commit] = useMutation(mutation); props.render({ id: data.id, @@ -200,7 +199,7 @@ describe('ActorChange', () => { createNetworkForActor: actorIdentifier => Network.create((...args) => fetchFnForActor(actorIdentifier, ...args)), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); environment = multiActorEnvironment.forActor( getActorIdentifier('actor:1234'), diff --git a/packages/react-relay/multi-actor/__tests__/ActorChangeWithStream-test.js b/packages/react-relay/multi-actor/__tests__/ActorChangeWithStream-test.js index 08c7424695aa0..82a8bbf0d0e2b 100644 --- a/packages/react-relay/multi-actor/__tests__/ActorChangeWithStream-test.js +++ b/packages/react-relay/multi-actor/__tests__/ActorChangeWithStream-test.js @@ -141,7 +141,7 @@ describe('ActorChange with @stream', () => { createNetworkForActor: actorIdentifier => Network.create((...args) => fetchFnForActor(...args)), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); environment = multiActorEnvironment.forActor( getActorIdentifier('actor:1234'), diff --git a/packages/react-relay/multi-actor/useRelayActorEnvironment.js b/packages/react-relay/multi-actor/useRelayActorEnvironment.js index 
aa88e5fa26cb7..71c2fe95fc2ac 100644 --- a/packages/react-relay/multi-actor/useRelayActorEnvironment.js +++ b/packages/react-relay/multi-actor/useRelayActorEnvironment.js @@ -20,7 +20,7 @@ const ReactRelayContext = require('./../ReactRelayContext'); const invariant = require('invariant'); const {useContext} = require('react'); -function useRelayActorEnvironment( +hook useRelayActorEnvironment( actorIdentifier: ActorIdentifier, ): IActorEnvironment { const context = useContext(ReactRelayContext); diff --git a/packages/react-relay/package.json b/packages/react-relay/package.json index 448a8ddfd0b24..9bdb425399ebe 100644 --- a/packages/react-relay/package.json +++ b/packages/react-relay/package.json @@ -1,7 +1,7 @@ { "name": "react-relay", "description": "A framework for building GraphQL-driven React applications.", - "version": "15.0.0", + "version": "17.0.0", "keywords": [ "graphql", "relay", @@ -20,7 +20,7 @@ "fbjs": "^3.0.2", "invariant": "^2.2.4", "nullthrows": "^1.1.1", - "relay-runtime": "15.0.0" + "relay-runtime": "17.0.0" }, "peerDependencies": { "react": "^16.9.0 || ^17 || ^18" diff --git a/packages/react-relay/relay-hooks/EntryPointContainer.react.js b/packages/react-relay/relay-hooks/EntryPointContainer.react.js index 1b002880e59eb..669b776f0085c 100644 --- a/packages/react-relay/relay-hooks/EntryPointContainer.react.js +++ b/packages/react-relay/relay-hooks/EntryPointContainer.react.js @@ -23,10 +23,15 @@ const {useContext, useEffect} = require('react'); const warning = require('warning'); function EntryPointContainer< + // $FlowFixMe[unsupported-variance-annotation] +TPreloadedQueries: {...}, + // $FlowFixMe[unsupported-variance-annotation] +TPreloadedNestedEntryPoints: {...}, + // $FlowFixMe[unsupported-variance-annotation] +TRuntimeProps: {...}, + // $FlowFixMe[unsupported-variance-annotation] +TExtraProps, + // $FlowFixMe[unsupported-variance-annotation] +TEntryPointComponent: EntryPointComponent< TPreloadedQueries, TPreloadedNestedEntryPoints, diff 
--git a/packages/react-relay/relay-hooks/EntryPointTypes.flow.js b/packages/react-relay/relay-hooks/EntryPointTypes.flow.js index c7b6acb9c1672..eed9a56677912 100644 --- a/packages/react-relay/relay-hooks/EntryPointTypes.flow.js +++ b/packages/react-relay/relay-hooks/EntryPointTypes.flow.js @@ -14,7 +14,7 @@ /* eslint-disable no-unused-vars */ import type {JSResourceReference} from 'JSResourceReference'; -import type {AbstractComponent, ElementConfig} from 'React'; +import type {AbstractComponent, ElementConfig} from 'react'; import type { CacheConfig, FetchPolicy, @@ -45,12 +45,15 @@ export type LoadQueryOptions = { +__nameForWarning?: ?string, }; -// Note: the phantom type parameter here helps ensures that the -// $Parameters.js value matches the type param provided to preloadQuery. -// eslint-disable-next-line no-unused-vars export type PreloadableConcreteRequest = { kind: 'PreloadableConcreteRequest', params: RequestParameters, + // Note: the phantom type parameter here helps ensures that the + // $Parameters.js value matches the type param provided to preloadQuery. 
+ // We also need to add usage of this generic here, + // becuase not using the generic in the definition makes it + // unconstrained in the call to a function that accepts PreloadableConcreteRequest + __phantom__?: ?TQuery, }; export type EnvironmentProviderOptions = {+[string]: mixed, ...}; @@ -131,12 +134,12 @@ defined during component runtime TExtraProps - a bag of extra props that you may define in `entrypoint` file and they will be passed to the EntryPointComponent as `extraProps` */ -type InternalEntryPointRepresentation< - +TEntryPointParams, +export type InternalEntryPointRepresentation< + TEntryPointParams, TPreloadedQueries, - TPreloadedEntryPoints, - TRuntimeProps, - TExtraProps, + TPreloadedEntryPoints = {...}, + TRuntimeProps = {...}, + TExtraProps = null, > = $ReadOnly<{ getPreloadProps: ( entryPointParams: TEntryPointParams, @@ -192,8 +195,11 @@ export type PreloadProps< TExtraProps = null, TEnvironmentProviderOptions = EnvironmentProviderOptions, > = $ReadOnly<{ - entryPoints?: $ObjMap, + entryPoints?: { + +[K in keyof TPreloadedEntryPoints]?: ?ThinNestedEntryPointParams, + }, extraProps?: TExtraProps, + // $FlowFixMe[deprecated-type] queries?: $ObjMap< TPreloadedQueries, ExtractQueryTypeHelper, @@ -211,22 +217,23 @@ export type PreloadedEntryPoint = $ReadOnly<{ rootModuleID: string, }>; -type _ComponentFromEntryPoint = < - +TPreloadParams, - +TComponent, - +TEntryPoint: EntryPoint, ->( - TEntryPoint, -) => TComponent; - -type ComponentFromEntryPoint<+TEntryPoint> = $Call< - _ComponentFromEntryPoint, - TEntryPoint, ->; - -export type EntryPointElementConfig<+TEntryPoint> = ElementConfig< - ComponentFromEntryPoint, ->['props']; +export type EntryPointElementConfig< + // $FlowExpectedError[unclear-type] Need any to make it supertype of all InternalEntryPointRepresentation + +TEntryPoint: InternalEntryPointRepresentation, +> = + TEntryPoint extends InternalEntryPointRepresentation< + // $FlowExpectedError[unclear-type] Need any to make it supertype of 
all InternalEntryPointRepresentation + any, + // $FlowExpectedError[unclear-type] Need any to make it supertype of all InternalEntryPointRepresentation + any, + // $FlowExpectedError[unclear-type] Need any to make it supertype of all InternalEntryPointRepresentation + any, + infer Props, + // $FlowExpectedError[unclear-type] Need any to make it supertype of all InternalEntryPointRepresentation + any, + > + ? Props + : empty; export type ThinQueryParams< TQuery: OperationType, @@ -238,26 +245,18 @@ export type ThinQueryParams< variables: TQuery['variables'], }>; -type ThinNestedEntryPointParams = $ReadOnly<{ - entryPoint: TEntryPoint, - entryPointParams: TEntryPointParams, -}>; +/** + * We make the type of `ThinNestedEntryPointParams` opaque, so that the only way + * to construct a `ThinNestedEntryPointParams` is by calling `NestedRelayEntryPoint` + * from `NestedRelayEntryPointBuilderUtils` module. + */ +declare export opaque type ThinNestedEntryPointParams; export type ExtractQueryTypeHelper = ( PreloadedQuery, ) => ThinQueryParams; -export type ExtractEntryPointTypeHelper = < - TEntryPointParams, - TEntryPointComponent, ->( - ?PreloadedEntryPoint, -) => ?ThinNestedEntryPointParams< - TEntryPointParams, - EntryPoint, ->; - -export type EntryPoint<+TEntryPointParams, +TEntryPointComponent> = +export type EntryPoint = InternalEntryPointRepresentation< TEntryPointParams, ElementConfig['queries'], @@ -266,12 +265,7 @@ export type EntryPoint<+TEntryPointParams, +TEntryPointComponent> = ElementConfig['extraProps'], >; -type ExtractFirstParam = ((P) => R) => P; -type GetPreloadPropsType = T['getPreloadProps']; -export type PreloadParamsOf = $Call< - ExtractFirstParam, - GetPreloadPropsType, ->; +export type PreloadParamsOf = Parameters[0]; export type IEnvironmentProvider = { getEnvironment: (options: ?TOptions) => IEnvironment, diff --git a/packages/react-relay/relay-hooks/HooksImplementation.js b/packages/react-relay/relay-hooks/HooksImplementation.js deleted file mode 
100644 index 50212a8c8f685..0000000000000 --- a/packages/react-relay/relay-hooks/HooksImplementation.js +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import typeof useFragment from './useFragment'; -import type {UsePaginationFragmentType} from './usePaginationFragment'; -import type {UseRefetchableFragmentType} from './useRefetchableFragment'; - -const warning = require('warning'); - -type HooksImplementation = { - useFragment: useFragment, - usePaginationFragment: UsePaginationFragmentType, - useRefetchableFragment: UseRefetchableFragmentType, -}; - -let implementation: HooksImplementation | null = null; - -function inject(impl: HooksImplementation): void { - warning( - implementation !== null, - 'Relay HooksImplementation was injected twice.', - ); - implementation = impl; -} - -function get(): HooksImplementation | null { - return implementation; -} - -module.exports = { - inject, - get, -}; diff --git a/packages/react-relay/relay-hooks/LazyLoadEntryPointContainer_DEPRECATED.react.js b/packages/react-relay/relay-hooks/LazyLoadEntryPointContainer_DEPRECATED.react.js index 293b339d8bfae..f937c30f4bb18 100644 --- a/packages/react-relay/relay-hooks/LazyLoadEntryPointContainer_DEPRECATED.react.js +++ b/packages/react-relay/relay-hooks/LazyLoadEntryPointContainer_DEPRECATED.react.js @@ -85,8 +85,10 @@ function prepareEntryPoint< } const preloadProps = entryPoint.getPreloadProps(entryPointParams); const {queries, entryPoints, extraProps} = preloadProps; - const preloadedQueries: $Shape = {}; - const preloadedEntryPoints: $Shape = {}; + // $FlowFixMe[incompatible-type] + const preloadedQueries: Partial = {}; + // $FlowFixMe[incompatible-type] + const preloadedEntryPoints: Partial = {}; if (queries != null) { const 
queriesPropNames = Object.keys(queries); queriesPropNames.forEach(queryPropName => { @@ -173,7 +175,20 @@ function LazyLoadEntryPointContainer_DEPRECATED< const entryPointParamsHash = stableStringify(entryPointParams); const {getComponent, queries, entryPoints, extraProps, rootModuleID} = useMemo(() => { - return prepareEntryPoint( + return prepareEntryPoint< + TEntryPointParams, + TPreloadedQueries, + TPreloadedEntryPoints, + TRuntimeProps, + TExtraProps, + EntryPointComponent< + TPreloadedQueries, + TPreloadedEntryPoints, + TRuntimeProps, + TExtraProps, + >, + _, + >( environmentProvider ?? { getEnvironment: () => environment, }, diff --git a/packages/react-relay/relay-hooks/NestedRelayEntryPointBuilderUtils.js b/packages/react-relay/relay-hooks/NestedRelayEntryPointBuilderUtils.js new file mode 100644 index 0000000000000..856ba2ec7427a --- /dev/null +++ b/packages/react-relay/relay-hooks/NestedRelayEntryPointBuilderUtils.js @@ -0,0 +1,45 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type { + InternalEntryPointRepresentation, + ThinNestedEntryPointParams, +} from './EntryPointTypes.flow'; + +/** + * This is an identity function to construct a type safe nested entrypoint. + * By calling this function, we ensure that the type of entryPointParams matches + * exactly the type of preloadProps of the entrypoint. + * + * We make the type of `ThinNestedEntryPointParams` opaque, so that the only way + * to construct a `ThinNestedEntryPointParams` is by calling this function. 
+ */ +declare function NestedRelayEntryPoint( + $ReadOnly<{ + entryPoint: InternalEntryPointRepresentation< + TEntryPointParams, + $FlowFixMe, + $FlowFixMe, + $FlowFixMe, + $FlowFixMe, + >, + entryPointParams: TEntryPointParams, + }>, +): ThinNestedEntryPointParams; + +// eslint-disable-next-line no-redeclare +function NestedRelayEntryPoint

(params: P): P { + return params; +} + +export {NestedRelayEntryPoint}; diff --git a/packages/react-relay/relay-hooks/SuspenseResource.js b/packages/react-relay/relay-hooks/SuspenseResource.js index 781793143a2fe..ede73c0f62e96 100644 --- a/packages/react-relay/relay-hooks/SuspenseResource.js +++ b/packages/react-relay/relay-hooks/SuspenseResource.js @@ -13,7 +13,7 @@ import type {Disposable, IEnvironment} from 'relay-runtime'; -const invariant = require('invariant'); +const warning = require('warning'); const TEMPORARY_RETAIN_DURATION_MS = 5 * 60 * 1000; @@ -38,13 +38,16 @@ class SuspenseResource { dispose: () => { this._retainCount = Math.max(0, this._retainCount - 1); if (this._retainCount === 0) { - invariant( - this._retainDisposable != null, - 'Relay: Expected disposable to release query to be defined.' + - "If you're seeing this, this is likely a bug in Relay.", - ); - this._retainDisposable.dispose(); - this._retainDisposable = null; + if (this._retainDisposable != null) { + this._retainDisposable.dispose(); + this._retainDisposable = null; + } else { + warning( + false, + 'Relay: Expected disposable to release query to be defined.' 
+ + "If you're seeing this, this is likely a bug in Relay.", + ); + } } }, }; diff --git a/packages/react-relay/relay-hooks/__flowtests__/EntryPointTypes/NestedEntrypoints-flowtest.js b/packages/react-relay/relay-hooks/__flowtests__/EntryPointTypes/NestedEntrypoints-flowtest.js index c722ac27970f5..849593768a9f1 100644 --- a/packages/react-relay/relay-hooks/__flowtests__/EntryPointTypes/NestedEntrypoints-flowtest.js +++ b/packages/react-relay/relay-hooks/__flowtests__/EntryPointTypes/NestedEntrypoints-flowtest.js @@ -18,6 +18,8 @@ import type { } from '../../EntryPointTypes.flow'; import type {JSResourceReference} from 'JSResourceReference'; +import {NestedRelayEntryPoint} from '../../NestedRelayEntryPointBuilderUtils'; + declare function mockJSResource( module: TModule, ): JSResourceReference; @@ -65,14 +67,14 @@ type BadParentEntrypointParams = $ReadOnly<{}>; getPreloadProps(_params: BadParentEntrypointParams) { return { entryPoints: { - nestedComponent: { - entryPoint: NestedEntryPoint, + nestedComponent: NestedRelayEntryPoint({ /** $FlowExpectedError The entryPointParams here should be of type NestedEntrypointPreloadParams, but it does not contain subEntrypointPreloadParam */ + entryPoint: NestedEntryPoint, entryPointParams: Object.freeze({}), - }, + }), }, }; }, @@ -90,13 +92,13 @@ type GoodParentEntrypointParams = $ReadOnly<{}>; getPreloadProps(_params: GoodParentEntrypointParams) { return { entryPoints: { - nestedComponent: { + nestedComponent: NestedRelayEntryPoint({ entryPoint: NestedEntryPoint, // No flow error since this matches NestedEntrypointPreloadParams entryPointParams: { subEntrypointPreloadParam: 'test', }, - }, + }), }, }; }, diff --git a/packages/react-relay/relay-hooks/__flowtests__/useBlockingPaginationFragment-flowtest.js b/packages/react-relay/relay-hooks/__flowtests__/useBlockingPaginationFragment-flowtest.js index 89d267a360a92..b4a1c566939e1 100644 --- 
a/packages/react-relay/relay-hooks/__flowtests__/useBlockingPaginationFragment-flowtest.js +++ b/packages/react-relay/relay-hooks/__flowtests__/useBlockingPaginationFragment-flowtest.js @@ -19,7 +19,7 @@ import type { } from './utils'; import type {IEnvironment, Variables} from 'relay-runtime'; -import useBlockingPaginationFragment from '../useBlockingPaginationFragment'; +import useBlockingPaginationFragment from '../legacy/useBlockingPaginationFragment'; import { fragmentData, keyAnotherNonNullable, @@ -45,23 +45,34 @@ type ExpectedReturnType< /* eslint-disable react-hooks/rules-of-hooks */ // Nullability of returned data type is correct +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[react-rule-hook] (useBlockingPaginationFragment( refetchableFragmentInput, keyNonNullable, ): ExpectedReturnType); +// $FlowFixMe[react-rule-hook] (useBlockingPaginationFragment( refetchableFragmentInput, keyNullable, ): ExpectedReturnType); // $FlowExpectedError: can't cast nullable to non-nullable +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-cast] (useBlockingPaginationFragment( refetchableFragmentInput, keyNullable, ): ExpectedReturnType); // $FlowExpectedError: actual type of returned data is correct +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] (useBlockingPaginationFragment( refetchableFragmentInput, // $FlowExpectedError[incompatible-call] @@ -69,6 +80,7 @@ type ExpectedReturnType< ): ExpectedReturnType); // $FlowExpectedError[incompatible-call] `Example_user$fragmentType` is incompatible with `FragmentType` +// $FlowFixMe[react-rule-hook] (useBlockingPaginationFragment( refetchableFragmentInput, // $FlowExpectedError[incompatible-call] @@ -76,18 +88,28 @@ type ExpectedReturnType< ): ExpectedReturnType); // $FlowExpectedError: Key should not be a user provided object +// $FlowFixMe[react-rule-hook] +// 
$FlowFixMe[prop-missing] +// $FlowFixMe[cannot-resolve-name] useBlockingPaginationFragment(fragmentInput, {abc: 123}); // $FlowExpectedError: Key should not be an empty object +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] +// $FlowFixMe[cannot-resolve-name] useBlockingPaginationFragment(fragmentInput, {}); // $FlowExpectedError: Key should be the `$key` type from generated flow +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] +// $FlowFixMe[cannot-resolve-name] useBlockingPaginationFragment(fragmentInput, fragmentData); // Refetch function options: declare var variables: QueryVariables; declare var environment: IEnvironment; +// $FlowFixMe[react-rule-hook] const {refetch} = useBlockingPaginationFragment( refetchableFragmentInput, keyNonNullable, @@ -106,15 +128,19 @@ refetch(variables, { declare var extraVariables: {nickname: string}; declare var invalidVariables: {foo: string}; +// $FlowFixMe[react-rule-hook] const {loadNext} = useBlockingPaginationFragment( refetchableFragmentInput, keyNonNullable, ); // Accepts extraVariables loadNext(10, { + // $FlowFixMe[prop-missing] + // $FlowFixMe[incompatible-call] UNSTABLE_extraVariables: extraVariables, }); +// $FlowFixMe[prop-missing] loadNext(10, { // $FlowExpectedError: doesn't accept variables not available in the Flow type UNSTABLE_extraVariables: invalidVariables, diff --git a/packages/react-relay/relay-hooks/__flowtests__/usePaginationFragment-flowtest.js b/packages/react-relay/relay-hooks/__flowtests__/usePaginationFragment-flowtest.js index 5c2cccd49f55a..392b4978506f9 100644 --- a/packages/react-relay/relay-hooks/__flowtests__/usePaginationFragment-flowtest.js +++ b/packages/react-relay/relay-hooks/__flowtests__/usePaginationFragment-flowtest.js @@ -47,29 +47,42 @@ type ExpectedReturnType< /* eslint-disable react-hooks/rules-of-hooks */ // Nullability of returned data type is correct +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] +// $FlowFixMe[incompatible-exact] +// 
$FlowFixMe[react-rule-hook] (usePaginationFragment( refetchableFragmentInput, keyNonNullable, ): ExpectedReturnType); +// $FlowFixMe[react-rule-hook] (usePaginationFragment( refetchableFragmentInput, keyNullable, ): ExpectedReturnType); // $FlowExpectedError: can't cast nullable to non-nullable +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-cast] (usePaginationFragment( refetchableFragmentInput, keyNullable, ): ExpectedReturnType); // $FlowExpectedError: actual type of returned data is correct +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] (usePaginationFragment( refetchableFragmentInput, // $FlowFixMe[incompatible-call] keyAnotherNonNullable, ): ExpectedReturnType); // $FlowExpectedError +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-cast] (usePaginationFragment( refetchableFragmentInput, // $FlowFixMe[incompatible-call] @@ -77,18 +90,25 @@ type ExpectedReturnType< ): ExpectedReturnType); // $FlowExpectedError: Key should not be a user provided object +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] usePaginationFragment(refetchableFragmentInput, {abc: 123}); // $FlowExpectedError: Key should not be an empty object +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] usePaginationFragment(refetchableFragmentInput, {}); // $FlowExpectedError: Key should be the `$key` type from generated flow +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] usePaginationFragment(refetchableFragmentInput, fragmentData); // Refetch function options: declare var variables: QueryVariables; declare var environment: IEnvironment; +// $FlowFixMe[react-rule-hook] const {refetch} = usePaginationFragment( refetchableFragmentInput, keyNonNullable, @@ -107,15 +127,19 @@ refetch(variables, { declare var extraVariables: {nickname: string}; declare var invalidVariables: {foo: string}; +// $FlowFixMe[react-rule-hook] const {loadNext} = usePaginationFragment( 
refetchableFragmentInput, keyNonNullable, ); // Accepts extraVariables loadNext(10, { + // $FlowFixMe[prop-missing] + // $FlowFixMe[incompatible-call] UNSTABLE_extraVariables: extraVariables, }); +// $FlowFixMe[prop-missing] loadNext(10, { // $FlowExpectedError: doesn't accept variables not available in the Flow type UNSTABLE_extraVariables: invalidVariables, diff --git a/packages/react-relay/relay-hooks/__flowtests__/useRefetchableFragment-flowtest.js b/packages/react-relay/relay-hooks/__flowtests__/useRefetchableFragment-flowtest.js index 5fa4f41928044..1dff30df69495 100644 --- a/packages/react-relay/relay-hooks/__flowtests__/useRefetchableFragment-flowtest.js +++ b/packages/react-relay/relay-hooks/__flowtests__/useRefetchableFragment-flowtest.js @@ -31,53 +31,77 @@ import { /* eslint-disable react-hooks/rules-of-hooks */ // Nullability of returned data type is correct +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[react-rule-hook] (useRefetchableFragment(refetchableFragmentInput, keyNonNullable): [ NonNullableData, FetchFn, ]); +// $FlowFixMe[react-rule-hook] (useRefetchableFragment(refetchableFragmentInput, keyNullable): [ NullableData, FetchFn, ]); // $FlowExpectedError: can't cast nullable to non-nullable +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-cast] (useRefetchableFragment(refetchableFragmentInput, keyNullable): [ NonNullableData, FetchFn, ]); // $FlowExpectedError: refetch requires exact type if key is nullable +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[prop-missing] (useRefetchableFragment(refetchableFragmentInput, keyNullable): [ NullableData, FetchFn, ]); // $FlowExpectedError: actual type of returned data is correct +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-call] +// $FlowFixMe[incompatible-exact] +// $FlowFixMe[prop-missing] +// $FlowFixMe[incompatible-cast] (useRefetchableFragment(refetchableFragmentInput, 
keyAnotherNonNullable): [ NonNullableData, FetchFn, ]); // $FlowExpectedError - incompatible key types +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[incompatible-call] (useRefetchableFragment(refetchableFragmentInput, keyAnotherNullable): [ NullableData, FetchFn, ]); // $FlowExpectedError: Key should not be a user provided object +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] useRefetchableFragment(refetchableFragmentInput, {abc: 123}); // $FlowExpectedError: Key should not be an empty object +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] useRefetchableFragment(refetchableFragmentInput, {}); // $FlowExpectedError: Key should be the `$key` type from generated flow +// $FlowFixMe[react-rule-hook] +// $FlowFixMe[prop-missing] useRefetchableFragment(refetchableFragmentInput, fragmentData); // Refetch function options: declare var variables: QueryVariables; declare var environment: IEnvironment; +// $FlowFixMe[react-rule-hook] const [, refetch] = useRefetchableFragment( refetchableFragmentInput, keyNonNullable, diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-ClientEdges-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-ClientEdges-test.js index 4bd4aceaaa03b..8e4020b7b7fc2 100644 --- a/packages/react-relay/relay-hooks/__tests__/FragmentResource-ClientEdges-test.js +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-ClientEdges-test.js @@ -13,7 +13,7 @@ const { getFragmentResourceForEnvironment, -} = require('react-relay/relay-hooks/FragmentResource'); +} = require('react-relay/relay-hooks/legacy/FragmentResource'); const {RelayFeatureFlags, getFragment} = require('relay-runtime'); const {graphql} = require('relay-runtime/query/GraphQLTag'); const { @@ -30,12 +30,10 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - 
RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); const BASIC_QUERY = graphql` diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-Resolver-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-Resolver-test.js index 18e3db88faf1f..eb54f7d0e191e 100644 --- a/packages/react-relay/relay-hooks/__tests__/FragmentResource-Resolver-test.js +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-Resolver-test.js @@ -11,20 +11,21 @@ 'use strict'; +import type {RelayFieldLoggerEvent} from 'relay-runtime/store/RelayStoreTypes'; + const { getFragmentResourceForEnvironment, -} = require('react-relay/relay-hooks/FragmentResource'); +} = require('react-relay/relay-hooks/legacy/FragmentResource'); const {RelayFeatureFlags, getFragment} = require('relay-runtime'); const {graphql} = require('relay-runtime/query/GraphQLTag'); const { createOperationDescriptor, } = require('relay-runtime/store/RelayModernOperationDescriptor'); const {createMockEnvironment} = require('relay-test-utils-internal'); - const { disallowConsoleErrors, disallowWarnings, -} = require(`relay-test-utils-internal`); +} = require('relay-test-utils-internal'); disallowConsoleErrors(); disallowWarnings(); @@ -58,24 +59,12 @@ describe('FragmentResource RelayResolver behavior', () => { let query; let fragmentNode; let fragmentRef; - let mockRequiredFieldLogger; + let mockRelayFieldLogger; beforeEach(() => { - mockRequiredFieldLogger = jest.fn< - [ - | {+fieldPath: string, +kind: 'missing_field.log', +owner: string} - | {+fieldPath: string, +kind: 'missing_field.throw', +owner: string} - | { - +error: Error, - +fieldPath: string, - +kind: 'relay_resolver.error', - +owner: string, - }, - ], - void, - >(); + mockRelayFieldLogger = jest.fn<[RelayFieldLoggerEvent], void>(); environment = createMockEnvironment({ - requiredFieldLogger: mockRequiredFieldLogger, + relayFieldLogger: mockRelayFieldLogger, }); FragmentResource = getFragmentResourceForEnvironment(environment); query 
= createOperationDescriptor(BASIC_QUERY, {id: '1'}); @@ -98,9 +87,9 @@ describe('FragmentResource RelayResolver behavior', () => { it('Reports an error to the logger when a resolver field throws an error.', async () => { FragmentResource.read(fragmentNode, fragmentRef, 'componentDisplayName'); - expect(environment.requiredFieldLogger).toHaveBeenCalledTimes(1); + expect(environment.relayFieldLogger).toHaveBeenCalledTimes(1); - const event = mockRequiredFieldLogger.mock.calls[0][0]; + const event = mockRelayFieldLogger.mock.calls[0][0]; if (event.kind !== 'relay_resolver.error') { throw new Error( "Expected log event to be of kind 'relay_resolver.error'", diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-SemanticNonNull-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-SemanticNonNull-test.js new file mode 100644 index 0000000000000..cf5a649b71177 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-SemanticNonNull-test.js @@ -0,0 +1,120 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow + * @format + * @oncall relay + */ + +'use strict'; + +import {getFragmentResourceForEnvironment} from '../legacy/FragmentResource'; +import { + RelayFeatureFlags, + createOperationDescriptor, + graphql, +} from 'relay-runtime'; +import RelayNetwork from 'relay-runtime/network/RelayNetwork'; +import RelayModernEnvironment from 'relay-runtime/store/RelayModernEnvironment'; +import RelayModernStore from 'relay-runtime/store/RelayModernStore'; +import RelayRecordSource from 'relay-runtime/store/RelayRecordSource'; + +const componentDisplayName = 'TestComponent'; + +let query; +let FragmentResource; + +beforeEach(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + id: '1', + name: null, + __errors: { + name: [ + { + message: 'There was an error!', + path: ['me', 'name'], + }, + ], + }, + }, + }); + + const store = new RelayModernStore(source, { + gcReleaseBufferSize: 0, + }); + + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + FragmentResource = getFragmentResourceForEnvironment(environment); + + query = createOperationDescriptor( + graphql` + query FragmentResourceSemanticNonNullTestQuery($id: ID!) 
{ + node(id: $id) { + __typename + ...FragmentResourceSemanticNonNullTestFragment1 + ...FragmentResourceSemanticNonNullTestFragment2 + } + } + `, + {id: '1'}, + ); +}); + +test('Throws if a field has error with explicit error handling enabled', () => { + expect(() => { + FragmentResource.read( + graphql` + fragment FragmentResourceSemanticNonNullTestFragment1 on User + @throwOnFieldError { + name + } + `, + { + __id: '1', + __fragments: { + FragmentResourceSemanticNonNullTestFragment1: {}, + }, + __fragmentOwner: query.request, + }, + componentDisplayName, + ); + }).toThrowError( + 'Relay: Unexpected response payload - this object includes an errors property in which you can access the underlying errors', + ); +}); + +test('Does not throw if a field has error without explicit error handling enabled', () => { + expect(() => { + FragmentResource.read( + graphql` + fragment FragmentResourceSemanticNonNullTestFragment2 on User { + name + } + `, + { + __id: '1', + __fragments: { + FragmentResourceSemanticNonNullTestFragment2: {}, + }, + __fragmentOwner: query.request, + }, + componentDisplayName, + ); + }).not.toThrow(); +}); diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTracker-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTracker-test.js index 7e3c02bbba0c9..abd344bf3f735 100644 --- a/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTracker-test.js +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTracker-test.js @@ -16,7 +16,7 @@ import type { } from '../../../relay-runtime/util/NormalizationNode'; import type {LogEvent} from 'relay-runtime/store/RelayStoreTypes'; -const {createFragmentResource} = require('../FragmentResource'); +const {createFragmentResource} = require('../legacy/FragmentResource'); const invariant = require('invariant'); const { createOperationDescriptor, @@ -24,435 +24,447 @@ const { graphql, } = 
require('relay-runtime'); const RelayOperationTracker = require('relay-runtime/store/RelayOperationTracker'); +const RelayFeatureFlags = require('relay-runtime/util/RelayFeatureFlags'); const {createMockEnvironment} = require('relay-test-utils'); const {disallowWarnings} = require('relay-test-utils-internal'); disallowWarnings(); -describe('FragmentResource with Operation Tracker and Missing Data', () => { - const componentName = 'TestComponent'; - let environment; - let NodeQuery; - let ViewerFriendsQuery; - let FriendsPaginationQuery; - let UserFragment; - let PlainUserNameRenderer_name; - let PlainUserNameRenderer_name$normalization; - let FragmentResource; - let operationLoader; - let operationTracker; - let viewerOperation; - let nodeOperation; - let logger; +describe.each([true, false])( + 'FragmentResource with Operation Tracker and Missing Data with ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION=%p', + looseAttribution => { + const componentName = 'TestComponent'; + let environment; + let NodeQuery; + let ViewerFriendsQuery; + let FriendsPaginationQuery; + let UserFragment; + let PlainUserNameRenderer_name; + let PlainUserNameRenderer_name$normalization; + let FragmentResource; + let operationLoader; + let operationTracker; + let viewerOperation; + let nodeOperation; + let logger; - beforeEach(() => { - operationLoader = { - load: jest.fn<[mixed], Promise>(), - get: jest.fn<[mixed], ?NormalizationRootNode>(), - }; - operationTracker = new RelayOperationTracker(); - logger = jest.fn<[LogEvent], void>(); - environment = createMockEnvironment({ - operationTracker, - operationLoader, - log: logger, - }); - NodeQuery = graphql` - query FragmentResourceWithOperationTrackerTestNodeQuery($id: ID!) 
{ - node(id: $id) { - ...FragmentResourceWithOperationTrackerTestUserFragment + beforeEach(() => { + RelayFeatureFlags.ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION = + looseAttribution; + operationLoader = { + load: jest.fn<[mixed], Promise>(), + get: jest.fn<[mixed], ?NormalizationRootNode>(), + }; + operationTracker = new RelayOperationTracker(); + logger = jest.fn<[LogEvent], void>(); + environment = createMockEnvironment({ + operationTracker, + operationLoader, + log: logger, + }); + NodeQuery = graphql` + query FragmentResourceWithOperationTrackerTestNodeQuery($id: ID!) { + node(id: $id) { + ...FragmentResourceWithOperationTrackerTestUserFragment + } } - } - `; - ViewerFriendsQuery = graphql` - query FragmentResourceWithOperationTrackerTestViewerFriendsQuery { - viewer { - actor { - friends(first: 1) @connection(key: "Viewer_friends") { - edges { - node { - ...FragmentResourceWithOperationTrackerTestUserFragment + `; + ViewerFriendsQuery = graphql` + query FragmentResourceWithOperationTrackerTestViewerFriendsQuery { + viewer { + actor { + friends(first: 1) @connection(key: "Viewer_friends") { + edges { + node { + ...FragmentResourceWithOperationTrackerTestUserFragment + } } } } } } - } - `; - FriendsPaginationQuery = graphql` - query FragmentResourceWithOperationTrackerTestFriendsPaginationQuery( - $id: ID! - ) { - node(id: $id) { - ... on User { - friends(first: 1) @connection(key: "Viewer_friends") { - edges { - node { - ...FragmentResourceWithOperationTrackerTestUserFragment + `; + FriendsPaginationQuery = graphql` + query FragmentResourceWithOperationTrackerTestFriendsPaginationQuery( + $id: ID! + ) { + node(id: $id) { + ... 
on User { + friends(first: 1) @connection(key: "Viewer_friends") { + edges { + node { + ...FragmentResourceWithOperationTrackerTestUserFragment + } } } } } } - } - `; - PlainUserNameRenderer_name = graphql` - fragment FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name on PlainUserNameRenderer { - plaintext - data { - text - } - } - `; - graphql` - fragment FragmentResourceWithOperationTrackerTestMarkdownUserNameRenderer_name on MarkdownUserNameRenderer { - markdown - data { - markup + `; + PlainUserNameRenderer_name = graphql` + fragment FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } } - } - `; - PlainUserNameRenderer_name$normalization = require('./__generated__/FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql'); - UserFragment = graphql` - fragment FragmentResourceWithOperationTrackerTestUserFragment on User { - id - name - nameRenderer @match { - ...FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name - @module(name: "PlainUserNameRenderer.react") - ...FragmentResourceWithOperationTrackerTestMarkdownUserNameRenderer_name - @module(name: "MarkdownUserNameRenderer.react") + `; + graphql` + fragment FragmentResourceWithOperationTrackerTestMarkdownUserNameRenderer_name on MarkdownUserNameRenderer { + markdown + data { + markup + } } - plainNameRenderer: nameRenderer - @match( - key: "FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer" - ) { - ...FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name - @module(name: "PlainUserNameRenderer.react") + `; + PlainUserNameRenderer_name$normalization = require('./__generated__/FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql'); + UserFragment = graphql` + fragment FragmentResourceWithOperationTrackerTestUserFragment on User { + id + name + nameRenderer @match { + 
...FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name + @module(name: "PlainUserNameRenderer.react") + ...FragmentResourceWithOperationTrackerTestMarkdownUserNameRenderer_name + @module(name: "MarkdownUserNameRenderer.react") + } + plainNameRenderer: nameRenderer + @match( + key: "FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer" + ) { + ...FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name + @module(name: "PlainUserNameRenderer.react") + } } - } - `; + `; - FragmentResource = createFragmentResource(environment); - viewerOperation = createOperationDescriptor(ViewerFriendsQuery, {}); - nodeOperation = createOperationDescriptor(NodeQuery, { - id: 'user-id-1', - }); - environment.execute({operation: viewerOperation}).subscribe({}); - environment.subscribe( - environment.lookup(viewerOperation.fragment), - jest.fn(), - ); + FragmentResource = createFragmentResource(environment); + viewerOperation = createOperationDescriptor(ViewerFriendsQuery, {}); + nodeOperation = createOperationDescriptor(NodeQuery, { + id: 'user-id-1', + }); + environment.execute({operation: viewerOperation}).subscribe({}); + environment.subscribe( + environment.lookup(viewerOperation.fragment), + jest.fn(), + ); - environment.mock.resolve(viewerOperation, { - data: { - viewer: { - actor: { - id: 'viewer-id', - __typename: 'User', - friends: { - pageInfo: { - hasNextPage: true, - hasPrevPage: false, - startCursor: 'cursor-1', - endCursor: 'cursor-1', - }, - edges: [ - { - cursor: 'cursor-1', - node: { - id: 'user-id-1', - name: 'Alice', - __typename: 'User', - nameRenderer: null, - plainNameRenderer: null, - }, + environment.mock.resolve(viewerOperation, { + data: { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', }, - ], + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + 
__typename: 'User', + nameRenderer: null, + plainNameRenderer: null, + }, + }, + ], + }, }, }, }, - }, - }); + }); - // We need to subscribe to a fragment in order for OperationTracker - // to be able to notify owners if they are affected by any pending operation - environment.subscribe( - environment.lookup( - createReaderSelector( - UserFragment, - 'user-id-1', - viewerOperation.request.variables, - viewerOperation.request, + // We need to subscribe to a fragment in order for OperationTracker + // to be able to notify owners if they are affected by any pending operation + environment.subscribe( + environment.lookup( + createReaderSelector( + UserFragment, + 'user-id-1', + viewerOperation.request.variables, + viewerOperation.request, + ), ), - ), - jest.fn(), - ); - }); + jest.fn(), + ); + }); - it('should throw and cache promise for pending operation affecting fragment owner', () => { - environment.execute({operation: nodeOperation}).subscribe({}); - operationLoader.load.mockImplementation(() => - Promise.resolve(PlainUserNameRenderer_name$normalization), - ); - environment.mock.nextValue(nodeOperation, { - data: { - node: { - __typename: 'User', - id: 'user-id-1', - name: 'Alice', - nameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_FragmentResourceWithOperationTrackerTestUserFragment: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: - 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext', - data: { - id: 'plain-test-data-id-1', - text: 'Data Text', + afterEach(() => { + RelayFeatureFlags.ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION = false; + }); + + it('should throw and cache promise for pending operation affecting fragment owner', () => { + environment.execute({operation: nodeOperation}).subscribe({}); + operationLoader.load.mockImplementation(() => + Promise.resolve(PlainUserNameRenderer_name$normalization), + ); + 
environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 'user-id-1', + name: 'Alice', + nameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment: + 'PlainUserNameRenderer.react', + __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: + 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext', + data: { + id: 'plain-test-data-id-1', + text: 'Data Text', + }, }, - }, - plainNameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext', - data: { - id: 'plain-test-data-id-1', - text: 'Data Text', + plainNameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'PlainUserNameRenderer.react', + __module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext', + data: { + id: 'plain-test-data-id-1', + text: 'Data Text', + }, }, }, }, - }, - }); - expect(operationLoader.load).toBeCalledTimes(2); + }); + expect(operationLoader.load).toBeCalledTimes(2); - // Calling `complete` here will just mark network request as completed, but - // we still need to process follow-ups with normalization ASTs by resolving - // the operation loader promise - environment.mock.complete(nodeOperation); + // Calling `complete` here will just mark network request as completed, but + // we still need to process follow-ups with normalization ASTs by 
resolving + // the operation loader promise + environment.mock.complete(nodeOperation); - const fragmentRef = { - __id: 'client:user-id-1:nameRenderer(supported:["PlainUserNameRenderer"])', - __fragments: { - FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: {}, - }, - __fragmentOwner: viewerOperation.request, - }; + const fragmentRef = { + __id: 'client:user-id-1:nameRenderer(supported:"1AwQS7")', + __fragments: { + FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: + {}, + }, + __fragmentOwner: viewerOperation.request, + }; - let thrown = null; - try { - FragmentResource.read( - PlainUserNameRenderer_name, - fragmentRef, - componentName, - ); - } catch (promise) { - expect(promise).toBeInstanceOf(Promise); - thrown = promise; - } - expect(thrown).not.toBe(null); + let thrown = null; + try { + FragmentResource.read( + PlainUserNameRenderer_name, + fragmentRef, + componentName, + ); + } catch (promise) { + expect(promise).toBeInstanceOf(Promise); + thrown = promise; + } + expect(thrown).not.toBe(null); - // Try reading fragment a second time while affecting operation is pending - let cached = null; - try { - FragmentResource.read( - PlainUserNameRenderer_name, - fragmentRef, - componentName, - ); - } catch (promise) { - expect(promise).toBeInstanceOf(Promise); - cached = promise; - } - // Assert that promise from first read was cached - expect(cached).toBe(thrown); + // Try reading fragment a second time while affecting operation is pending + let cached = null; + try { + FragmentResource.read( + PlainUserNameRenderer_name, + fragmentRef, + componentName, + ); + } catch (promise) { + expect(promise).toBeInstanceOf(Promise); + cached = promise; + } + // Assert that promise from first read was cached + expect(cached).toBe(thrown); - // Assert that we logged a 'pendingoperation.found' event. 
- const pendingOperationFoundEvents = logger.mock.calls - .map(([event]) => event) - .filter(event => event.name === 'pendingoperation.found'); + // Assert that we logged a 'pendingoperation.found' event. + const pendingOperationFoundEvents = logger.mock.calls + .map(([event]) => event) + .filter(event => event.name === 'pendingoperation.found'); - expect(pendingOperationFoundEvents.length).toBe(1); - const event = pendingOperationFoundEvents[0]; - invariant( - event.name === 'pendingoperation.found', - "Expected log event to be 'pendingoperation.found'", - ); - expect(event.fragment.name).toBe( - 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name', - ); - expect(event.fragmentOwner.node.operation.name).toBe( - viewerOperation.request.node.operation.name, - ); - expect( - event.pendingOperations.map(owner => owner.node.operation.name), - ).toEqual(['FragmentResourceWithOperationTrackerTestNodeQuery']); - }); + expect(pendingOperationFoundEvents.length).toBe(1); + const event = pendingOperationFoundEvents[0]; + invariant( + event.name === 'pendingoperation.found', + "Expected log event to be 'pendingoperation.found'", + ); + expect(event.fragment.name).toBe( + 'FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name', + ); + expect(event.fragmentOwner.node.operation.name).toBe( + viewerOperation.request.node.operation.name, + ); + expect( + event.pendingOperations.map(owner => owner.node.operation.name), + ).toEqual(['FragmentResourceWithOperationTrackerTestNodeQuery']); + }); - it('should read the data from the store once operation fully completed', () => { - environment.execute({operation: nodeOperation}).subscribe({}); - operationLoader.load.mockImplementation(() => - Promise.resolve(PlainUserNameRenderer_name$normalization), - ); - environment.mock.nextValue(nodeOperation, { - data: { - node: { - __typename: 'User', - id: 'user-id-1', - name: 'Alice', - nameRenderer: { - __typename: 'PlainUserNameRenderer', - 
__module_component_FragmentResourceWithOperationTrackerTestUserFragment: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: - 'PlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext', - data: { - id: 'plain-test-data-id-1', - text: 'Data Text', + it('should read the data from the store once operation fully completed', () => { + environment.execute({operation: nodeOperation}).subscribe({}); + operationLoader.load.mockImplementation(() => + Promise.resolve(PlainUserNameRenderer_name$normalization), + ); + environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 'user-id-1', + name: 'Alice', + nameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment: + 'PlainUserNameRenderer.react', + __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: + 'PlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext', + data: { + id: 'plain-test-data-id-1', + text: 'Data Text', + }, }, - }, - plainNameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'PlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext', - data: { - id: 'plain-test-data-id-1', - text: 'Data Text', + plainNameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'PlainUserNameRenderer.react', + __module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'PlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext', + data: { + id: 'plain-test-data-id-1', + text: 'Data Text', + }, }, }, }, - }, - }); - 
expect(operationLoader.load).toBeCalledTimes(2); - environment.mock.complete(nodeOperation); - // To make sure promise is resolved - jest.runAllTimers(); - const snapshot = FragmentResource.read( - PlainUserNameRenderer_name, - { - __id: 'client:user-id-1:nameRenderer(supported:["PlainUserNameRenderer"])', - __fragments: { - FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: - {}, + }); + expect(operationLoader.load).toBeCalledTimes(2); + environment.mock.complete(nodeOperation); + // To make sure promise is resolved + jest.runAllTimers(); + const snapshot = FragmentResource.read( + PlainUserNameRenderer_name, + { + __id: 'client:user-id-1:nameRenderer(supported:"1AwQS7")', + __fragments: { + FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: + {}, + }, + __fragmentOwner: viewerOperation.request, }, - __fragmentOwner: viewerOperation.request, - }, - componentName, - ); - expect(snapshot.data).toEqual({ - data: { - text: 'Data Text', - }, - plaintext: 'Plaintext', + componentName, + ); + expect(snapshot.data).toEqual({ + data: { + text: 'Data Text', + }, + plaintext: 'Plaintext', + }); }); - }); - it('should suspend on pagination query and then read the data', () => { - const paginationOperation = createOperationDescriptor( - FriendsPaginationQuery, - { - id: 'viewer-id', - }, - ); - environment.execute({operation: paginationOperation}).subscribe({}); - operationLoader.load.mockImplementation(() => - Promise.resolve(PlainUserNameRenderer_name$normalization), - ); - environment.mock.nextValue(paginationOperation, { - data: { - node: { - __typename: 'User', + it('should suspend on pagination query and then read the data', () => { + const paginationOperation = createOperationDescriptor( + FriendsPaginationQuery, + { id: 'viewer-id', - friends: { - pageInfo: { - hasNextPage: true, - hasPrevPage: false, - startCursor: 'cursor-2', - endCursor: 'cursor-2', - }, - edges: [ - { - cursor: 'cursor-2', - node: { - __typename: 'User', - id: 
'user-id-2', - name: 'Bob', - nameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_FragmentResourceWithOperationTrackerTestUserFragment: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: - 'PlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext 2', - data: { - id: 'plain-test-data-id-2', + }, + ); + environment.execute({operation: paginationOperation}).subscribe({}); + operationLoader.load.mockImplementation(() => + Promise.resolve(PlainUserNameRenderer_name$normalization), + ); + environment.mock.nextValue(paginationOperation, { + data: { + node: { + __typename: 'User', + id: 'viewer-id', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-2', + endCursor: 'cursor-2', + }, + edges: [ + { + cursor: 'cursor-2', + node: { + __typename: 'User', + id: 'user-id-2', + name: 'Bob', + nameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment: + 'PlainUserNameRenderer.react', + __module_operation_FragmentResourceWithOperationTrackerTestUserFragment: + 'PlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext 2', + data: { + id: 'plain-test-data-id-2', - text: 'Data Text 2', + text: 'Data Text 2', + }, }, - }, - plainNameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'PlainUserNameRenderer.react', - __module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: - 'PlainUserNameRenderer_name$normalization.graphql', - plaintext: 'Plaintext 2', - data: { - id: 'plain-test-data-id-2', - text: 'Data Text 2', + plainNameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'PlainUserNameRenderer.react', + 
__module_operation_FragmentResourceWithOperationTrackerTestUserFragment_plainNameRenderer: + 'PlainUserNameRenderer_name$normalization.graphql', + plaintext: 'Plaintext 2', + data: { + id: 'plain-test-data-id-2', + text: 'Data Text 2', + }, }, }, }, - }, - ], + ], + }, }, }, - }, - }); - expect(operationLoader.load).toBeCalledTimes(2); - const fragmentRef = { - __id: 'client:user-id-2:nameRenderer(supported:["PlainUserNameRenderer"])', - __fragments: { - FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: {}, - }, - __fragmentOwner: viewerOperation.request, - }; - let promiseThrown = false; - try { - FragmentResource.read( + }); + expect(operationLoader.load).toBeCalledTimes(2); + const fragmentRef = { + __id: 'client:user-id-2:nameRenderer(supported:"1AwQS7")', + __fragments: { + FragmentResourceWithOperationTrackerTestPlainUserNameRenderer_name: + {}, + }, + __fragmentOwner: viewerOperation.request, + }; + let promiseThrown = false; + try { + FragmentResource.read( + PlainUserNameRenderer_name, + fragmentRef, + componentName, + ); + } catch (promise) { + expect(promise).toBeInstanceOf(Promise); + promiseThrown = true; + } + expect(promiseThrown).toBe(true); + + // Complete the request + environment.mock.complete(paginationOperation); + // This should resolve promises + jest.runAllTimers(); + + const snapshot = FragmentResource.read( PlainUserNameRenderer_name, fragmentRef, componentName, ); - } catch (promise) { - expect(promise).toBeInstanceOf(Promise); - promiseThrown = true; - } - expect(promiseThrown).toBe(true); - - // Complete the request - environment.mock.complete(paginationOperation); - // This should resolve promises - jest.runAllTimers(); - - const snapshot = FragmentResource.read( - PlainUserNameRenderer_name, - fragmentRef, - componentName, - ); - expect(snapshot.data).toEqual({ - data: { - text: 'Data Text 2', - }, - plaintext: 'Plaintext 2', + expect(snapshot.data).toEqual({ + data: { + text: 'Data Text 2', + }, + plaintext: 
'Plaintext 2', + }); }); - }); -}); + }, +); diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerOptimisticUpdates-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerOptimisticUpdates-test.js new file mode 100644 index 0000000000000..6cca0ac01238e --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerOptimisticUpdates-test.js @@ -0,0 +1,282 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; +import type {LogEvent} from 'relay-runtime/store/RelayStoreTypes'; + +const {getFragment} = require('../../../relay-runtime'); +const {createFragmentResource} = require('../legacy/FragmentResource'); +const { + createOperationDescriptor, + createReaderSelector, + graphql, +} = require('relay-runtime'); +const RelayOperationTracker = require('relay-runtime/store/RelayOperationTracker'); +const RelayFeatureFlags = require('relay-runtime/util/RelayFeatureFlags'); +const {createMockEnvironment} = require('relay-test-utils'); +const {disallowWarnings} = require('relay-test-utils-internal'); + +disallowWarnings(); + +describe('FragmentResource with Operation Tracker for optimistic updates behavior', () => { + const componentName = 'TestComponent'; + let environment; + let UserFragment; + let FragmentResource; + let operationTracker; + let nodeOperation; + let nodeOperation2; + let logger; + let UserQuery; + let ViewerFriendsQuery; + let viewerOperation; + + beforeEach(() => { + RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = true; + operationTracker = new RelayOperationTracker(); + logger = jest.fn<[LogEvent], void>(); + environment = createMockEnvironment({ + operationTracker, + log: logger, + }); + + UserFragment = graphql` 
+ fragment FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment on User { + id + name + } + `; + UserQuery = graphql` + query FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery( + $id: ID! + ) { + node(id: $id) { + __typename + ...FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment + } + } + `; + + ViewerFriendsQuery = graphql` + query FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery { + viewer { + actor { + friends(first: 1) @connection(key: "Viewer_friends") { + edges { + node { + ...FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment + } + } + } + } + } + } + `; + FragmentResource = createFragmentResource(environment); + nodeOperation = createOperationDescriptor(UserQuery, { + id: 'user-id-1', + }); + nodeOperation2 = createOperationDescriptor(UserQuery, { + id: 'user-id-2', + }); + viewerOperation = createOperationDescriptor(ViewerFriendsQuery, {}); + environment.execute({operation: viewerOperation}).subscribe({}); + environment.execute({operation: nodeOperation}).subscribe({}); + environment.subscribe( + environment.lookup(viewerOperation.fragment), + jest.fn(), + ); + + // We need to subscribe to a fragment in order for OperationTracker + // to be able to notify owners if they are affected by any pending operation + environment.subscribe( + environment.lookup( + createReaderSelector( + UserFragment, + 'user-id-1', + viewerOperation.request.variables, + viewerOperation.request, + ), + ), + jest.fn(), + ); + }); + + afterEach(() => { + RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = false; + }); + + it('should throw promise for pending operation affecting fragment owner', () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 
'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragmentRef = { + __id: 'user-id-1', + __fragments: { + FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment: {}, + }, + __fragmentOwner: nodeOperation.request, + }; + + const result = FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + FragmentResource.subscribe(result, jest.fn()); + + // Execute the nodeOperation as a mutation and set the record as undefined in optimistic updater + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + + // Check the pending operation for both the node and viewer query + const pendingOperationsForViewerOperation = + operationTracker.getPendingOperationsAffectingOwner( + viewerOperation.request, + )?.promise; + expect(pendingOperationsForViewerOperation).not.toBe(null); + + const pendingOperationsForNodeOperation = + operationTracker.getPendingOperationsAffectingOwner( + nodeOperation.request, + )?.promise; + expect(pendingOperationsForNodeOperation).not.toBe(null); + }); + + it('when an unrelated operation resolves while an optimistic response is currently applied', () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-2', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + { + cursor: 'cursor-2', + node: { + id: 'user-id-2', + name: 'Bob', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragmentRef = { + __id: 'user-id-1', + __fragments: { + FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment: {}, + }, + __fragmentOwner: nodeOperation.request, + }; + 
const fragmentRef2 = { + __id: 'user-id-2', + __fragments: { + FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment: {}, + }, + __fragmentOwner: nodeOperation2.request, + }; + + const result = FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + FragmentResource.subscribe(result, jest.fn()); + const result2 = FragmentResource.read( + getFragment(UserFragment), + fragmentRef2, + componentName, + ); + FragmentResource.subscribe(result2, jest.fn()); + + // Execute the nodeOperation as a mutation and set the record as undefined in optimistic updater + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + environment + .executeMutation({ + operation: nodeOperation2, + optimisticUpdater: store => { + const record = store.get('user-id-2'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + + const pendingOperationsForNodeOperation = + operationTracker.getPendingOperationsAffectingOwner( + nodeOperation.request, + ); + expect(pendingOperationsForNodeOperation?.pendingOperations.length).toBe(1); + const pendingOperationsForNodeOperation2 = + operationTracker.getPendingOperationsAffectingOwner( + nodeOperation2.request, + ); + expect(pendingOperationsForNodeOperation2?.pendingOperations.length).toBe( + 1, + ); + }); +}); diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerSuspense-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerSuspense-test.js new file mode 100644 index 0000000000000..77211d64c1473 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-WithOperationTrackerSuspense-test.js @@ -0,0 +1,350 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; +import type {LogEvent} from 'relay-runtime/store/RelayStoreTypes'; + +const {getFragment} = require('../../../relay-runtime'); +const {createFragmentResource} = require('../legacy/FragmentResource'); +const { + createOperationDescriptor, + createReaderSelector, + graphql, +} = require('relay-runtime'); +const RelayOperationTracker = require('relay-runtime/store/RelayOperationTracker'); +const RelayFeatureFlags = require('relay-runtime/util/RelayFeatureFlags'); +const {createMockEnvironment} = require('relay-test-utils'); +const {disallowWarnings} = require('relay-test-utils-internal'); + +disallowWarnings(); + +describe('FragmentResource with Operation Tracker and Suspense behavior', () => { + const componentName = 'TestComponent'; + let environment; + let UserFragment; + let FragmentResource; + let operationTracker; + let nodeOperation; + let logger; + let UserQuery; + let ViewerFriendsQuery; + let viewerOperation; + let UsersFragment; + let UsersQuery; + let pluralOperation; + + const pluralVariables = {ids: ['user-id-1']}; + + beforeEach(() => { + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE = true; + RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = true; + operationTracker = new RelayOperationTracker(); + logger = jest.fn<[LogEvent], void>(); + environment = createMockEnvironment({ + operationTracker, + log: logger, + }); + + UserFragment = graphql` + fragment FragmentResourceWithOperationTrackerSuspenseTestFragment on User { + id + name + } + `; + UserQuery = graphql` + query FragmentResourceWithOperationTrackerSuspenseTestQuery($id: ID!) 
{ + node(id: $id) { + __typename + ...FragmentResourceWithOperationTrackerSuspenseTestFragment + } + } + `; + + UsersFragment = graphql` + fragment FragmentResourceWithOperationTrackerSuspenseTest2Fragment on User + @relay(plural: true) { + id + name + } + `; + + UsersQuery = graphql` + query FragmentResourceWithOperationTrackerSuspenseTest2Query( + $ids: [ID!]! + ) { + nodes(ids: $ids) { + __typename + ...FragmentResourceWithOperationTrackerSuspenseTest2Fragment + } + } + `; + + ViewerFriendsQuery = graphql` + query FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery { + viewer { + actor { + friends(first: 1) @connection(key: "Viewer_friends") { + edges { + node { + ...FragmentResourceWithOperationTrackerSuspenseTestFragment + } + } + } + } + } + } + `; + FragmentResource = createFragmentResource(environment); + nodeOperation = createOperationDescriptor(UserQuery, { + id: 'user-id-1', + }); + viewerOperation = createOperationDescriptor(ViewerFriendsQuery, {}); + pluralOperation = createOperationDescriptor(UsersQuery, pluralVariables); + + environment.execute({operation: viewerOperation}).subscribe({}); + environment.execute({operation: nodeOperation}).subscribe({}); + environment.execute({operation: pluralOperation}).subscribe({}); + + environment.subscribe( + environment.lookup(viewerOperation.fragment), + jest.fn(), + ); + + // We need to subscribe to a fragment in order for OperationTracker + // to be able to notify owners if they are affected by any pending operation + environment.subscribe( + environment.lookup( + createReaderSelector( + UserFragment, + 'user-id-1', + viewerOperation.request.variables, + viewerOperation.request, + ), + ), + jest.fn(), + ); + environment.subscribe( + environment.lookup( + createReaderSelector( + UsersFragment, + 'user-id-1', + pluralOperation.request.variables, + pluralOperation.request, + ), + ), + jest.fn(), + ); + }); + + afterEach(() => { + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE = false; + 
RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = false; + }); + + it('should throw promise for pending operation affecting fragment owner', async () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragmentRef = { + __id: 'user-id-1', + __fragments: { + FragmentResourceWithOperationTrackerSuspenseTestFragment: {}, + }, + __fragmentOwner: nodeOperation.request, + }; + + let result = FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + FragmentResource.subscribe(result, jest.fn()); + + // Execute the nodeOperation query with executeMutation and set the record as undefined in optimistic updater + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + + let thrown = null; + try { + FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + } catch (p) { + expect(p).toBeInstanceOf(Promise); + thrown = p; + } + expect(thrown).not.toBe(null); + + environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 'user-id-1', + name: 'Alice222', + }, + }, + }); + + environment.mock.complete(nodeOperation.request.node); + await expect(thrown).resolves.not.toThrow(); + + result = FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + expect(result.data).toEqual({ + id: 'user-id-1', + name: 'Alice222', + }); + }); + + it('should throw promise for plural fragment', async () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + 
id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragment2Ref = { + __id: 'user-id-1', + __fragments: { + FragmentResourceWithOperationTrackerSuspenseTest2Fragment: {}, + }, + __fragmentOwner: pluralOperation.request, + }; + + let result = FragmentResource.read( + getFragment(UsersFragment), + [fragment2Ref], + componentName, + ); + FragmentResource.subscribe(result, jest.fn()); + + const fragmentRef = { + __id: 'user-id-1', + __fragments: { + FragmentResourceWithOperationTrackerSuspenseTestFragment: {}, + }, + __fragmentOwner: nodeOperation.request, + }; + + const result2 = FragmentResource.read( + getFragment(UserFragment), + fragmentRef, + componentName, + ); + FragmentResource.subscribe(result2, jest.fn()); + + // Execute the nodeOperation query with executeMutation and set the record as undefined in optimistic updater + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + + let thrown = null; + try { + FragmentResource.read( + getFragment(UsersFragment), + [fragment2Ref], + componentName, + ); + } catch (p) { + expect(p).toBeInstanceOf(Promise); + thrown = p; + } + expect(thrown).not.toBe(null); + + environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 'user-id-1', + name: 'Alice222', + }, + }, + }); + + environment.mock.complete(nodeOperation.request.node); + await expect(thrown).resolves.not.toThrow(); + + result = FragmentResource.read( + getFragment(UsersFragment), + [fragment2Ref], + componentName, + ); + expect(result.data).toEqual([ + { + id: 'user-id-1', + name: 'Alice222', + }, + ]); + }); +}); diff 
--git a/packages/react-relay/relay-hooks/__tests__/FragmentResource-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResource-test.js index 59fc6f87ff85a..9e210be14fd91 100644 --- a/packages/react-relay/relay-hooks/__tests__/FragmentResource-test.js +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResource-test.js @@ -58,7 +58,9 @@ import type { import type {LogEvent} from 'relay-runtime'; import type {Fragment, Query} from 'relay-runtime/util/RelayRuntimeTypes'; -const {getFragmentResourceForEnvironment} = require('../FragmentResource'); +const { + getFragmentResourceForEnvironment, +} = require('../legacy/FragmentResource'); const { __internal: {fetchQuery}, createOperationDescriptor, @@ -132,7 +134,6 @@ describe('FragmentResource', () => { let logEvents: Array; beforeEach(() => { - // jest.resetModules(); ({createMockEnvironment} = require('relay-test-utils-internal')); logEvents = []; diff --git a/packages/react-relay/relay-hooks/__tests__/FragmentResourceRequiredField-test.js b/packages/react-relay/relay-hooks/__tests__/FragmentResourceRequiredField-test.js index b6b16d87076a9..235231852297b 100644 --- a/packages/react-relay/relay-hooks/__tests__/FragmentResourceRequiredField-test.js +++ b/packages/react-relay/relay-hooks/__tests__/FragmentResourceRequiredField-test.js @@ -10,9 +10,14 @@ */ 'use strict'; -import type {LogEvent} from '../../../relay-runtime/store/RelayStoreTypes'; +import type { + LogEvent, + RelayFieldLoggerEvent, +} from 'relay-runtime/store/RelayStoreTypes'; -const {getFragmentResourceForEnvironment} = require('../FragmentResource'); +const { + getFragmentResourceForEnvironment, +} = require('../legacy/FragmentResource'); const { __internal: {fetchQuery}, createOperationDescriptor, @@ -33,27 +38,15 @@ let environment; let query; let FragmentResource; let logger; -let requiredFieldLogger; +let relayFieldLogger; beforeEach(() => { logger = jest.fn<[LogEvent], void>(); - requiredFieldLogger = jest.fn< - [ - | {+fieldPath: 
string, +kind: 'missing_field.log', +owner: string} - | {+fieldPath: string, +kind: 'missing_field.throw', +owner: string} - | { - +error: Error, - +fieldPath: string, - +kind: 'relay_resolver.error', - +owner: string, - }, - ], - void, - >(); + relayFieldLogger = jest.fn<[RelayFieldLoggerEvent], void>(); environment = createMockEnvironment({ log: logger, - requiredFieldLogger, + relayFieldLogger, }); FragmentResource = getFragmentResourceForEnvironment(environment); @@ -116,7 +109,7 @@ test('Logs if a @required(action: LOG) field is null', () => { }, componentDisplayName, ); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'alternate_name', kind: 'missing_field.log', owner: 'FragmentResourceRequiredFieldTestUserFragment', @@ -179,7 +172,7 @@ test('Throws if a @required(action: THROW) field is present and then goes missin "Relay: Missing @required value at path 'name' in 'FragmentResourceRequiredFieldTestUserFragment'.", ); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'name', kind: 'missing_field.throw', owner: 'FragmentResourceRequiredFieldTestUserFragment', diff --git a/packages/react-relay/relay-hooks/__tests__/LazyLoadEntryPointContainer_DEEPRECATED-test.js b/packages/react-relay/relay-hooks/__tests__/LazyLoadEntryPointContainer_DEEPRECATED-test.js index 5ae0c24d445fb..26b2abac679b9 100644 --- a/packages/react-relay/relay-hooks/__tests__/LazyLoadEntryPointContainer_DEEPRECATED-test.js +++ b/packages/react-relay/relay-hooks/__tests__/LazyLoadEntryPointContainer_DEEPRECATED-test.js @@ -141,6 +141,8 @@ beforeEach(() => { queries: { prefetched: { parameters: params, + /* $FlowFixMe[prop-missing] Error revealed after improved builtin + * React utility types */ variables: {id: entryPointParams.id}, }, }, @@ -296,6 +298,7 @@ it('renders synchronously when the query and component are already loaded', () = it('re-renders without reloading 
when non-prefetch props change', () => { // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode const Component = jest.fn(props => { + // $FlowFixMe[react-rule-hook] const data = usePreloadedQuery(query, props.queries.prefetched); return data.node?.name; }); @@ -338,6 +341,7 @@ it('re-renders without reloading when non-prefetch props change', () => { it('re-renders and reloads when prefetch params change', () => { // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode const Component = jest.fn(props => { + // $FlowFixMe[react-rule-hook] const data = usePreloadedQuery(query, props.queries.prefetched); return data.node?.name; }); @@ -523,6 +527,8 @@ it('should use environment from `getEnvironment` prop to fetch a query', () => { actorID: '5', }, parameters: params, + /* $FlowFixMe[prop-missing] Error revealed after improved builtin + * React utility types */ variables: {id: entryPointParams.id}, }, }, diff --git a/packages/react-relay/relay-hooks/__tests__/MatchContainer-test.js b/packages/react-relay/relay-hooks/__tests__/MatchContainer-test.js index cd0fad73e2c0a..576538f04fd7d 100644 --- a/packages/react-relay/relay-hooks/__tests__/MatchContainer-test.js +++ b/packages/react-relay/relay-hooks/__tests__/MatchContainer-test.js @@ -73,14 +73,16 @@ describe('MatchContainer', () => { )); }); - it('throws when match prop is null', () => { + it('throws when match prop is null', async () => { // This prevents console.error output in the test, which is expected jest.spyOn(console, 'error').mockImplementationOnce(() => {}); - expect(() => { - TestRenderer.create( - , - ); - }).toThrow( + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create( + , + ); + }); + }).rejects.toThrow( 'MatchContainer: Expected `match` value to be an object or null/undefined.', ); }); @@ -199,7 +201,7 @@ describe('MatchContainer', () => { it('renders the fallback if the match object is empty', () => { 
loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>

fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( { it('renders the fallback if the match object is missing expected fields', () => { loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( { expect(Fallback).toBeCalledTimes(1); }); - it('throws if the match object is invalid (__id)', () => { + it('throws if the match object is invalid (__id)', async () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); - expect(() => { - TestRenderer.create( - : $FlowFixMe)} - />, - ); - }).toThrow( + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create( + : $FlowFixMe)} + />, + ); + }); + }).rejects.toThrow( "MatchContainer: Invalid 'match' value, expected an object that has a '...SomeFragment' spread.", ); }); - it('throws if the match object is invalid (__fragments)', () => { + it('throws if the match object is invalid (__fragments)', async () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); - expect(() => { - TestRenderer.create( - : $FlowFixMe)} - />, - ); - }).toThrow( + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create( + : $FlowFixMe)} + />, + ); + }); + }).rejects.toThrow( "MatchContainer: Invalid 'match' value, expected an object that has a '...SomeFragment' spread.", ); }); - it('throws if the match object is invalid (__fragmentOwner)', () => { + it('throws if the match object is invalid (__fragmentOwner)', async () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); - expect(() => { - TestRenderer.create( - : $FlowFixMe)} - />, - ); - }).toThrow( + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create( + : $FlowFixMe)} + />, + ); + }); + }).rejects.toThrow( "MatchContainer: Invalid 'match' value, expected an object that has a '...SomeFragment' spread.", ); }); - it('throws if the match object is invalid (__fragmentPropName)', () => { + it('throws if the match object is invalid (__fragmentPropName)', async () => { jest.spyOn(console, 'error').mockImplementationOnce(() => {}); loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); - expect(() => { - TestRenderer.create( - : $FlowFixMe)} - />, - ); - }).toThrow( + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); + await expect(async () => { + await TestRenderer.act(() => { + TestRenderer.create( + : $FlowFixMe)} + />, + ); + }); + }).rejects.toThrow( "MatchContainer: Invalid 'match' value, expected an object that has a '...SomeFragment' spread.", ); }); @@ -353,7 +363,7 @@ describe('MatchContainer', () => { it('renders the fallback if the match value is null', () => { loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( { it('renders the fallback if the match value is undefined', () => { loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); const otherProps = {otherProp: 'hello!'}; - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( { it('transitions from fallback when new props have a component', () => { loader.mockReturnValue(React.memo((UserComponent: $FlowFixMe))); - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( { propName: 'actor', module: 'ActorContainer.react', }); - const Fallback = (jest.fn(() =>
fallback
): $FlowFixMe); + const Fallback: $FlowFixMe = jest.fn(() =>
fallback
); const renderer = TestRenderer.create( > + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {always_throws as userAlwaysThrowsResolverType} from "../../../../relay-r // A type error here indicates that the type signature of the resolver module is incorrect. (userAlwaysThrowsResolverType: ( rootKey: UserAlwaysThrowsResolver$key, -) => mixed); +) => ?string); declare export opaque type FragmentResourceResolverTestFragment1$fragmentType: FragmentType; export type FragmentResourceResolverTestFragment1$data = {| - +always_throws: ?$Call<((...empty[]) => R) => R, typeof userAlwaysThrowsResolverType>, + +always_throws: ?string, +$fragmentType: FragmentResourceResolverTestFragment1$fragmentType, |}; export type FragmentResourceResolverTestFragment1$key = { diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment1.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment1.graphql.js new file mode 100644 index 0000000000000..b599876d497b5 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment1.graphql.js @@ -0,0 +1,61 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type FragmentResourceSemanticNonNullTestFragment1$fragmentType: FragmentType; +export type FragmentResourceSemanticNonNullTestFragment1$data = {| + +name: ?string, + +$fragmentType: FragmentResourceSemanticNonNullTestFragment1$fragmentType, +|}; +export type FragmentResourceSemanticNonNullTestFragment1$key = { + +$data?: FragmentResourceSemanticNonNullTestFragment1$data, + +$fragmentSpreads: FragmentResourceSemanticNonNullTestFragment1$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "FragmentResourceSemanticNonNullTestFragment1", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "1a78814fdaf60c7ab1bc0f62142f256d"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + FragmentResourceSemanticNonNullTestFragment1$fragmentType, + FragmentResourceSemanticNonNullTestFragment1$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment2.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment2.graphql.js new file mode 100644 index 0000000000000..a2eb0665983fb --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestFragment2.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<43dd6048fb8a8d2148dc145630c926ac>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type FragmentResourceSemanticNonNullTestFragment2$fragmentType: FragmentType; +export type FragmentResourceSemanticNonNullTestFragment2$data = {| + +name: ?string, + +$fragmentType: FragmentResourceSemanticNonNullTestFragment2$fragmentType, +|}; +export type FragmentResourceSemanticNonNullTestFragment2$key = { + +$data?: FragmentResourceSemanticNonNullTestFragment2$data, + +$fragmentSpreads: FragmentResourceSemanticNonNullTestFragment2$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceSemanticNonNullTestFragment2", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "4666ac08008cb843a0a50695abc8df0f"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + FragmentResourceSemanticNonNullTestFragment2$fragmentType, + FragmentResourceSemanticNonNullTestFragment2$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestQuery.graphql.js new file mode 100644 index 0000000000000..9df00e206ca39 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceSemanticNonNullTestQuery.graphql.js @@ -0,0 +1,152 @@ +/** + * Copyright (c) Meta Platforms, 
Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<9edf937671528fb8914422019bada16e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceSemanticNonNullTestFragment1$fragmentType } from "./FragmentResourceSemanticNonNullTestFragment1.graphql"; +import type { FragmentResourceSemanticNonNullTestFragment2$fragmentType } from "./FragmentResourceSemanticNonNullTestFragment2.graphql"; +export type FragmentResourceSemanticNonNullTestQuery$variables = {| + id: string, +|}; +export type FragmentResourceSemanticNonNullTestQuery$data = {| + +node: ?{| + +__typename: string, + +$fragmentSpreads: FragmentResourceSemanticNonNullTestFragment1$fragmentType & FragmentResourceSemanticNonNullTestFragment2$fragmentType, + |}, +|}; +export type FragmentResourceSemanticNonNullTestQuery = {| + response: FragmentResourceSemanticNonNullTestQuery$data, + variables: FragmentResourceSemanticNonNullTestQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceSemanticNonNullTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceSemanticNonNullTestFragment1" 
+ }, + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceSemanticNonNullTestFragment2" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "FragmentResourceSemanticNonNullTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "3214df14c7879c270b9f7eb23e7f9be5", + "id": null, + "metadata": {}, + "name": "FragmentResourceSemanticNonNullTestQuery", + "operationKind": "query", + "text": "query FragmentResourceSemanticNonNullTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...FragmentResourceSemanticNonNullTestFragment1\n ...FragmentResourceSemanticNonNullTestFragment2\n id\n }\n}\n\nfragment FragmentResourceSemanticNonNullTestFragment1 on User {\n name\n}\n\nfragment FragmentResourceSemanticNonNullTestFragment2 on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "79609fa487e20403b5757ec5fe030446"; +} + +module.exports = ((node/*: any*/)/*: Query< + FragmentResourceSemanticNonNullTestQuery$variables, + FragmentResourceSemanticNonNullTestQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment.graphql.js new file mode 100644 index 
0000000000000..8ebd8f953f82b --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment.graphql.js @@ -0,0 +1,67 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<9140b5d003eaf8ad8af413ad7e1005d5>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType: FragmentType; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$data = {| + +id: string, + +name: ?string, + +$fragmentType: FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType, +|}; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$key = { + +$data?: FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$data, + +$fragmentSpreads: FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "608a07a152032988a6413ca6f1dbfbaf"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType, + FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery.graphql.js new file mode 100644 index 0000000000000..91382633a1dc1 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery.graphql.js @@ -0,0 +1,146 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<74a5dbdc8516bd27deb03e9589c5ec06>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType } from "./FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment.graphql"; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$variables = {| + id: string, +|}; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$data = {| + +node: ?{| + +__typename: string, + +$fragmentSpreads: FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType, + |}, +|}; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery = {| + response: FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$data, + variables: FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + 
"argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "1e4b5355ae26c1d5586da76240492e10", + "id": null, + "metadata": {}, + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery", + "operationKind": "query", + "text": "query FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment\n id\n }\n}\n\nfragment FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "14c673af170df5bad4f4905bb7368415"; +} + +module.exports = ((node/*: any*/)/*: Query< + FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$variables, + FragmentResourceWithOperationTrackerOptimisticUpdatesTestQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery.graphql.js new file mode 100644 index 0000000000000..f841262a0c5d3 --- /dev/null +++ 
b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery.graphql.js @@ -0,0 +1,286 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment$fragmentType } from "./FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment.graphql"; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$variables = {||}; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$data = {| + +viewer: ?{| + +actor: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, + |}, +|}; +export type FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery = {| + response: FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$data, + variables: FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + 
"storageKey": null + } + ], + "storageKey": null +}, +v3 = [ + { + "kind": "Literal", + "name": "first", + "value": 1 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "__Viewer_friends_connection", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": 
"LinkedField", + "name": "actor", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": "friends(first:1)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "Viewer_friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "a722ba9082a20e95ed461fd70017e783", + "id": null, + "metadata": { + "connection": [ + { + "count": null, + "cursor": null, + "direction": "forward", + "path": [ + "viewer", + "actor", + "friends" + ] + } + ] + }, + "name": "FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery", + "operationKind": "query", + "text": "query FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery {\n viewer {\n actor {\n __typename\n friends(first: 1) {\n edges {\n node {\n ...FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment\n id\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n }\n }\n}\n\nfragment FragmentResourceWithOperationTrackerOptimisticUpdatesTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "f9b64d236a796cfb14e99418ead325ba"; +} + +module.exports = 
((node/*: any*/)/*: Query< + FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$variables, + FragmentResourceWithOperationTrackerOptimisticUpdatesTestViewerFriendsQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Fragment.graphql.js new file mode 100644 index 0000000000000..aefb498b61040 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Fragment.graphql.js @@ -0,0 +1,69 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type FragmentResourceWithOperationTrackerSuspenseTest2Fragment$fragmentType: FragmentType; +export type FragmentResourceWithOperationTrackerSuspenseTest2Fragment$data = $ReadOnlyArray<{| + +id: string, + +name: ?string, + +$fragmentType: FragmentResourceWithOperationTrackerSuspenseTest2Fragment$fragmentType, +|}>; +export type FragmentResourceWithOperationTrackerSuspenseTest2Fragment$key = $ReadOnlyArray<{ + +$data?: FragmentResourceWithOperationTrackerSuspenseTest2Fragment$data, + +$fragmentSpreads: FragmentResourceWithOperationTrackerSuspenseTest2Fragment$fragmentType, + ... 
+}>; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "plural": true + }, + "name": "FragmentResourceWithOperationTrackerSuspenseTest2Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "d3df6c392eb4ca3ce9a0c63f7b9f9936"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + FragmentResourceWithOperationTrackerSuspenseTest2Fragment$fragmentType, + FragmentResourceWithOperationTrackerSuspenseTest2Fragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Query.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Query.graphql.js new file mode 100644 index 0000000000000..73236aec36589 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTest2Query.graphql.js @@ -0,0 +1,146 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<50860b038f1f099a4dff3e28b751b6ab>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceWithOperationTrackerSuspenseTest2Fragment$fragmentType } from "./FragmentResourceWithOperationTrackerSuspenseTest2Fragment.graphql"; +export type FragmentResourceWithOperationTrackerSuspenseTest2Query$variables = {| + ids: $ReadOnlyArray, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTest2Query$data = {| + +nodes: ?$ReadOnlyArray, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTest2Query = {| + response: FragmentResourceWithOperationTrackerSuspenseTest2Query$data, + variables: FragmentResourceWithOperationTrackerSuspenseTest2Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "ids" + } +], +v1 = [ + { + "kind": "Variable", + "name": "ids", + "variableName": "ids" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerSuspenseTest2Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "nodes", + "plural": true, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceWithOperationTrackerSuspenseTest2Fragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "FragmentResourceWithOperationTrackerSuspenseTest2Query", + "selections": [ + { + "alias": null, + "args": (v1/*: 
any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "nodes", + "plural": true, + "selections": [ + (v2/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "63f940988c565dc9600a3d3ccd7e92cd", + "id": null, + "metadata": {}, + "name": "FragmentResourceWithOperationTrackerSuspenseTest2Query", + "operationKind": "query", + "text": "query FragmentResourceWithOperationTrackerSuspenseTest2Query(\n $ids: [ID!]!\n) {\n nodes(ids: $ids) {\n __typename\n ...FragmentResourceWithOperationTrackerSuspenseTest2Fragment\n id\n }\n}\n\nfragment FragmentResourceWithOperationTrackerSuspenseTest2Fragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "f32d541459443bcb33b1bd0bff9a612e"; +} + +module.exports = ((node/*: any*/)/*: Query< + FragmentResourceWithOperationTrackerSuspenseTest2Query$variables, + FragmentResourceWithOperationTrackerSuspenseTest2Query$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestFragment.graphql.js new file mode 100644 index 0000000000000..c14a00a43829a --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestFragment.graphql.js @@ -0,0 +1,67 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<6b85ffd10ca4c803f16753d66e7f0613>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType: FragmentType; +export type FragmentResourceWithOperationTrackerSuspenseTestFragment$data = {| + +id: string, + +name: ?string, + +$fragmentType: FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTestFragment$key = { + +$data?: FragmentResourceWithOperationTrackerSuspenseTestFragment$data, + +$fragmentSpreads: FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerSuspenseTestFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "e12427787bafaeb86e94ee353ec32606"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType, + FragmentResourceWithOperationTrackerSuspenseTestFragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestQuery.graphql.js new file mode 100644 index 0000000000000..f8c4e905bb48e --- /dev/null +++ 
b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestQuery.graphql.js @@ -0,0 +1,146 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<3913d108171e606ef0523ee539b2b4f1>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType } from "./FragmentResourceWithOperationTrackerSuspenseTestFragment.graphql"; +export type FragmentResourceWithOperationTrackerSuspenseTestQuery$variables = {| + id: string, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTestQuery$data = {| + +node: ?{| + +__typename: string, + +$fragmentSpreads: FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType, + |}, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTestQuery = {| + response: FragmentResourceWithOperationTrackerSuspenseTestQuery$data, + variables: FragmentResourceWithOperationTrackerSuspenseTestQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerSuspenseTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + 
"args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceWithOperationTrackerSuspenseTestFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "FragmentResourceWithOperationTrackerSuspenseTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "9f986aa05e2829cbb73fb03a13b16e06", + "id": null, + "metadata": {}, + "name": "FragmentResourceWithOperationTrackerSuspenseTestQuery", + "operationKind": "query", + "text": "query FragmentResourceWithOperationTrackerSuspenseTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...FragmentResourceWithOperationTrackerSuspenseTestFragment\n id\n }\n}\n\nfragment FragmentResourceWithOperationTrackerSuspenseTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "cbef95bbb7223d9a6dca6b4123ef5730"; +} + +module.exports = ((node/*: any*/)/*: Query< + FragmentResourceWithOperationTrackerSuspenseTestQuery$variables, + FragmentResourceWithOperationTrackerSuspenseTestQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js new file mode 100644 index 0000000000000..95e56cd9ff7cf 
--- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js @@ -0,0 +1,286 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentResourceWithOperationTrackerSuspenseTestFragment$fragmentType } from "./FragmentResourceWithOperationTrackerSuspenseTestFragment.graphql"; +export type FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$variables = {||}; +export type FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$data = {| + +viewer: ?{| + +actor: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, + |}, +|}; +export type FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery = {| + response: FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$data, + variables: FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null +}, +v3 = [ + { + 
"kind": "Literal", + "name": "first", + "value": 1 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "__Viewer_friends_connection", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "FragmentResourceWithOperationTrackerSuspenseTestFragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + (v0/*: any*/), + { + 
"alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": "friends(first:1)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "Viewer_friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "1f101fa9f94518f5b602545fa8906679", + "id": null, + "metadata": { + "connection": [ + { + "count": null, + "cursor": null, + "direction": "forward", + "path": [ + "viewer", + "actor", + "friends" + ] + } + ] + }, + "name": "FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery", + "operationKind": "query", + "text": "query FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery {\n viewer {\n actor {\n __typename\n friends(first: 1) {\n edges {\n node {\n ...FragmentResourceWithOperationTrackerSuspenseTestFragment\n id\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n }\n }\n}\n\nfragment FragmentResourceWithOperationTrackerSuspenseTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "ed31499a862a024df8808467d4b24601"; +} + +module.exports = ((node/*: any*/)/*: Query< + FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$variables, + 
FragmentResourceWithOperationTrackerSuspenseTestViewerFriendsQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestFriendsPaginationQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestFriendsPaginationQuery.graphql.js index 546c03d7ab236..347ccf93edef1 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestFriendsPaginationQuery.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestFriendsPaginationQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<77aaa4dff93cd31277edc79027e121d5>> + * @generated SignedSource<<844ba03d69397d7655e235f947b471e3>> * @flow * @lightSyntaxTransform * @nogrep @@ -236,10 +236,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -277,7 +274,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -285,9 +282,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -311,7 +306,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v2/*: any*/) ], diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestNodeQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestNodeQuery.graphql.js index 310acf8f44ffa..ee949e0273c3d 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestNodeQuery.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestNodeQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<52a8c1b75facb93f5014c33c45de06a7>> * @flow * @lightSyntaxTransform * @nogrep @@ -123,10 +123,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -164,7 +161,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -172,9 +169,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -198,7 +193,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "type": "User", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestUserFragment.graphql.js index 7790e8f0610bc..f7d54f1cb33e2 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -73,10 +73,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - 
"value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -113,7 +110,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -121,9 +118,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -146,7 +141,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "type": "User", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestViewerFriendsQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestViewerFriendsQuery.graphql.js index 4330681b9ee40..7659e5548da2c 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestViewerFriendsQuery.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/FragmentResourceWithOperationTrackerTestViewerFriendsQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7c489f5e7cb8805b1d72b47ee8c3f2ef>> + * @generated SignedSource<<6e17c443dee3c70c6dd815c26842d585>> * @flow * @lightSyntaxTransform * @nogrep @@ -231,10 +231,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -272,7 +269,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": 
"plainNameRenderer", @@ -280,9 +277,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -306,7 +301,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v0/*: any*/) ], diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/RefetchableClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/RefetchableClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge.graphql.js index 4c7792737c40d..dbb545d5052ac 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/RefetchableClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/RefetchableClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1b4b3613e8eb27e8eb87b478b107648b>> + * @generated SignedSource<<4b70286945a5d40ef704aca8d8ffb04f>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_FragmentResourceClientEdgesTestFragment1_client_edge", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/preloadQueryDEPRECATEDTest_ProvidedVarQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/preloadQueryDEPRECATEDTest_ProvidedVarQuery.graphql.js index adc6b146bd2d4..7fa7134a61359 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/__generated__/preloadQueryDEPRECATEDTest_ProvidedVarQuery.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/preloadQueryDEPRECATEDTest_ProvidedVarQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<222188c6acc87a632e09a7e966a14432>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -31,21 +31,19 @@ export type preloadQueryDEPRECATEDTest_ProvidedVarQuery = {| response: preloadQueryDEPRECATEDTest_ProvidedVarQuery$data, variables: preloadQueryDEPRECATEDTest_ProvidedVarQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_returnsFalserelayprovider: {| +get: () => boolean, |}, +__relay_internal__pv__RelayProvider_returnsTruerelayprovider: {| +get: () => boolean, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), - "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -203,7 +201,10 @@ return { "name": "preloadQueryDEPRECATEDTest_ProvidedVarQuery", "operationKind": "query", "text": "query preloadQueryDEPRECATEDTest_ProvidedVarQuery(\n $id: ID!\n $__relay_internal__pv__RelayProvider_returnsTruerelayprovider: Boolean!\n $__relay_internal__pv__RelayProvider_returnsFalserelayprovider: Boolean!\n) {\n node(id: $id) {\n __typename\n ...preloadQueryDEPRECATEDTest_ProvidedVarFragment\n id\n }\n}\n\nfragment preloadQueryDEPRECATEDTest_ProvidedVarFragment on User {\n 
name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n firstName @include(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n lastName @skip(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n username @skip(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') + } } }; })(); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest2Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest2Fragment.graphql.js index 33d57e5d4f3d2..7586664dd0365 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest2Fragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest2Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<169f364cafb7106b58e6f19eb668579e>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -104,7 +104,10 @@ return { "node" ], "operation": require('./useBlockingPaginationFragmentTest2FragmentPaginationQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useBlockingPaginationFragmentTest2Fragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest4Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest4Fragment.graphql.js index e745912d5c34d..3f3857ccea487 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest4Fragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTest4Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -42,7 +42,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./useBlockingPaginationFragmentTest4FragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useBlockingPaginationFragmentTest4Fragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTestUserFragment.graphql.js index 4678c0e486c85..a9362e3939613 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<69b22a79c5e55847626b125c376b0ceb>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -121,7 +121,10 @@ return { "node" ], "operation": require('./useBlockingPaginationFragmentTestUserFragmentPaginationQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useBlockingPaginationFragmentTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragment.graphql.js 
index 1099edc1b86ee..72b13a980cfa3 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7243d1a57efc233d9be96f249683ad02>> + * @generated SignedSource<<1a2c32db911019bfa94d781c718e11a6>> * @flow * @lightSyntaxTransform * @nogrep @@ -116,7 +116,10 @@ return { "node" ], "operation": require('./useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragmentPaginationQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useBlockingPaginationFragmentWithSuspenseTransitionTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest1Query.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest1Query.graphql.js new file mode 100644 index 0000000000000..8a6e622f0409d --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest1Query.graphql.js @@ -0,0 +1,100 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import {field_that_throws as queryFieldThatThrowsResolverType} from "../useFragment_nullability-test.js"; +// Type assertion validating that `queryFieldThatThrowsResolverType` resolver is correctly implemented. 
+// A type error here indicates that the type signature of the resolver module is incorrect. +(queryFieldThatThrowsResolverType: () => number); +export type useFragmentNullabilityTest1Query$variables = {||}; +export type useFragmentNullabilityTest1Query$data = {| + +field_that_throws: number, +|}; +export type useFragmentNullabilityTest1Query = {| + response: useFragmentNullabilityTest1Query$data, + variables: useFragmentNullabilityTest1Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "useFragmentNullabilityTest1Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "field_that_throws", + "resolverModule": require('./../useFragment_nullability-test').field_that_throws, + "path": "field_that_throws" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "useFragmentNullabilityTest1Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "field_that_throws", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "5dc12cf6a0aa364b0a4db301e63f1190", + "id": null, + "metadata": {}, + "name": "useFragmentNullabilityTest1Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "61fff2873123177b72b204296bd4c86f"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + useFragmentNullabilityTest1Query$variables, + useFragmentNullabilityTest1Query$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest2Query.graphql.js 
b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest2Query.graphql.js new file mode 100644 index 0000000000000..644a240bfc812 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest2Query.graphql.js @@ -0,0 +1,116 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<53be1d5034ad84b25f236af4706b73cc>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { useFragmentNullabilityTestFragmentWithFieldThatThrows$key } from "./useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql"; +import {field_with_fragment_that_throws as queryFieldWithFragmentThatThrowsResolverType} from "../useFragment_nullability-test.js"; +// Type assertion validating that `queryFieldWithFragmentThatThrowsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryFieldWithFragmentThatThrowsResolverType: ( + rootKey: useFragmentNullabilityTestFragmentWithFieldThatThrows$key, +) => number); +export type useFragmentNullabilityTest2Query$variables = {||}; +export type useFragmentNullabilityTest2Query$data = {| + +field_with_fragment_that_throws: number, +|}; +export type useFragmentNullabilityTest2Query = {| + response: useFragmentNullabilityTest2Query$data, + variables: useFragmentNullabilityTest2Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "useFragmentNullabilityTest2Query", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentNullabilityTestFragmentWithFieldThatThrows" + }, + "kind": "RelayResolver", + "name": "field_with_fragment_that_throws", + "resolverModule": require('./../useFragment_nullability-test').field_with_fragment_that_throws, + "path": "field_with_fragment_that_throws" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "useFragmentNullabilityTest2Query", + "selections": [ + { + "name": "field_with_fragment_that_throws", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "field_that_throws", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + }, + "params": { + "cacheID": "dc2a856d03fd42d985e8b68abf6ae426", + "id": null, + "metadata": {}, + "name": "useFragmentNullabilityTest2Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = 
"2abb5a27637c0fa0c8c03068f4148d51"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + useFragmentNullabilityTest2Query$variables, + useFragmentNullabilityTest2Query$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest3Query.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest3Query.graphql.js new file mode 100644 index 0000000000000..5dff45d57e6bb --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTest3Query.graphql.js @@ -0,0 +1,114 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { useFragmentNullabilityTestFragmentWithFieldThatThrows$key } from "./useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql"; +import {field_with_fragment_that_throws as queryFieldWithFragmentThatThrowsResolverType} from "../useFragment_nullability-test.js"; +// Type assertion validating that `queryFieldWithFragmentThatThrowsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryFieldWithFragmentThatThrowsResolverType: ( + rootKey: useFragmentNullabilityTestFragmentWithFieldThatThrows$key, +) => number); +export type useFragmentNullabilityTest3Query$variables = {||}; +export type useFragmentNullabilityTest3Query$data = {| + +field_with_fragment_that_throws: ?number, +|}; +export type useFragmentNullabilityTest3Query = {| + response: useFragmentNullabilityTest3Query$data, + variables: useFragmentNullabilityTest3Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "useFragmentNullabilityTest3Query", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentNullabilityTestFragmentWithFieldThatThrows" + }, + "kind": "RelayResolver", + "name": "field_with_fragment_that_throws", + "resolverModule": require('./../useFragment_nullability-test').field_with_fragment_that_throws, + "path": "field_with_fragment_that_throws" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "useFragmentNullabilityTest3Query", + "selections": [ + { + "name": "field_with_fragment_that_throws", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "field_that_throws", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + }, + "params": { + "cacheID": "562cd30e823ff158eed24ab0375df95c", + "id": null, + "metadata": {}, + "name": "useFragmentNullabilityTest3Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "10916d7ec8d66dc54d930f21305b5778"; +} + 
+module.exports = ((node/*: any*/)/*: ClientQuery< + useFragmentNullabilityTest3Query$variables, + useFragmentNullabilityTest3Query$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql.js new file mode 100644 index 0000000000000..56e624ffc81a4 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql.js @@ -0,0 +1,72 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<193e5f3bc0b67452579b51c914fb6729>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import {field_that_throws as queryFieldThatThrowsResolverType} from "../useFragment_nullability-test.js"; +// Type assertion validating that `queryFieldThatThrowsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryFieldThatThrowsResolverType: () => number); +declare export opaque type useFragmentNullabilityTestFragmentWithFieldThatThrows$fragmentType: FragmentType; +export type useFragmentNullabilityTestFragmentWithFieldThatThrows$data = {| + +field_that_throws: number, + +$fragmentType: useFragmentNullabilityTestFragmentWithFieldThatThrows$fragmentType, +|}; +export type useFragmentNullabilityTestFragmentWithFieldThatThrows$key = { + +$data?: useFragmentNullabilityTestFragmentWithFieldThatThrows$data, + +$fragmentSpreads: useFragmentNullabilityTestFragmentWithFieldThatThrows$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "throwOnFieldError": true + }, + "name": "useFragmentNullabilityTestFragmentWithFieldThatThrows", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayResolver", + "name": "field_that_throws", + "resolverModule": require('./../useFragment_nullability-test').field_that_throws, + "path": "field_that_throws" + } + ] + } + ], + "type": "Query", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "dea9fca9b23eeb0d2dccfe5c68da7c2c"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + useFragmentNullabilityTestFragmentWithFieldThatThrows$fragmentType, + useFragmentNullabilityTestFragmentWithFieldThatThrows$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Fragment.graphql.js new file mode 100644 index 0000000000000..7a6345f8909fa --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Fragment.graphql.js @@ -0,0 +1,69 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<488137e059826e157a95ea8a087abedd>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type useFragmentWithOperationTrackerSuspenseTest2Fragment$fragmentType: FragmentType; +export type useFragmentWithOperationTrackerSuspenseTest2Fragment$data = $ReadOnlyArray<{| + +id: string, + +name: ?string, + +$fragmentType: useFragmentWithOperationTrackerSuspenseTest2Fragment$fragmentType, +|}>; +export type useFragmentWithOperationTrackerSuspenseTest2Fragment$key = $ReadOnlyArray<{ + +$data?: useFragmentWithOperationTrackerSuspenseTest2Fragment$data, + +$fragmentSpreads: useFragmentWithOperationTrackerSuspenseTest2Fragment$fragmentType, + ... 
+}>; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "plural": true + }, + "name": "useFragmentWithOperationTrackerSuspenseTest2Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "ed248350d2cf54c3301858051ee10660"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + useFragmentWithOperationTrackerSuspenseTest2Fragment$fragmentType, + useFragmentWithOperationTrackerSuspenseTest2Fragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Query.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Query.graphql.js new file mode 100644 index 0000000000000..817b255ecca78 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTest2Query.graphql.js @@ -0,0 +1,146 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<1334d6dcfae83bd81ffbe11675646b38>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { useFragmentWithOperationTrackerSuspenseTest2Fragment$fragmentType } from "./useFragmentWithOperationTrackerSuspenseTest2Fragment.graphql"; +export type useFragmentWithOperationTrackerSuspenseTest2Query$variables = {| + ids: $ReadOnlyArray, +|}; +export type useFragmentWithOperationTrackerSuspenseTest2Query$data = {| + +nodes: ?$ReadOnlyArray, +|}; +export type useFragmentWithOperationTrackerSuspenseTest2Query = {| + response: useFragmentWithOperationTrackerSuspenseTest2Query$data, + variables: useFragmentWithOperationTrackerSuspenseTest2Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "ids" + } +], +v1 = [ + { + "kind": "Variable", + "name": "ids", + "variableName": "ids" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "useFragmentWithOperationTrackerSuspenseTest2Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "nodes", + "plural": true, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentWithOperationTrackerSuspenseTest2Fragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "useFragmentWithOperationTrackerSuspenseTest2Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": 
"LinkedField", + "name": "nodes", + "plural": true, + "selections": [ + (v2/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "4cb26a7bcb405059c32fa70e0c47fe25", + "id": null, + "metadata": {}, + "name": "useFragmentWithOperationTrackerSuspenseTest2Query", + "operationKind": "query", + "text": "query useFragmentWithOperationTrackerSuspenseTest2Query(\n $ids: [ID!]!\n) {\n nodes(ids: $ids) {\n __typename\n ...useFragmentWithOperationTrackerSuspenseTest2Fragment\n id\n }\n}\n\nfragment useFragmentWithOperationTrackerSuspenseTest2Fragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "06302c0cd4cc001ebbdab28a2512483d"; +} + +module.exports = ((node/*: any*/)/*: Query< + useFragmentWithOperationTrackerSuspenseTest2Query$variables, + useFragmentWithOperationTrackerSuspenseTest2Query$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestFragment.graphql.js new file mode 100644 index 0000000000000..88af461adf3ff --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestFragment.graphql.js @@ -0,0 +1,67 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType: FragmentType; +export type useFragmentWithOperationTrackerSuspenseTestFragment$data = {| + +id: string, + +name: ?string, + +$fragmentType: useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType, +|}; +export type useFragmentWithOperationTrackerSuspenseTestFragment$key = { + +$data?: useFragmentWithOperationTrackerSuspenseTestFragment$data, + +$fragmentSpreads: useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "useFragmentWithOperationTrackerSuspenseTestFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "e67c0ce55318d8914b7c4ac00075e14e"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType, + useFragmentWithOperationTrackerSuspenseTestFragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestQuery.graphql.js new file mode 100644 index 0000000000000..5d366ce3fdbed --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestQuery.graphql.js @@ -0,0 +1,146 @@ 
+/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType } from "./useFragmentWithOperationTrackerSuspenseTestFragment.graphql"; +export type useFragmentWithOperationTrackerSuspenseTestQuery$variables = {| + id: string, +|}; +export type useFragmentWithOperationTrackerSuspenseTestQuery$data = {| + +node: ?{| + +__typename: string, + +$fragmentSpreads: useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType, + |}, +|}; +export type useFragmentWithOperationTrackerSuspenseTestQuery = {| + response: useFragmentWithOperationTrackerSuspenseTestQuery$data, + variables: useFragmentWithOperationTrackerSuspenseTestQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "useFragmentWithOperationTrackerSuspenseTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentWithOperationTrackerSuspenseTestFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + 
"operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "useFragmentWithOperationTrackerSuspenseTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "71ebeb7db09a62e0d966856af3751c81", + "id": null, + "metadata": {}, + "name": "useFragmentWithOperationTrackerSuspenseTestQuery", + "operationKind": "query", + "text": "query useFragmentWithOperationTrackerSuspenseTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...useFragmentWithOperationTrackerSuspenseTestFragment\n id\n }\n}\n\nfragment useFragmentWithOperationTrackerSuspenseTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "d9a040d89152cefea35ea45be32edaa5"; +} + +module.exports = ((node/*: any*/)/*: Query< + useFragmentWithOperationTrackerSuspenseTestQuery$variables, + useFragmentWithOperationTrackerSuspenseTestQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js new file mode 100644 index 0000000000000..4d4742c930817 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery.graphql.js @@ -0,0 +1,286 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<66d13cb0c31403be58da98b3df0c1c47>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { useFragmentWithOperationTrackerSuspenseTestFragment$fragmentType } from "./useFragmentWithOperationTrackerSuspenseTestFragment.graphql"; +export type useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$variables = {||}; +export type useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$data = {| + +viewer: ?{| + +actor: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, + |}, +|}; +export type useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery = {| + response: useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$data, + variables: useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null +}, +v3 = [ + { + "kind": "Literal", + "name": "first", + "value": 1 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": 
"Fragment", + "metadata": null, + "name": "useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "__Viewer_friends_connection", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentWithOperationTrackerSuspenseTestFragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Viewer", + "kind": "LinkedField", + "name": "viewer", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "actor", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": 
"LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": "friends(first:1)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "Viewer_friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "324797efa803121bc414ac02c0869b8e", + "id": null, + "metadata": { + "connection": [ + { + "count": null, + "cursor": null, + "direction": "forward", + "path": [ + "viewer", + "actor", + "friends" + ] + } + ] + }, + "name": "useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery", + "operationKind": "query", + "text": "query useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery {\n viewer {\n actor {\n __typename\n friends(first: 1) {\n edges {\n node {\n ...useFragmentWithOperationTrackerSuspenseTestFragment\n id\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n }\n }\n}\n\nfragment useFragmentWithOperationTrackerSuspenseTestFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "45c401e9c5820b46ccf333e9b64bfe28"; +} + +module.exports = ((node/*: any*/)/*: Query< + useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$variables, + useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestQuery.graphql.js 
b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestQuery.graphql.js new file mode 100644 index 0000000000000..b712fb7ca1959 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestQuery.graphql.js @@ -0,0 +1,150 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { useFragmentWithRequiredTestUserFragment$fragmentType } from "./useFragmentWithRequiredTestUserFragment.graphql"; +export type useFragmentWithRequiredTestQuery$variables = {| + id: string, +|}; +export type useFragmentWithRequiredTestQuery$data = {| + +node: ?{| + +$fragmentSpreads: useFragmentWithRequiredTestUserFragment$fragmentType, + |}, +|}; +export type useFragmentWithRequiredTestQuery = {| + response: useFragmentWithRequiredTestQuery$data, + variables: useFragmentWithRequiredTestQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "useFragmentWithRequiredTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "useFragmentWithRequiredTestUserFragment" + } + ], + "type": "User", + "abstractKey": null + } + ], + 
"storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "useFragmentWithRequiredTestQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "d92c3958e46586a5b24ead47bb7f2a33", + "id": null, + "metadata": {}, + "name": "useFragmentWithRequiredTestQuery", + "operationKind": "query", + "text": "query useFragmentWithRequiredTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n ...useFragmentWithRequiredTestUserFragment\n }\n id\n }\n}\n\nfragment useFragmentWithRequiredTestUserFragment on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "66a4cfb191113d8dc82023073e6a8884"; +} + +module.exports = ((node/*: any*/)/*: Query< + useFragmentWithRequiredTestQuery$variables, + useFragmentWithRequiredTestQuery$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestUserFragment.graphql.js new file mode 100644 index 0000000000000..e4b543c29f6cf --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useFragmentWithRequiredTestUserFragment.graphql.js @@ -0,0 +1,64 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<262ea8ff7a0a3ff34d7fa1f80f2cea04>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type useFragmentWithRequiredTestUserFragment$fragmentType: FragmentType; +export type useFragmentWithRequiredTestUserFragment$data = ?{| + +name: string, + +$fragmentType: useFragmentWithRequiredTestUserFragment$fragmentType, +|}; +export type useFragmentWithRequiredTestUserFragment$key = { + +$data?: useFragmentWithRequiredTestUserFragment$data, + +$fragmentSpreads: useFragmentWithRequiredTestUserFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "useFragmentWithRequiredTestUserFragment", + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + "action": "LOG", + "path": "name" + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "9e3297104a693133e2546618d76ce8d8"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + useFragmentWithRequiredTestUserFragment$fragmentType, + useFragmentWithRequiredTestUserFragment$data, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTest3Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTest3Fragment.graphql.js index 6fafdbca83c76..eb905e486855e 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTest3Fragment.graphql.js +++ 
b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTest3Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<06910f3103bbd9cacf1d49c8ceb423d0>> + * @generated SignedSource<<287a510a57f147dc9086ffd1d27ddaae>> * @flow * @lightSyntaxTransform * @nogrep @@ -42,7 +42,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./usePaginationFragmentTest3FragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "usePaginationFragmentTest3Fragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestStoryFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestStoryFragment.graphql.js index dfd166b6b4bf7..b5449c614b58f 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestStoryFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestStoryFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<78d2548c61fb6ef49166b87871b6e52c>> * @flow * @lightSyntaxTransform * @nogrep @@ -80,7 +80,10 @@ return { "fetch__NonNodeStory" ], "operation": require('./usePaginationFragmentTestStoryFragmentRefetchQuery.graphql'), - "identifierField": "fetch_id" + "identifierInfo": { + "identifierField": "fetch_id", + "identifierQueryVariableName": "id" + } } }, "name": "usePaginationFragmentTestStoryFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragment.graphql.js index d0c207e77edb9..5917ddb113524 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragment.graphql.js +++ 
b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<38db53050ac518827b1a14880489e2fb>> * @flow * @lightSyntaxTransform * @nogrep @@ -121,7 +121,10 @@ return { "node" ], "operation": require('./usePaginationFragmentTestUserFragmentPaginationQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "usePaginationFragmentTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragmentWithStreaming.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragmentWithStreaming.graphql.js index f9d61f3ac5d40..04e020b63add3 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragmentWithStreaming.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/usePaginationFragmentTestUserFragmentWithStreaming.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8fac414c6ad6a4410aa7b98ba736f4b7>> + * @generated SignedSource<<43148d01576a91b8c9b8380b998b1063>> * @flow * @lightSyntaxTransform * @nogrep @@ -122,7 +122,10 @@ return { "node" ], "operation": require('./usePaginationFragmentTestUserFragmentStreamingPaginationQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "usePaginationFragmentTestUserFragmentWithStreaming", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_Query.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_Query.graphql.js index b6951b0f04581..9d2d1501ed299 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_Query.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<1bd7619ea2f07d3500636a825112fa19>> * @flow * @lightSyntaxTransform * @nogrep @@ -32,21 +32,19 @@ export type usePreloadedQueryProvidedVariablesTest_Query = {| response: usePreloadedQueryProvidedVariablesTest_Query$data, variables: usePreloadedQueryProvidedVariablesTest_Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_returnsFalserelayprovider: {| +get: () => boolean, |}, +__relay_internal__pv__RelayProvider_returnsTruerelayprovider: {| +get: () => boolean, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), - "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -206,7 +204,10 @@ return { "name": "usePreloadedQueryProvidedVariablesTest_Query", "operationKind": "query", "text": "query usePreloadedQueryProvidedVariablesTest_Query(\n $id: ID!\n $__relay_internal__pv__RelayProvider_returnsTruerelayprovider: Boolean!\n $__relay_internal__pv__RelayProvider_returnsFalserelayprovider: Boolean!\n) {\n node(id: $id) {\n __typename\n id\n ...usePreloadedQueryProvidedVariablesTest_Fragment\n }\n}\n\nfragment usePreloadedQueryProvidedVariablesTest_Fragment on 
User {\n name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n firstName @include(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n lastName @skip(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n username @skip(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') + } } }; })(); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_badQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_badQuery.graphql.js index 5e30e1a2cde3e..3aae2d82cff4d 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_badQuery.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/usePreloadedQueryProvidedVariablesTest_badQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<940e03983e6e001ebaf61ce67f5941e5>> * @flow * @lightSyntaxTransform * @nogrep @@ -31,17 +31,15 @@ export type usePreloadedQueryProvidedVariablesTest_badQuery = {| response: usePreloadedQueryProvidedVariablesTest_badQuery$data, variables: usePreloadedQueryProvidedVariablesTest_badQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_impurerelayprovider": require('./../RelayProvider_impure.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_impurerelayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - 
"__relay_internal__pv__RelayProvider_impurerelayprovider": require('./../RelayProvider_impure.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -162,7 +160,9 @@ return { "name": "usePreloadedQueryProvidedVariablesTest_badQuery", "operationKind": "query", "text": "query usePreloadedQueryProvidedVariablesTest_badQuery(\n $id: ID!\n $__relay_internal__pv__RelayProvider_impurerelayprovider: Float!\n) {\n node(id: $id) {\n __typename\n ...usePreloadedQueryProvidedVariablesTest_badFragment\n id\n }\n}\n\nfragment usePreloadedQueryProvidedVariablesTest_badFragment on User {\n profile_picture(scale: $__relay_internal__pv__RelayProvider_impurerelayprovider) {\n uri\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_impurerelayprovider": require('./../RelayProvider_impure.relayprovider') + } } }; })(); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest1Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest1Fragment.graphql.js index 82356118917e1..15d027a463743 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest1Fragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest1Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<7c2c05ec0d836f9fab77060b8dc4447e>> * @flow * @lightSyntaxTransform * @nogrep @@ -46,7 +46,10 @@ var node/*: ReaderFragment*/ = { "fetch__NonNodeStory" ], "operation": require('./useRefetchableFragmentNodeTest1FragmentRefetchQuery.graphql'), - "identifierField": "fetch_id" + "identifierInfo": { + "identifierField": "fetch_id", + "identifierQueryVariableName": "id" + } } }, "name": "useRefetchableFragmentNodeTest1Fragment", diff --git 
a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest3Fragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest3Fragment.graphql.js index 4d9e5c135e1b0..82c8159aae111 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest3Fragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTest3Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<9a8e6d62d25a5d79047dcc1db10f00d4>> * @flow * @lightSyntaxTransform * @nogrep @@ -53,7 +53,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./useRefetchableFragmentNodeTest3FragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useRefetchableFragmentNodeTest3Fragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragment.graphql.js new file mode 100644 index 0000000000000..c644fc38ae23c --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragment.graphql.js @@ -0,0 +1,113 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<5ab2198beca4c50643d04bfb5df90de3>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ReaderFragment, RefetchableFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType: FragmentType; +type useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$variables = any; +export type useRefetchableFragmentNodeTestIdentityTestFragment$data = {| + +id: string, + +name: ?string, + +profile_picture: ?{| + +uri: ?string, + |}, + +$fragmentType: useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType, +|}; +export type useRefetchableFragmentNodeTestIdentityTestFragment$key = { + +$data?: useRefetchableFragmentNodeTestIdentityTestFragment$data, + +$fragmentSpreads: useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [ + { + "kind": "RootArgument", + "name": "scale" + } + ], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('./useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "useRefetchableFragmentNodeTestIdentityTestFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "scale", + "variableName": "scale" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profile_picture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + 
"kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "0b309ceb5fea8ea44abb827cce31328b"; +} + +module.exports = ((node/*: any*/)/*: RefetchableFragment< + useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType, + useRefetchableFragmentNodeTestIdentityTestFragment$data, + useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$variables, +>*/); diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery.graphql.js new file mode 100644 index 0000000000000..fa851a9ce67e2 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery.graphql.js @@ -0,0 +1,178 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +import type { useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType } from "./useRefetchableFragmentNodeTestIdentityTestFragment.graphql"; +export type useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$variables = {| + id: string, + scale?: ?number, +|}; +export type useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$data = {| + +node: ?{| + +$fragmentSpreads: useRefetchableFragmentNodeTestIdentityTestFragment$fragmentType, + |}, +|}; +export type useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery = {| + response: useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$data, + variables: useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" +}, +v1 = { + "defaultValue": null, + "kind": "LocalArgument", + "name": "scale" +}, +v2 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +]; +return { + "fragment": { + "argumentDefinitions": [ + (v0/*: any*/), + (v1/*: any*/) + ], + "kind": "Fragment", + "metadata": null, + "name": "useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery", + "selections": [ + { + "alias": null, + "args": (v2/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "useRefetchableFragmentNodeTestIdentityTestFragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [ + (v1/*: any*/), + (v0/*: any*/) + ], + "kind": "Operation", + 
"name": "useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery", + "selections": [ + { + "alias": null, + "args": (v2/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "scale", + "variableName": "scale" + } + ], + "concreteType": "Image", + "kind": "LinkedField", + "name": "profile_picture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "56cdd0ad080963adb4022f3886d0a160", + "id": null, + "metadata": {}, + "name": "useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery", + "operationKind": "query", + "text": "query useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery(\n $scale: Float\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...useRefetchableFragmentNodeTestIdentityTestFragment\n id\n }\n}\n\nfragment useRefetchableFragmentNodeTestIdentityTestFragment on User {\n id\n name\n profile_picture(scale: $scale) {\n uri\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "0b309ceb5fea8ea44abb827cce31328b"; +} + +module.exports = ((node/*: any*/)/*: Query< + useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$variables, + useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery$data, +>*/); diff --git 
a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragment.graphql.js index b7ae7de39b77a..9d9096b5c9bd2 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -53,7 +53,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./useRefetchableFragmentNodeTestUserFragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useRefetchableFragmentNodeTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragmentWithArgs.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragmentWithArgs.graphql.js index cf380bbfab984..707eb03786bd3 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragmentWithArgs.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeTestUserFragmentWithArgs.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7794d486d753cec98e1680f705d910b6>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -54,7 +54,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./useRefetchableFragmentNodeTestUserFragmentWithArgsRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": 
"useRefetchableFragmentNodeTestUserFragmentWithArgs", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragment.graphql.js index 9662abdb4c3fc..2f365bbc8f868 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<2a7b050b23f0fb4fbccf0e8e669e4dd8>> + * @generated SignedSource<<78ea09cf1853f6f29afab6b209aea558>> * @flow * @lightSyntaxTransform * @nogrep @@ -53,7 +53,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useRefetchableFragmentNodeWithSuspenseTransitionTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentTestUserFragment.graphql.js b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentTestUserFragment.graphql.js index 125213c163d20..521a072aa1c07 100644 --- a/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentTestUserFragment.graphql.js +++ b/packages/react-relay/relay-hooks/__tests__/__generated__/useRefetchableFragmentTestUserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<799d1c5f3e177c392081edd3d9fc38f9>> * @flow * @lightSyntaxTransform * @nogrep @@ -53,7 +53,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": 
require('./useRefetchableFragmentTestUserFragmentRefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "useRefetchableFragmentTestUserFragment", diff --git a/packages/react-relay/relay-hooks/__tests__/loadQuery-source-behavior-test.js b/packages/react-relay/relay-hooks/__tests__/loadQuery-source-behavior-test.js index b2f7762daf24b..60e1ebacb9b73 100644 --- a/packages/react-relay/relay-hooks/__tests__/loadQuery-source-behavior-test.js +++ b/packages/react-relay/relay-hooks/__tests__/loadQuery-source-behavior-test.js @@ -15,7 +15,8 @@ import type { LoadQueryOptions, PreloadableConcreteRequest, } from '../EntryPointTypes.flow'; -import type {GraphQLTaggedNode, OperationType} from 'relay-runtime'; +import type {loadQuerySourceBehaviorTestQuery} from './__generated__/loadQuerySourceBehaviorTestQuery.graphql'; +import type {OperationType, Query} from 'relay-runtime'; import type {GraphQLResponse} from 'relay-runtime/network/RelayNetworkTypes'; const {loadQuery} = require('../loadQuery'); @@ -43,10 +44,11 @@ const query = graphql` } `; -const preloadableConcreteRequest = { - kind: 'PreloadableConcreteRequest', - params: query.params, -}; +const preloadableConcreteRequest: PreloadableConcreteRequest = + { + kind: 'PreloadableConcreteRequest', + params: query.params, + }; const response = { data: { @@ -148,11 +150,7 @@ beforeEach(() => { }); writeDataToStore = () => { - loadQuery<$FlowFixMe, _>( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); sink.next(response); sink.complete(); PreloadableQueryRegistry.set(ID, query); @@ -163,12 +161,14 @@ beforeEach(() => { PreloadableQueryRegistry.clear(); }; - callLoadQuery = ( - queryAstOrRequest: GraphQLTaggedNode | PreloadableConcreteRequest, + callLoadQuery = ( + queryAstOrRequest: + | Query + | PreloadableConcreteRequest, options?: LoadQueryOptions, // 
$FlowFixMe[missing-local-annot] ) => { - const loadedQuery = loadQuery<$FlowFixMe, _>( + const loadedQuery = loadQuery( environment, queryAstOrRequest, variables, @@ -425,7 +425,7 @@ describe('when passed a PreloadableConcreteRequest', () => { // calls to load will get disposed // Start initial load of query - const queryRef1 = loadQuery<$FlowFixMe, _>( + const queryRef1 = loadQuery( environment, preloadableConcreteRequest, variables, @@ -446,7 +446,7 @@ describe('when passed a PreloadableConcreteRequest', () => { expect(environment.executeWithSource).toBeCalledTimes(1); // Start second load of query - const queryRef2 = loadQuery<$FlowFixMe, _>( + const queryRef2 = loadQuery( environment, preloadableConcreteRequest, variables, @@ -487,7 +487,7 @@ describe('when passed a PreloadableConcreteRequest', () => { describe('when passed a query AST', () => { it('should pass network responses onto source', () => { - callLoadQuery(query); + callLoadQuery(query); expect(next).not.toHaveBeenCalled(); sink.next(response); @@ -495,7 +495,7 @@ describe('when passed a query AST', () => { }); it('should pass network errors onto source', () => { - callLoadQuery(query); + callLoadQuery(query); expect(error).not.toHaveBeenCalled(); sink.error(networkError); @@ -504,14 +504,14 @@ describe('when passed a query AST', () => { describe('when dispose is called before the network response is available', () => { it('should not pass network responses onto source', () => { - const {dispose} = callLoadQuery(query); + const {dispose} = callLoadQuery(query); dispose(); sink.next(response); expect(next).not.toHaveBeenCalled(); }); it('should not pass network errors onto source', done => { - const {dispose} = callLoadQuery(query); + const {dispose} = callLoadQuery(query); dispose(); diff --git a/packages/react-relay/relay-hooks/__tests__/loadQuery-store-behavior-test.js b/packages/react-relay/relay-hooks/__tests__/loadQuery-store-behavior-test.js index 977be235ff865..060a19b4632aa 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/loadQuery-store-behavior-test.js +++ b/packages/react-relay/relay-hooks/__tests__/loadQuery-store-behavior-test.js @@ -19,13 +19,15 @@ import type { CacheConfig, Variables, } from '../../../relay-runtime/util/RelayRuntimeTypes'; +import type {PreloadableConcreteRequest} from '../EntryPointTypes.flow'; import type { + loadQueryStoreBehaviorTestQuery, loadQueryStoreBehaviorTestQuery$data, loadQueryStoreBehaviorTestQuery$variables, } from './__generated__/loadQueryStoreBehaviorTestQuery.graphql'; import type {GraphQLSingularResponse} from 'relay-runtime/network/RelayNetworkTypes'; import type {Sink} from 'relay-runtime/network/RelayObservable'; -import type {OperationType, Query} from 'relay-runtime/util/RelayRuntimeTypes'; +import type {Query} from 'relay-runtime/util/RelayRuntimeTypes'; const {loadQuery} = require('../loadQuery'); const { @@ -57,10 +59,11 @@ const query = graphql` const ID = '12345'; (query.params: $FlowFixMe).id = ID; -const preloadableConcreteRequest = { - kind: 'PreloadableConcreteRequest', - params: query.params, -}; +const preloadableConcreteRequest: PreloadableConcreteRequest = + { + kind: 'PreloadableConcreteRequest', + params: query.params, + }; const response: GraphQLSingularResponse = { data: { @@ -130,11 +133,7 @@ beforeEach(() => { .mockImplementation(() => resolvedModule); writeDataToStore = () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); sink.next(response); sink.complete(); PreloadableQueryRegistry.set(ID, query); @@ -157,11 +156,7 @@ describe('when passed a PreloadableConcreteRequest', () => { }); it('should write the data to the store after the query AST and network response are available', () => { expect(store.check(operation).status).toBe('missing'); - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, 
variables); expect(fetch).toHaveBeenCalled(); expect(store.check(operation).status).toBe('missing'); PreloadableQueryRegistry.set(ID, query); @@ -172,11 +167,7 @@ describe('when passed a PreloadableConcreteRequest', () => { it('should write the data to the store after the network response and query AST are available', () => { expect(store.check(operation).status).toBe('missing'); - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); expect(store.check(operation).status).toBe('missing'); sink.next(response); expect(store.check(operation).status).toBe('missing'); @@ -186,7 +177,7 @@ describe('when passed a PreloadableConcreteRequest', () => { it('should not write the data to the store if dispose is called before the query AST and network response are available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -200,7 +191,7 @@ describe('when passed a PreloadableConcreteRequest', () => { it('should not write the data to the store if dispose is called before the network response and query AST are available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -214,7 +205,7 @@ describe('when passed a PreloadableConcreteRequest', () => { it('should not write the data to the store if dispose is called after the query AST is available, but before the network response is available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -228,7 +219,7 @@ describe('when passed a PreloadableConcreteRequest', () => { it('should not write the data to the store if dispose is called after the network response is 
available, but before the query AST is available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -244,18 +235,14 @@ describe('when passed a PreloadableConcreteRequest', () => { describe('when the query AST is available synchronously', () => { it('should write data to the store when the network response is available', () => { expect(store.check(operation).status).toBe('missing'); - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); sink.next(response); expect(store.check(operation).status).toBe('available'); }); it('should not write data to the store if dispose is called before the network response is available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -271,14 +258,9 @@ describe('when passed a PreloadableConcreteRequest', () => { beforeEach(() => writeDataToStore()); describe('when the query AST is available synchronously', () => { it('should write updated data to the store when the network response is available', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - { - fetchPolicy: 'network-only', - }, - ); + loadQuery(environment, preloadableConcreteRequest, variables, { + fetchPolicy: 'network-only', + }); expect( (store.lookup(operation.fragment): $FlowFixMe)?.data?.node?.name, @@ -290,7 +272,7 @@ describe('when passed a PreloadableConcreteRequest', () => { }); it('should not write updated data to the store if dispose is called before the network response is available', () => { - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -311,14 +293,9 @@ describe('when passed a PreloadableConcreteRequest', () => { 
resolvedModule = undefined; }); it('should write updated data to the store when the network response and query AST are available', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - { - fetchPolicy: 'network-only', - }, - ); + loadQuery(environment, preloadableConcreteRequest, variables, { + fetchPolicy: 'network-only', + }); expect( (store.lookup(operation.fragment): $FlowFixMe)?.data?.node?.name, @@ -334,14 +311,9 @@ describe('when passed a PreloadableConcreteRequest', () => { ).toEqual('Mark'); }); it('should write updated data to the store when the query AST and network response are available', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - { - fetchPolicy: 'network-only', - }, - ); + loadQuery(environment, preloadableConcreteRequest, variables, { + fetchPolicy: 'network-only', + }); expect( (store.lookup(operation.fragment): $FlowFixMe)?.data?.node?.name, @@ -358,7 +330,7 @@ describe('when passed a PreloadableConcreteRequest', () => { }); it('should not write updated data to the store if dispose is called before the network response and query AST are available', () => { - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -379,7 +351,7 @@ describe('when passed a PreloadableConcreteRequest', () => { }); it('should not write updated data to the store if dispose is called before the query AST and network response are available', () => { - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -400,7 +372,7 @@ describe('when passed a PreloadableConcreteRequest', () => { }); it('should not write updated data to the store if dispose is called after the query AST is available and before the network response is available', () => { - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -424,7 +396,7 @@ describe('when passed 
a PreloadableConcreteRequest', () => { }); it('should not write updated data to the store if dispose is called after ·the network repsonse is available and before the query AST is available', () => { - const {dispose} = loadQuery( + const {dispose} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -454,14 +426,14 @@ describe('when passed a query AST', () => { describe('when data is unavailable in the store', () => { it('should write data to the store when the network response is available', () => { expect(store.check(operation).status).toBe('missing'); - loadQuery(environment, query, variables); + loadQuery(environment, query, variables); sink.next(response); expect(store.check(operation).status).toBe('available'); }); it('should not write data to the store if dispose is called before the network response is available', () => { expect(store.check(operation).status).toBe('missing'); - const {dispose} = loadQuery(environment, query, variables); + const {dispose} = loadQuery(environment, query, variables); dispose(); sink.next(response); expect(store.check(operation).status).toBe('missing'); @@ -470,7 +442,7 @@ describe('when passed a query AST', () => { describe("when data is available in the store, but the fetch policy is 'network-only'", () => { beforeEach(() => writeDataToStore()); it('should write updated data to the store when the network response is available', () => { - loadQuery(environment, query, variables, { + loadQuery(environment, query, variables, { fetchPolicy: 'network-only', }); @@ -484,14 +456,9 @@ describe('when passed a query AST', () => { }); it('should not write updated data to the store if dispose is called before the network response is available', () => { - const {dispose} = loadQuery( - environment, - query, - variables, - { - fetchPolicy: 'network-only', - }, - ); + const {dispose} = loadQuery(environment, query, variables, { + fetchPolicy: 'network-only', + }); dispose(); sink.next(updatedResponse); diff --git 
a/packages/react-relay/relay-hooks/__tests__/loadQuery-test.js b/packages/react-relay/relay-hooks/__tests__/loadQuery-test.js index 08e6d66e31e78..13b6a11f21b97 100644 --- a/packages/react-relay/relay-hooks/__tests__/loadQuery-test.js +++ b/packages/react-relay/relay-hooks/__tests__/loadQuery-test.js @@ -10,20 +10,20 @@ */ 'use strict'; +import type {PreloadableConcreteRequest} from '../EntryPointTypes.flow'; import type { - LogRequestInfoFunction, - UploadableMap, -} from '../../../relay-runtime/network/RelayNetworkTypes'; -import type {RequestParameters} from '../../../relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from '../../../relay-runtime/util/RelayRuntimeTypes'; -import type { + loadQueryTestQuery, loadQueryTestQuery$data, loadQueryTestQuery$variables, } from './__generated__/loadQueryTestQuery.graphql'; -import type {OperationType, Query} from 'relay-runtime/util/RelayRuntimeTypes'; +import type { + CacheConfig, + LogRequestInfoFunction, + Query, + RequestParameters, + UploadableMap, + Variables, +} from 'relay-runtime'; const {loadQuery, useTrackLoadQueryInRender} = require('../loadQuery'); // Need React require for OSS build @@ -60,10 +60,11 @@ describe('loadQuery', () => { (query.params: $FlowFixMe).id = ID; (query.params: $FlowFixMe).cacheID = ID; - const preloadableConcreteRequest = { - kind: 'PreloadableConcreteRequest', - params: query.params, - }; + const preloadableConcreteRequest: PreloadableConcreteRequest = + { + kind: 'PreloadableConcreteRequest', + params: query.params, + }; const response = { data: { @@ -189,29 +190,21 @@ describe('loadQuery', () => { describe('when passed a PreloadableConcreteRequest', () => { it('checks whether the query ast is available synchronously', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); // $FlowFixMe[method-unbinding] added when improving typing for this parameters 
expect(PreloadableQueryRegistry.get).toHaveBeenCalled(); }); describe('when the query AST is available synchronously', () => { it('synchronously checks whether the query can be fulfilled by the store', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.check).toHaveBeenCalled(); }); describe("with fetchPolicy === 'store-or-network'", () => { it('should not call fetch if the query can be fulfilled by the store', () => { - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -233,7 +226,7 @@ describe('loadQuery', () => { mockAvailability = {status: 'missing'}; }); it('makes a network request', done => { - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -275,7 +268,7 @@ describe('loadQuery', () => { }); it('should mark failed network requests', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -294,7 +287,7 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from executeWithSource', () => { // This ensures that no data is written to the store - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -337,7 +330,7 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from the network request', () => { // This ensures that live queries stop issuing network requests - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -357,7 +350,7 @@ describe('loadQuery', () => { describe("with fetchPolicy === 'store-only'", () => { it('should not call fetch if the query can be fulfilled by the store', () => { - const 
{source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -376,7 +369,7 @@ describe('loadQuery', () => { it('should not call fetch if the query cannot be fulfilled by the store', () => { mockAvailability = {status: 'missing'}; - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -394,7 +387,7 @@ describe('loadQuery', () => { }); it('calling dispose releases the query', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -413,7 +406,7 @@ describe('loadQuery', () => { resolvedModule = null; }); it('should make a network request', done => { - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -456,7 +449,7 @@ describe('loadQuery', () => { expect(nextCallback).toHaveBeenCalledWith(response); }); it('should mark failed network requests', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -475,7 +468,7 @@ describe('loadQuery', () => { it('calling dispose after the AST loads unsubscribes from executeWithSource', () => { // This ensures that no data is written to the store - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -523,7 +516,7 @@ describe('loadQuery', () => { it('calling dispose after the AST loads unsubscribes from the network request', () => { // This ensures that live queries stop issuing network requests - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -541,7 +534,7 @@ describe('loadQuery', () => { }); it('calling dispose before the AST loads clears the onLoad callback', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, 
preloadableConcreteRequest, variables, @@ -564,11 +557,7 @@ describe('loadQuery', () => { }); it('passes a callback to onLoad that calls executeWithSource', () => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - ); + loadQuery(environment, preloadableConcreteRequest, variables); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.executeWithSource).not.toHaveBeenCalled(); executeOnloadCallback(query); @@ -593,7 +582,7 @@ describe('loadQuery', () => { describe("with fetchPolicy === 'store-only'", () => { it('should not call fetch if the query can be fulfilled by the store', () => { - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -612,7 +601,7 @@ describe('loadQuery', () => { it('should not call fetch if the query cannot be fulfilled by the store', () => { mockAvailability = {status: 'missing'}; - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -630,7 +619,7 @@ describe('loadQuery', () => { }); it('calling dispose releases the query', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -646,13 +635,13 @@ describe('loadQuery', () => { describe('when passed a query AST', () => { it('checks whether the query can be fulfilled by the store synchronously', () => { - loadQuery(environment, query, variables); + loadQuery(environment, query, variables); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.check).toHaveBeenCalled(); }); describe('when the query can be fulfilled by the store', () => { it("when fetchPolicy === 'store-or-network', it avoids a network request", () => { - loadQuery(environment, query, variables, { + loadQuery(environment, query, variables, { fetchPolicy: 'store-or-network', }); expect(fetch).not.toHaveBeenCalled(); @@ 
-665,14 +654,9 @@ describe('loadQuery', () => { describe("when fetchPolicy === 'network-only'", () => { it('should make a network request', done => { - const {source} = loadQuery( - environment, - query, - variables, - { - fetchPolicy: 'network-only', - }, - ); + const {source} = loadQuery(environment, query, variables, { + fetchPolicy: 'network-only', + }); const nextCallback = jest.fn(() => done()); if (source) { // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file @@ -705,14 +689,9 @@ describe('loadQuery', () => { }); it('should mark failed network requests', () => { - const preloadedQuery = loadQuery( - environment, - query, - variables, - { - fetchPolicy: 'network-only', - }, - ); + const preloadedQuery = loadQuery(environment, query, variables, { + fetchPolicy: 'network-only', + }); expect(preloadedQuery.networkError).toBeNull(); @@ -724,14 +703,9 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from environment.executeWithSource', () => { // This ensures that no data is written to the store - const preloadedQuery = loadQuery( - environment, - query, - variables, - { - fetchPolicy: 'network-only', - }, - ); + const preloadedQuery = loadQuery(environment, query, variables, { + fetchPolicy: 'network-only', + }); expect(fetch).toHaveBeenCalled(); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.executeWithSource).toHaveBeenCalledTimes(1); @@ -768,14 +742,9 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from the network request', () => { // This ensures that live queries stop issuing network requests - const preloadedQuery = loadQuery( - environment, - query, - variables, - { - fetchPolicy: 'network-only', - }, - ); + const preloadedQuery = loadQuery(environment, query, variables, { + fetchPolicy: 'network-only', + }); preloadedQuery.dispose(); expect(networkUnsubscribe).not.toBe(null); @@ -792,11 +761,7 @@ describe('loadQuery', () => { }); it('should 
make a network request', done => { - const {source} = loadQuery( - environment, - query, - variables, - ); + const {source} = loadQuery(environment, query, variables); const nextCallback = jest.fn(() => done()); if (source) { // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file @@ -829,11 +794,7 @@ describe('loadQuery', () => { }); it('should mark failed network requests', () => { - const preloadedQuery = loadQuery( - environment, - query, - variables, - ); + const preloadedQuery = loadQuery(environment, query, variables); expect(preloadedQuery.networkError).toBeNull(); @@ -845,11 +806,7 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from environment.executeWithSource', () => { // This ensures that no data is written to the store - const preloadedQuery = loadQuery( - environment, - query, - variables, - ); + const preloadedQuery = loadQuery(environment, query, variables); expect(fetch).toHaveBeenCalled(); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.executeWithSource).toHaveBeenCalledTimes(1); @@ -874,11 +831,7 @@ describe('loadQuery', () => { it('calling dispose unsubscribes from the network request', () => { // This ensures that live queries stop issuing network requests - const preloadedQuery = loadQuery( - environment, - query, - variables, - ); + const preloadedQuery = loadQuery(environment, query, variables); preloadedQuery.dispose(); expect(networkUnsubscribe).not.toBe(null); @@ -890,7 +843,7 @@ describe('loadQuery', () => { describe("with fetchPolicy === 'store-only'", () => { it('should not call fetch if the query can be fulfilled by the store', () => { - const {source} = loadQuery( + const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -909,7 +862,7 @@ describe('loadQuery', () => { it('should not call fetch if the query cannot be fulfilled by the store', () => { mockAvailability = {status: 'missing'}; - const {source} = loadQuery( 
+ const {source} = loadQuery( environment, preloadableConcreteRequest, variables, @@ -927,7 +880,7 @@ describe('loadQuery', () => { }); it('calling dispose releases the query', () => { - const preloadedQuery = loadQuery( + const preloadedQuery = loadQuery( environment, preloadableConcreteRequest, variables, @@ -947,19 +900,15 @@ describe('loadQuery', () => { beforeEach(() => { Container = (props: {children: React.Node}) => { + // $FlowFixMe[react-rule-hook] useTrackLoadQueryInRender(); return props.children; }; LoadDuringRender = (props: {name?: ?string}) => { - loadQuery( - environment, - preloadableConcreteRequest, - variables, - { - fetchPolicy: 'store-or-network', - __nameForWarning: props.name, - }, - ); + loadQuery(environment, preloadableConcreteRequest, variables, { + fetchPolicy: 'store-or-network', + __nameForWarning: props.name, + }); return null; }; }); diff --git a/packages/react-relay/relay-hooks/__tests__/prepareEntryPoint_DEPRECATED-test.js b/packages/react-relay/relay-hooks/__tests__/prepareEntryPoint_DEPRECATED-test.js index 6765774f34d77..04bb0832dbfc7 100644 --- a/packages/react-relay/relay-hooks/__tests__/prepareEntryPoint_DEPRECATED-test.js +++ b/packages/react-relay/relay-hooks/__tests__/prepareEntryPoint_DEPRECATED-test.js @@ -137,6 +137,7 @@ test('it should preload entry point with nested entry points', () => { { getEnvironment: () => env, }, + // $FlowFixMe[incompatible-call] Added after improved typing of PreloadProps entryPoint, {id: 'my-id'}, ); @@ -211,6 +212,7 @@ test('it should preload entry point with both queries and nested entry points', { getEnvironment: () => env, }, + // $FlowFixMe[incompatible-call] Added after improved typing of PreloadProps entryPoint, {id: 'my-id'}, ); diff --git a/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-test.js b/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-test.js index 3cb737c7f1bb2..3e97b7590b5a6 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-test.js @@ -13,7 +13,7 @@ import type {Direction, OperationDescriptor, Variables} from 'relay-runtime'; -const useBlockingPaginationFragmentOriginal = require('../useBlockingPaginationFragment'); +const useBlockingPaginationFragmentOriginal = require('../legacy/useBlockingPaginationFragment'); const invariant = require('invariant'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); @@ -28,12 +28,12 @@ const { graphql, } = require('relay-runtime'); const {createMockEnvironment} = require('relay-test-utils'); -const { - disallowWarnings, - expectWarningWillFire, -} = require('relay-test-utils-internal'); const Scheduler = require('scheduler'); +const {disallowWarnings, expectWarningWillFire} = (jest.requireActual( + 'relay-test-utils-internal', +): $FlowFixMe); + const {useMemo, useState} = React; disallowWarnings(); @@ -71,10 +71,10 @@ describe('useBlockingPaginationFragment', () => { this.setState({error}); } render(): any | React.Node { - const {children, fallback} = this.props; + const {children, fallback: Fallback} = this.props; const {error} = this.state; if (error) { - return React.createElement(fallback, {error}); + return ; } return children; } @@ -135,7 +135,6 @@ describe('useBlockingPaginationFragment', () => { [fragmentName]: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } @@ -2492,7 +2491,6 @@ describe('useBlockingPaginationFragment', () => { // the component twice: `expectFragmentResults` will fail in the next // test jest.resetModules(); - disallowWarnings(); }); it('preserves pagination request if re-rendered with same fragment ref', () => { diff --git a/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-with-suspense-transition-test.js 
b/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-with-suspense-transition-test.js index ab2eb9109ca76..06b250155f909 100644 --- a/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-with-suspense-transition-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useBlockingPaginationFragment-with-suspense-transition-test.js @@ -13,7 +13,7 @@ import type {Direction, OperationDescriptor, Variables} from 'relay-runtime'; import type {Disposable} from 'relay-runtime/util/RelayRuntimeTypes'; -const useBlockingPaginationFragmentOriginal = require('../useBlockingPaginationFragment'); +const useBlockingPaginationFragmentOriginal = require('../legacy/useBlockingPaginationFragment'); const invariant = require('invariant'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); @@ -62,10 +62,10 @@ describe('useBlockingPaginationFragment with useTransition', () => { this.setState({error}); } render(): any | React.Node { - const {children, fallback} = this.props; + const {children, fallback: Fallback} = this.props; const {error} = this.state; if (error) { - return React.createElement(fallback, {error}); + return ; } return children; } @@ -94,14 +94,14 @@ describe('useBlockingPaginationFragment with useTransition', () => { result.isPendingNext = isPendingNext; useEffect(() => { - Scheduler.unstable_yieldValue({data, ...result}); + Scheduler.log({data, ...result}); }); return {data, ...result}; } function assertYieldsWereCleared() { - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); if (actualYields.length !== 0) { throw new Error( 'Log of yielded values is not empty. 
' + @@ -135,7 +135,7 @@ describe('useBlockingPaginationFragment with useTransition', () => { ) { assertYieldsWereCleared(); Scheduler.unstable_flushNumberOfYields(expectedYields.length); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields.length).toEqual(expectedYields.length); expectedYields.forEach((expected, idx) => assertYield(expected, actualYields[idx]), @@ -186,7 +186,6 @@ describe('useBlockingPaginationFragment with useTransition', () => { {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } @@ -194,9 +193,7 @@ describe('useBlockingPaginationFragment with useTransition', () => { // Set up mocks jest.resetModules(); jest.mock('warning'); - jest.mock('scheduler', () => { - return jest.requireActual('scheduler/unstable_mock'); - }); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); // Supress `act` warnings since we are intentionally not // using it for most tests here. 
`act` currently always @@ -429,7 +426,7 @@ describe('useBlockingPaginationFragment with useTransition', () => { const Fallback = () => { useEffect(() => { - Scheduler.unstable_yieldValue('Fallback'); + Scheduler.log('Fallback'); }); return 'Fallback'; @@ -1052,7 +1049,7 @@ describe('useBlockingPaginationFragment with useTransition', () => { }); Scheduler.unstable_flushNumberOfYields(1); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); if (flushFallback) { // Flushing fallbacks by running a timer could cause other side-effects @@ -1083,7 +1080,8 @@ describe('useBlockingPaginationFragment with useTransition', () => { ); } - it('loads more items correctly after refetching', () => { + // TODO: T150701964 + xit('loads more items correctly after refetching', () => { const renderer = renderFragment(); expectFragmentResults([ { diff --git a/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-react-double-effects-test.js b/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-react-double-effects-test.js index 766e98959b567..768a634d0689e 100644 --- a/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-react-double-effects-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-react-double-effects-test.js @@ -142,13 +142,14 @@ describe.skip('useEntryPointLoader-react-double-effects', () => { query = createOperationDescriptor(gqlQuery, variables); queryRenderLogs = []; - QueryComponent = function (props: any) { + QueryComponent = function TestQueryComponent(props: any) { const result = usePreloadedQuery( gqlQuery, (props.queries.TestQuery: $FlowFixMe), ); const name = result?.node?.name ?? 
'Empty'; + // $FlowFixMe[react-rule-hook] useEffect(() => { queryRenderLogs.push(`commit: ${name}`); return () => { @@ -161,7 +162,7 @@ describe.skip('useEntryPointLoader-react-double-effects', () => { }; loaderRenderLogs = []; - LoaderComponent = function (props: any) { + LoaderComponent = function TestLoaderComponent(props: any) { // $FlowFixMe[underconstrained-implicit-instantiation] const [entryPointRef] = useEntryPointLoader< _, @@ -185,6 +186,7 @@ describe.skip('useEntryPointLoader-react-double-effects', () => { } else { entryPointRefId = entryPointRef.queries.TestQuery?.id ?? 'Unknown'; } + // $FlowFixMe[react-rule-hook] useEffect(() => { loaderRenderLogs.push(`commit: ${entryPointRefId}`); return () => { diff --git a/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-test.js b/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-test.js index 2fef1f05b6627..59df975fc115a 100644 --- a/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useEntryPointLoader-test.js @@ -78,6 +78,7 @@ beforeEach(() => { }) { renderCount = (renderCount || 0) + 1; [loadedEntryPoint, entryPointLoaderCallback, disposeEntryPoint] = + // $FlowFixMe[react-rule-hook] useEntryPointLoader<{...}, any, any, any, any, any, any>( environmentProvider, entryPoint, @@ -153,7 +154,7 @@ it('disposes the entry point and nullifies the state when the disposeEntryPoint }); beforeEach(() => { - jest.mock('scheduler', () => require('scheduler/unstable_mock')); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); }); afterEach(() => { diff --git a/packages/react-relay/relay-hooks/__tests__/useFragment-WithOperationTrackerSuspense-test.js b/packages/react-relay/relay-hooks/__tests__/useFragment-WithOperationTrackerSuspense-test.js new file mode 100644 index 0000000000000..41a5b473310b1 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/useFragment-WithOperationTrackerSuspense-test.js 
@@ -0,0 +1,350 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + * @format + * @oncall relay + */ + +'use strict'; + +import type {LogEvent} from 'relay-runtime/store/RelayStoreTypes'; + +const ReactRelayContext = require('../../ReactRelayContext'); +const useFragment = require('../useFragment'); +const React = require('react'); +const ReactTestRenderer = require('react-test-renderer'); +const { + createOperationDescriptor, + createReaderSelector, + graphql, +} = require('relay-runtime'); +const RelayOperationTracker = require('relay-runtime/store/RelayOperationTracker'); +const RelayFeatureFlags = require('relay-runtime/util/RelayFeatureFlags'); +const {createMockEnvironment} = require('relay-test-utils'); +const {disallowWarnings} = require('relay-test-utils-internal'); + +disallowWarnings(); + +describe('useFragment with Operation Tracker and Suspense behavior', () => { + let environment; + let UserFragment; + let operationTracker; + let nodeOperation; + let logger; + let UserQuery; + let ViewerFriendsQuery; + let viewerOperation; + let UsersFragment; + let UsersQuery; + let pluralOperation; + let render; + + const pluralVariables = {ids: ['user-id-1']}; + + beforeEach(() => { + RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = true; + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE = true; + operationTracker = new RelayOperationTracker(); + logger = jest.fn<[LogEvent], void>(); + environment = createMockEnvironment({ + operationTracker, + log: logger, + }); + + UserFragment = graphql` + fragment useFragmentWithOperationTrackerSuspenseTestFragment on User { + id + name + } + `; + UserQuery = graphql` + query useFragmentWithOperationTrackerSuspenseTestQuery($id: ID!) 
{ + node(id: $id) { + __typename + ...useFragmentWithOperationTrackerSuspenseTestFragment + } + } + `; + + UsersFragment = graphql` + fragment useFragmentWithOperationTrackerSuspenseTest2Fragment on User + @relay(plural: true) { + id + name + } + `; + + UsersQuery = graphql` + query useFragmentWithOperationTrackerSuspenseTest2Query($ids: [ID!]!) { + nodes(ids: $ids) { + __typename + ...useFragmentWithOperationTrackerSuspenseTest2Fragment + } + } + `; + + ViewerFriendsQuery = graphql` + query useFragmentWithOperationTrackerSuspenseTestViewerFriendsQuery { + viewer { + actor { + friends(first: 1) @connection(key: "Viewer_friends") { + edges { + node { + ...useFragmentWithOperationTrackerSuspenseTestFragment + } + } + } + } + } + } + `; + nodeOperation = createOperationDescriptor(UserQuery, { + id: 'user-id-1', + }); + viewerOperation = createOperationDescriptor(ViewerFriendsQuery, {}); + pluralOperation = createOperationDescriptor(UsersQuery, pluralVariables); + + environment.execute({operation: viewerOperation}).subscribe({}); + environment.execute({operation: nodeOperation}).subscribe({}); + environment.execute({operation: pluralOperation}).subscribe({}); + + environment.subscribe( + environment.lookup(viewerOperation.fragment), + jest.fn(), + ); + + // We need to subscribe to a fragment in order for OperationTracker + // to be able to notify owners if they are affected by any pending operation + environment.subscribe( + environment.lookup( + createReaderSelector( + UserFragment, + 'user-id-1', + viewerOperation.request.variables, + viewerOperation.request, + ), + ), + jest.fn(), + ); + environment.subscribe( + environment.lookup( + createReaderSelector( + UsersFragment, + 'user-id-1', + pluralOperation.request.variables, + pluralOperation.request, + ), + ), + jest.fn(), + ); + environment.subscribe( + environment.lookup( + createReaderSelector( + UserFragment, + 'user-id-1', + nodeOperation.request.variables, + nodeOperation.request, + ), + ), + jest.fn(), + ); + 
+ const ContextProvider = ({children}: {children: React.Node}) => { + return ( + + {children} + + ); + }; + + const Container = (props: {userRef: $FlowFixMe, ...}) => { + const isPlural = Array.isArray(props.userRef); + // $FlowFixMe[incompatible-call] + const userData = useFragment( + isPlural ? UsersFragment : UserFragment, + props.userRef, + ); + return Array.isArray(userData) + ? userData.map(user => ( + {user.name} + )) + : userData.name; + }; + + render = function (props: $FlowFixMe) { + return ReactTestRenderer.create( + + + + + , + ); + }; + }); + + afterEach(() => { + RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES = false; + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE = false; + }); + + it('should throw promise for pending operation affecting fragment owner', () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragmentRef = { + __id: 'user-id-1', + __fragments: { + useFragmentWithOperationTrackerSuspenseTestFragment: {}, + }, + __fragmentOwner: nodeOperation.request, + }; + + const renderer = render({userRef: fragmentRef}); + expect(renderer.toJSON()).toBe('Alice'); // should show the name + + ReactTestRenderer.act(() => { + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + jest.runAllImmediates(); + }); + + expect(renderer.toJSON()).toBe('Singular Fallback'); // Component is suspended now for optimistic update + ReactTestRenderer.act(() => { + environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 
'user-id-1', + name: 'Alice222', + }, + }, + }); + environment.mock.complete(nodeOperation.request.node); + }); + + expect(renderer.toJSON()).toBe('Alice222'); + }); + + it('should throw promise for plural fragment', () => { + environment.commitPayload(viewerOperation, { + viewer: { + actor: { + id: 'viewer-id', + __typename: 'User', + friends: { + pageInfo: { + hasNextPage: true, + hasPrevPage: false, + startCursor: 'cursor-1', + endCursor: 'cursor-1', + }, + edges: [ + { + cursor: 'cursor-1', + node: { + id: 'user-id-1', + name: 'Alice', + __typename: 'User', + }, + }, + { + cursor: 'cursor-2', + node: { + id: 'user-id-2', + name: 'Bob', + __typename: 'User', + }, + }, + ], + }, + }, + }, + }); + + const fragmentPluralRef = [ + { + __id: 'user-id-1', + __fragments: { + useFragmentWithOperationTrackerSuspenseTest2Fragment: {}, + }, + __fragmentOwner: pluralOperation.request, + }, + { + __id: 'user-id-2', + __fragments: { + useFragmentWithOperationTrackerSuspenseTest2Fragment: {}, + }, + __fragmentOwner: pluralOperation.request, + }, + ]; + + const rendererPlural = render({userRef: fragmentPluralRef}); + expect(rendererPlural.toJSON()).toEqual(['Alice', 'Bob']); + + ReactTestRenderer.act(() => { + // Execute the nodeOperation query with executeMutation and set the record as undefined in optimistic updater + environment + .executeMutation({ + operation: nodeOperation, + optimisticUpdater: store => { + const record = store.get('user-id-1'); + record?.setValue(undefined, 'name'); + }, + }) + .subscribe({}); + jest.runAllImmediates(); + }); + + expect(rendererPlural.toJSON()).toEqual(['Singular Fallback']); + + ReactTestRenderer.act(() => { + environment.mock.nextValue(nodeOperation, { + data: { + node: { + __typename: 'User', + id: 'user-id-1', + name: 'Alice222', + }, + }, + }); + environment.mock.complete(nodeOperation.request.node); + }); + + expect(rendererPlural.toJSON()).toEqual(['Alice222', 'Bob']); + }); +}); diff --git 
a/packages/react-relay/relay-hooks/__tests__/useFragment-test.js b/packages/react-relay/relay-hooks/__tests__/useFragment-test.js index 3122da51ebb25..eeef3e8a836d8 100644 --- a/packages/react-relay/relay-hooks/__tests__/useFragment-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useFragment-test.js @@ -10,6 +10,7 @@ */ 'use strict'; + import type { useFragmentTestUserFragment$data, useFragmentTestUserFragment$fragmentType, @@ -21,8 +22,7 @@ import type { import type {OperationDescriptor} from 'relay-runtime/store/RelayStoreTypes'; import type {Fragment} from 'relay-runtime/util/RelayRuntimeTypes'; -const useFragmentOriginal_REACT_CACHE = require('../react-cache/useFragment_REACT_CACHE'); -const useFragmentOriginal_LEGACY = require('../useFragment'); +const useFragmentImpl = require('../useFragment'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); const TestRenderer = require('react-test-renderer'); @@ -30,7 +30,6 @@ const { FRAGMENT_OWNER_KEY, FRAGMENTS_KEY, ID_KEY, - RelayFeatureFlags, createOperationDescriptor, graphql, } = require('relay-runtime'); @@ -43,316 +42,301 @@ const { disallowWarnings(); disallowConsoleErrors(); -describe.each([ - ['React Cache', useFragmentOriginal_REACT_CACHE], - ['Legacy', useFragmentOriginal_LEGACY], -])('useFragment (%s)', (_hookName, useFragmentOriginal) => { - let originalReactCacheFeatureFlag; - beforeEach(() => { - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = - useFragmentOriginal === useFragmentOriginal_REACT_CACHE; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); +let environment; +let gqlSingularQuery; +let gqlSingularFragment; +let gqlPluralQuery; +let gqlPluralFragment; +let singularQuery; +let pluralQuery; +let singularVariables; +let pluralVariables; +let renderSingularFragment; +let renderPluralFragment; +let renderSpy; +let SingularRenderer; +let 
PluralRenderer; +let ContextProvider; - let environment; - let gqlSingularQuery; - let gqlSingularFragment; - let gqlPluralQuery; - let gqlPluralFragment; - let singularQuery; - let pluralQuery; - let singularVariables; - let pluralVariables; - let renderSingularFragment; - let renderPluralFragment; - let renderSpy; - let SingularRenderer; - let PluralRenderer; - let ContextProvider; +hook useFragment( + fragmentNode: + | Fragment< + useFragmentTestUserFragment$fragmentType, + useFragmentTestUserFragment$data, + > + | Fragment< + useFragmentTestUsersFragment$fragmentType, + useFragmentTestUsersFragment$data, + >, + fragmentRef: any, +) { + // $FlowFixMe[incompatible-call] + const data = useFragmentImpl(fragmentNode, fragmentRef); + renderSpy(data); + return data; +} - function useFragment( - fragmentNode: - | Fragment< - useFragmentTestUserFragment$fragmentType, - useFragmentTestUserFragment$data, - > - | Fragment< - useFragmentTestUsersFragment$fragmentType, - useFragmentTestUsersFragment$data, - >, - fragmentRef: any, - ) { - const data = useFragmentOriginal(fragmentNode, fragmentRef); - renderSpy(data); - return data; - } +function assertFragmentResults(expected: any) { + // This ensures that useEffect runs + jest.runAllImmediates(); + expect(renderSpy).toBeCalledTimes(1); + const actualData = renderSpy.mock.calls[0][0]; + expect(actualData).toEqual(expected); + renderSpy.mockClear(); +} - function assertFragmentResults(expected: any) { - // This ensures that useEffect runs - jest.runAllImmediates(); - expect(renderSpy).toBeCalledTimes(1); - const actualData = renderSpy.mock.calls[0][0]; - expect(actualData).toEqual(expected); - renderSpy.mockClear(); - } +function createFragmentRef(id: string, owner: OperationDescriptor) { + return { + [ID_KEY]: id, + [FRAGMENTS_KEY]: { + useFragmentTestNestedUserFragment: {}, + }, + [FRAGMENT_OWNER_KEY]: owner.request, + }; +} - function createFragmentRef(id: string, owner: OperationDescriptor) { - return { - [ID_KEY]: id, - 
[FRAGMENTS_KEY]: { - useFragmentTestNestedUserFragment: {}, - }, - [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, - }; - } +beforeEach(() => { + renderSpy = jest.fn< + [useFragmentTestUserFragment$data | useFragmentTestUsersFragment$data], + mixed, + >(); - beforeEach(() => { - renderSpy = jest.fn< - [useFragmentTestUserFragment$data | useFragmentTestUsersFragment$data], - mixed, - >(); - - // Set up environment and base data - environment = createMockEnvironment(); - graphql` - fragment useFragmentTestNestedUserFragment on User { - username - } - `; - singularVariables = {id: '1'}; - pluralVariables = {ids: ['1', '2']}; - gqlSingularQuery = graphql` - query useFragmentTestUserQuery($id: ID!) { - node(id: $id) { - ...useFragmentTestUserFragment - } - } - `; - gqlSingularFragment = graphql` - fragment useFragmentTestUserFragment on User { - id - name - ...useFragmentTestNestedUserFragment + // Set up environment and base data + environment = createMockEnvironment(); + graphql` + fragment useFragmentTestNestedUserFragment on User { + username + } + `; + singularVariables = {id: '1'}; + pluralVariables = {ids: ['1', '2']}; + gqlSingularQuery = graphql` + query useFragmentTestUserQuery($id: ID!) { + node(id: $id) { + ...useFragmentTestUserFragment } - `; - gqlPluralQuery = graphql` - query useFragmentTestUsersQuery($ids: [ID!]!) { - nodes(ids: $ids) { - ...useFragmentTestUsersFragment - } + } + `; + gqlSingularFragment = graphql` + fragment useFragmentTestUserFragment on User { + id + name + ...useFragmentTestNestedUserFragment + } + `; + gqlPluralQuery = graphql` + query useFragmentTestUsersQuery($ids: [ID!]!) 
{ + nodes(ids: $ids) { + ...useFragmentTestUsersFragment } - `; - gqlPluralFragment = graphql` - fragment useFragmentTestUsersFragment on User @relay(plural: true) { - id - name - ...useFragmentTestNestedUserFragment - } - `; - singularQuery = createOperationDescriptor( - gqlSingularQuery, - singularVariables, - ); - pluralQuery = createOperationDescriptor(gqlPluralQuery, pluralVariables); - environment.commitPayload(singularQuery, { - node: { + } + `; + gqlPluralFragment = graphql` + fragment useFragmentTestUsersFragment on User @relay(plural: true) { + id + name + ...useFragmentTestNestedUserFragment + } + `; + singularQuery = createOperationDescriptor( + gqlSingularQuery, + singularVariables, + ); + pluralQuery = createOperationDescriptor(gqlPluralQuery, pluralVariables); + environment.commitPayload(singularQuery, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + username: 'useralice', + }, + }); + environment.commitPayload(pluralQuery, { + nodes: [ + { __typename: 'User', id: '1', name: 'Alice', username: 'useralice', + profile_picture: null, + }, + { + __typename: 'User', + id: '2', + name: 'Bob', + username: 'userbob', + profile_picture: null, }, - }); - environment.commitPayload(pluralQuery, { - nodes: [ - { - __typename: 'User', - id: '1', - name: 'Alice', - username: 'useralice', - profile_picture: null, - }, - { - __typename: 'User', - id: '2', - name: 'Bob', - username: 'userbob', - profile_picture: null, - }, - ], - }); + ], + }); - // Set up renderers - SingularRenderer = (props: { - user: ?( - | useFragmentTestUserFragment$data - | useFragmentTestUsersFragment$data - ), - }) => null; - PluralRenderer = (props: { - users: ?( - | useFragmentTestUserFragment$data - | useFragmentTestUsersFragment$data - ), - }) => null; - const SingularContainer = (props: { - userRef?: {$data?: {...}, ...}, - owner: $FlowFixMe, - ... 
- }) => { - // We need a render a component to run a Hook - const owner = props.owner; - const userRef = props.hasOwnProperty('userRef') - ? props.userRef - : { - [ID_KEY]: owner.request.variables.id, - [FRAGMENTS_KEY]: { - useFragmentTestUserFragment: {}, - }, - [FRAGMENT_OWNER_KEY]: owner.request, - }; - const userData = useFragment(gqlSingularFragment, userRef); - return ; - }; + // Set up renderers + SingularRenderer = (props: { + user: ?( + | useFragmentTestUserFragment$data + | useFragmentTestUsersFragment$data + ), + }) => null; + PluralRenderer = (props: { + users: ?( + | useFragmentTestUserFragment$data + | useFragmentTestUsersFragment$data + ), + }) => null; + const SingularContainer = (props: { + userRef?: {$data?: {...}, ...}, + owner: $FlowFixMe, + ... + }) => { + // We need a render a component to run a Hook + const owner = props.owner; + const userRef = props.hasOwnProperty('userRef') + ? props.userRef + : { + [ID_KEY]: owner.request.variables.id, + [FRAGMENTS_KEY]: { + useFragmentTestUserFragment: {}, + }, + [FRAGMENT_OWNER_KEY]: owner.request, + }; + const userData = useFragment(gqlSingularFragment, userRef); + return ; + }; - const PluralContainer = (props: { - usersRef?: $ReadOnlyArray<{$data?: {...}, ...}>, - owner: $FlowFixMe, - ... - }) => { - const owner = props.owner; - const usersRef = props.hasOwnProperty('usersRef') - ? props.usersRef - : owner.request.variables.ids.map(id => ({ - [ID_KEY]: id, - [FRAGMENTS_KEY]: { - useFragmentTestUsersFragment: {}, - }, - [FRAGMENT_OWNER_KEY]: owner.request, - })); + const PluralContainer = (props: { + usersRef?: $ReadOnlyArray<{$data?: {...}, ...}>, + owner: $FlowFixMe, + ... + }) => { + const owner = props.owner; + const usersRef = props.hasOwnProperty('usersRef') + ? 
props.usersRef + : owner.request.variables.ids.map(id => ({ + [ID_KEY]: id, + [FRAGMENTS_KEY]: { + useFragmentTestUsersFragment: {}, + }, + [FRAGMENT_OWNER_KEY]: owner.request, + })); - const usersData = useFragment(gqlPluralFragment, usersRef); - return ; - }; + const usersData = useFragment(gqlPluralFragment, usersRef); + return ; + }; - const relayContext = {environment}; - ContextProvider = ({children}: {children: React.Node}) => { - return ( - - {children} - - ); - }; + const relayContext = {environment}; + ContextProvider = ({children}: {children: React.Node}) => { + return ( + + {children} + + ); + }; - renderSingularFragment = ( - props?: { - owner?: $FlowFixMe, - userRef?: $FlowFixMe, - ... - }, - existing: $FlowFixMe, - ) => { - const elements = ( - - - - - - ); - if (existing) { - existing.update(elements); - return existing; - } else { - return TestRenderer.create(elements); - } - }; + renderSingularFragment = ( + props?: { + owner?: $FlowFixMe, + userRef?: $FlowFixMe, + ... + }, + existing: $FlowFixMe, + ) => { + const elements = ( + + + + + + ); + if (existing) { + existing.update(elements); + return existing; + } else { + return TestRenderer.create(elements); + } + }; - renderPluralFragment = ( - props?: { - owner?: $FlowFixMe, - userRef?: $FlowFixMe, - ... - }, - existing: $FlowFixMe, - ) => { - const elements = ( - - - - - - ); - if (existing) { - existing.update(elements); - return existing; - } else { - return TestRenderer.create(elements); - } - }; - }); + renderPluralFragment = ( + props?: { + owner?: $FlowFixMe, + userRef?: $FlowFixMe, + ... 
+ }, + existing: $FlowFixMe, + ) => { + const elements = ( + + + + + + ); + if (existing) { + existing.update(elements); + return existing; + } else { + return TestRenderer.create(elements); + } + }; +}); - afterEach(() => { - environment.mockClear(); - renderSpy.mockClear(); - }); +afterEach(() => { + environment.mockClear(); + renderSpy.mockClear(); +}); - it('should render singular fragment without error when data is available', () => { - renderSingularFragment(); - assertFragmentResults({ - id: '1', - name: 'Alice', - ...createFragmentRef('1', singularQuery), - }); +it('should render singular fragment without error when data is available', () => { + renderSingularFragment(); + assertFragmentResults({ + id: '1', + name: 'Alice', + ...createFragmentRef('1', singularQuery), }); +}); - it('should return the same data object if rendered multiple times: singular fragment', () => { - const container = renderSingularFragment(); - expect(renderSpy).toBeCalledTimes(1); - const actualData = renderSpy.mock.calls[0][0]; - renderSingularFragment({}, container); - expect(renderSpy).toBeCalledTimes(2); - const actualData2 = renderSpy.mock.calls[1][0]; - expect(actualData).toBe(actualData2); - }); +it('should return the same data object if rendered multiple times: singular fragment', () => { + const container = renderSingularFragment(); + expect(renderSpy).toBeCalledTimes(1); + const actualData = renderSpy.mock.calls[0][0]; + renderSingularFragment({}, container); + expect(renderSpy).toBeCalledTimes(2); + const actualData2 = renderSpy.mock.calls[1][0]; + expect(actualData).toBe(actualData2); +}); - it('should render plural fragment without error when data is available', () => { - renderPluralFragment(); - assertFragmentResults([ - { - id: '1', - name: 'Alice', - ...createFragmentRef('1', pluralQuery), - }, - { - id: '2', - name: 'Bob', - ...createFragmentRef('2', pluralQuery), - }, - ]); - }); +it('should render plural fragment without error when data is available', () => { + 
renderPluralFragment(); + assertFragmentResults([ + { + id: '1', + name: 'Alice', + ...createFragmentRef('1', pluralQuery), + }, + { + id: '2', + name: 'Bob', + ...createFragmentRef('2', pluralQuery), + }, + ]); +}); - it('should return the same data object if rendered multiple times: plural fragment', () => { - const container = renderPluralFragment(); - expect(renderSpy).toBeCalledTimes(1); - const actualData = renderSpy.mock.calls[0][0]; - renderPluralFragment({}, container); - expect(renderSpy).toBeCalledTimes(2); - const actualData2 = renderSpy.mock.calls[1][0]; - expect(actualData).toBe(actualData2); - }); +it('should return the same data object if rendered multiple times: plural fragment', () => { + const container = renderPluralFragment(); + expect(renderSpy).toBeCalledTimes(1); + const actualData = renderSpy.mock.calls[0][0]; + renderPluralFragment({}, container); + expect(renderSpy).toBeCalledTimes(2); + const actualData2 = renderSpy.mock.calls[1][0]; + expect(actualData).toBe(actualData2); +}); - it('Returns [] when the fragment ref is [] (for plural fragments)', () => { - const container = renderPluralFragment({usersRef: []}); - assertFragmentResults([]); - container.unmount(); - }); +it('Returns [] when the fragment ref is [] (for plural fragments)', () => { + const container = renderPluralFragment({usersRef: []}); + assertFragmentResults([]); + container.unmount(); +}); - it('Returns null when the fragment ref is null (for plural fragments)', () => { - const container = renderPluralFragment({usersRef: null}); - assertFragmentResults(null); - container.unmount(); - }); +it('Returns null when the fragment ref is null (for plural fragments)', () => { + const container = renderPluralFragment({usersRef: null}); + assertFragmentResults(null); + container.unmount(); }); diff --git a/packages/react-relay/relay-hooks/__tests__/useFragment-with-required-test.js b/packages/react-relay/relay-hooks/__tests__/useFragment-with-required-test.js new file mode 100644 
index 0000000000000..ab3b216224ad6 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/useFragment-with-required-test.js @@ -0,0 +1,117 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + * @format + * @oncall relay + */ + +'use strict'; + +import type {MutableRecordSource} from 'relay-runtime/store/RelayStoreTypes'; +import type {RelayFieldLoggerEvent} from 'relay-runtime/store/RelayStoreTypes'; + +const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); +const useFragment = require('../useFragment'); +const useLazyLoadQuery = require('../useLazyLoadQuery'); +const React = require('react'); +const TestRenderer = require('react-test-renderer'); +const {graphql} = require('relay-runtime'); +const RelayNetwork = require('relay-runtime/network/RelayNetwork'); +const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore'); +const RelayModernEnvironment = require('relay-runtime/store/RelayModernEnvironment'); +const RelayRecordSource = require('relay-runtime/store/RelayRecordSource'); +const { + disallowConsoleErrors, + disallowWarnings, +} = require('relay-test-utils-internal'); + +disallowWarnings(); +disallowConsoleErrors(); + +test('@required(action: LOG) gets logged even if no data is "missing"', () => { + function InnerTestComponent({id}: {id: string}) { + const data = useLazyLoadQuery( + graphql` + query useFragmentWithRequiredTestQuery($id: ID!) { + node(id: $id) { + ... on User { + ...useFragmentWithRequiredTestUserFragment + } + } + } + `, + {id}, + {fetchPolicy: 'store-only'}, + ); + const user = useFragment( + graphql` + fragment useFragmentWithRequiredTestUserFragment on User { + name @required(action: LOG) + } + `, + data.node, + ); + return `${user?.name ?? 
'Unknown name'}`; + } + + function TestComponent({ + environment, + ...rest + }: { + environment: RelayModernEnvironment, + id: string, + }) { + return ( + + + + + + ); + } + const relayFieldLogger = jest.fn< + $FlowFixMe | [RelayFieldLoggerEvent], + void, + >(); + function createEnvironment(source: MutableRecordSource) { + return new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store: new LiveResolverStore(source), + relayFieldLogger, + }); + } + + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + '1': { + __id: '1', + __typename: 'User', + name: null, + }, + }); + const environment = createEnvironment(source); + + const renderer = TestRenderer.create( + , + ); + + // Validate that the missing required field was logged. + expect(relayFieldLogger.mock.calls).toEqual([ + [ + { + fieldPath: 'name', + kind: 'missing_field.log', + owner: 'useFragmentWithRequiredTestUserFragment', + }, + ], + ]); + expect(renderer.toJSON()).toEqual('Unknown name'); +}); diff --git a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-react-double-effects-test.js b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-react-double-effects-test.js index ee6fb2ce6c26f..281cb74d6cb58 100644 --- a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-react-double-effects-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-react-double-effects-test.js @@ -11,8 +11,8 @@ 'use strict'; +const useFragmentNode = require('../legacy/useFragmentNode'); const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); -const useFragmentNode = require('../useFragmentNode'); const React = require('react'); const {useEffect} = require('react'); const ReactTestRenderer = require('react-test-renderer'); @@ -30,7 +30,7 @@ let renderSpy; // TODO(T83890478): enable once double invoked effects lands in xplat 
describe.skip('useFragmentNode-react-double-effects-test', () => { beforeEach(() => { - jest.mock('scheduler', () => require('scheduler/unstable_mock')); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); jest.mock('warning'); jest.spyOn(console, 'warn').mockImplementationOnce(() => {}); renderSpy = jest.fn<$ReadOnlyArray, mixed>(); diff --git a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-required-test.js b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-required-test.js index b3cf7a4caf31d..969ffab8ad921 100644 --- a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-required-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-required-test.js @@ -13,7 +13,7 @@ import type {ReaderFragment} from '../../../relay-runtime/util/ReaderNode'; import type {RequestDescriptor} from 'relay-runtime/store/RelayStoreTypes'; -const useFragmentNodeOriginal = require('../useFragmentNode'); +const useFragmentNodeOriginal = require('../legacy/useFragmentNode'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); const TestRenderer = require('react-test-renderer'); @@ -32,7 +32,7 @@ let singularQuery; let renderSingularFragment; let renderSpy; -function useFragmentNode( +hook useFragmentNode( fragmentNode: ReaderFragment, fragmentRef: $TEMPORARY$object<{ __fragmentOwner: RequestDescriptor, @@ -40,7 +40,6 @@ function useFragmentNode( useFragmentNodeRequiredTestUserFragment: $TEMPORARY$object<{...}>, }>, __id: any, - __isWithinUnmatchedTypeRefinement: boolean, }>, ) { const result = useFragmentNodeOriginal( @@ -108,7 +107,6 @@ beforeEach(() => { useFragmentNodeRequiredTestUserFragment: {}, }, [FRAGMENT_OWNER_KEY]: singularQuery.request, - __isWithinUnmatchedTypeRefinement: false, }; useFragmentNode(gqlSingularFragment, userRef); diff --git a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-test.js 
b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-test.js index 0cedebbc601f5..db599b3dba20c 100644 --- a/packages/react-relay/relay-hooks/__tests__/useFragmentNode-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useFragmentNode-test.js @@ -24,8 +24,8 @@ import type {OperationDescriptor} from 'relay-runtime'; import type {Fragment} from 'relay-runtime/util/RelayRuntimeTypes'; const {act: internalAct} = require('../../jest-react'); -const useFragmentInternal_REACT_CACHE = require('../react-cache/useFragmentInternal_REACT_CACHE'); -const useFragmentNode_LEGACY = require('../useFragmentNode'); +const useFragmentNode_LEGACY = require('../legacy/useFragmentNode'); +const useFragmentInternal = require('../useFragmentInternal'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); const TestRenderer = require('react-test-renderer'); @@ -34,7 +34,6 @@ const { FRAGMENT_OWNER_KEY, FRAGMENTS_KEY, ID_KEY, - RelayFeatureFlags, createOperationDescriptor, graphql, } = require('relay-runtime'); @@ -50,7 +49,7 @@ disallowWarnings(); const {useEffect, useMemo, useState} = React; function assertYieldsWereCleared(_scheduler: any) { - const actualYields = _scheduler.unstable_clearYields(); + const actualYields = _scheduler.unstable_clearLog(); if (actualYields.length !== 0) { throw new Error( 'Log of yielded values is not empty. 
' + @@ -61,7 +60,7 @@ function assertYieldsWereCleared(_scheduler: any) { function expectSchedulerToHaveYielded(expectedYields: any) { const Scheduler = require('scheduler'); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } @@ -69,7 +68,7 @@ function flushScheduler() { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushAllWithoutAsserting(); - return Scheduler.unstable_clearYields(); + return Scheduler.unstable_clearLog(); } function expectSchedulerToFlushAndYield(expectedYields: any) { @@ -81,19 +80,19 @@ function expectSchedulerToFlushAndYieldThrough(expectedYields: any) { const Scheduler = require('scheduler'); assertYieldsWereCleared(Scheduler); Scheduler.unstable_flushNumberOfYields(expectedYields.length); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields).toEqual(expectedYields); } // The current tests are against useFragmentNode which as a different Flow signature // than the external API useFragment. I want to keep the more accurate types -// for useFragmentInternal_REACT_CACHE, though, so this wrapper adapts it. +// for useFragmentInternal, though, so this wrapper adapts it. 
type ReturnType = { data: TFragmentData, disableStoreUpdates: () => void, enableStoreUpdates: () => void, }; -function useFragmentNode_REACT_CACHE( +hook useFragmentNode_NEW( fragment: | Fragment< useFragmentNodeTestUserFragment$fragmentType, @@ -106,7 +105,7 @@ function useFragmentNode_REACT_CACHE( key: any, displayName: string, ): ReturnType { - const data = useFragmentInternal_REACT_CACHE(fragment, key, displayName); + const data = useFragmentInternal(fragment, key, displayName); return { // $FlowFixMe[incompatible-return] data, @@ -116,23 +115,13 @@ function useFragmentNode_REACT_CACHE( } describe.each([ - ['React Cache', useFragmentNode_REACT_CACHE], + ['New', useFragmentNode_NEW], ['Legacy', useFragmentNode_LEGACY], ])( 'useFragmentNode / useFragment (%s)', (_hookName, useFragmentNodeOriginal) => { - let isUsingReactCacheImplementation; - let originalReactCacheFeatureFlag; - beforeEach(() => { - isUsingReactCacheImplementation = - useFragmentNodeOriginal === useFragmentNode_REACT_CACHE; - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = isUsingReactCacheImplementation; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); - + const isUsingNewImplementation = + useFragmentNodeOriginal === useFragmentNode_NEW; let environment; let disableStoreUpdates; let enableStoreUpdates; @@ -232,14 +221,11 @@ describe.each([ useFragmentNodeTestNestedUserFragment: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } beforeEach(() => { - jest.mock('scheduler', () => { - return jest.requireActual('scheduler/unstable_mock'); - }); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); commitSpy = jest.fn(); renderSpy = jest.fn<[any], mixed>(); @@ -357,7 +343,6 @@ describe.each([ useFragmentNodeTestUserFragment: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; setSingularOwner = 
_setOwner; @@ -382,7 +367,6 @@ describe.each([ useFragmentNodeTestUsersFragment: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, })); const [usersData] = useFragmentNode(gqlPluralFragment, usersRef); @@ -677,6 +661,33 @@ describe.each([ ]); }); + it('should supsend when the environment changes and there is query in flight', () => { + const renderer = renderSingularFragment(); + assertFragmentResults([ + { + data: { + id: '1', + name: 'Alice', + profile_picture: null, + ...createFragmentRef('1', singularQuery), + }, + }, + ]); + + const newEnvironment = createMockEnvironment(); + + internalAct(() => { + // Let there be an operation in flight + fetchQuery(newEnvironment, singularQuery).subscribe({}); + + setEnvironment(newEnvironment); + }); + + // It should suspend when the environment changes and there is a query + // in flight. + expect(renderer.toJSON()).toEqual('Singular Fallback'); + }); + it('should re-read and resubscribe to fragment when fragment pointers change', () => { renderSingularFragment(); assertRenderBatch([ @@ -720,7 +731,7 @@ describe.each([ // Assert that ref now points to newQuery owner ...createFragmentRef('200', newQuery), }; - if (isUsingReactCacheImplementation) { + if (isUsingNewImplementation) { // React Cache renders twice (because it has to update state for derived data), // but avoids rendering with stale data on the initial update assertRenderBatch([{data: expectedUser}, {data: expectedUser}]); @@ -852,11 +863,21 @@ describe.each([ }); it('should ignore updates to initially rendered data when fragment pointers change', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. 
+ if ( + !TestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const Scheduler = require('scheduler'); const YieldChild = (props: any) => { // NOTE the unstable_yield method will move to the static renderer. // When React sync runs we need to update this. - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; }; const YieldyUserComponent = ({user}: any) => ( @@ -935,7 +956,7 @@ describe.each([ ...createFragmentRef('200', newQuery), }, }; - if (isUsingReactCacheImplementation) { + if (isUsingNewImplementation) { // The new implementation simply finishes the render in progress. expectSchedulerToFlushAndYield([['with id ', '200', '!']]); assertFragmentResults([expectedData]); @@ -980,6 +1001,195 @@ describe.each([ }); }); + it('should ignore updates to initially rendered data when fragment pointers change, but still handle updates to the new data', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !TestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + + const Scheduler = require('scheduler'); + const YieldChild = (props: any) => { + // NOTE the unstable_yield method will move to the static renderer. + // When React sync runs we need to update this. + Scheduler.log(props.children); + return props.children; + }; + const YieldyUserComponent = ({user}: any) => ( + <> + Hey user, + {user.name} + with id {user.id}! 
+ + ); + + // Assert initial render + // $FlowFixMe[incompatible-type] + SingularRenderer = YieldyUserComponent; + internalAct(() => { + renderSingularFragment({isConcurrent: true}); + }); + expectSchedulerToHaveYielded([ + 'Hey user,', + 'Alice', + ['with id ', '1', '!'], + ]); + assertFragmentResults([ + { + data: { + id: '1', + name: 'Alice', + profile_picture: null, + ...createFragmentRef('1', singularQuery), + }, + }, + ]); + + const newVariables = {...singularVariables, id: '200'}; + const newQuery = createOperationDescriptor( + gqlSingularQuery, + newVariables, + ); + internalAct(() => { + environment.commitPayload(newQuery, { + node: { + __typename: 'User', + id: '200', + name: 'Foo', + username: 'userfoo', + profile_picture: null, + }, + }); + }); + + internalAct(() => { + // Pass new fragment ref that points to new ID 200 + setSingularOwner(newQuery); + + // Flush some of the changes, but don't commit + expectSchedulerToFlushAndYieldThrough(['Hey user,', 'Foo']); + + // Trigger an update for initially rendered data and for the new data + // while second render is in progress + environment.commitUpdate(store => { + store.get('1')?.setValue('Alice in Wonderland', 'name'); + store.get('200')?.setValue('Foo Bar', 'name'); + }); + + // Assert the component renders the data from newQuery/newVariables, + // ignoring any updates triggered while render was in progress. 
+ const expectedData = { + data: { + id: '200', + name: 'Foo', + profile_picture: null, + ...createFragmentRef('200', newQuery), + }, + }; + expectSchedulerToFlushAndYield([ + ['with id ', '200', '!'], + 'Hey user,', + 'Foo Bar', + ['with id ', '200', '!'], + ]); + assertFragmentResults([ + expectedData, + { + data: { + id: '200', + name: 'Foo Bar', + profile_picture: null, + ...createFragmentRef('200', newQuery), + }, + }, + ]); + + // Update latest rendered data + environment.commitPayload(newQuery, { + node: { + __typename: 'User', + id: '200', + // Update name + name: 'Foo Updated', + username: 'userfoo', + profile_picture: null, + }, + }); + expectSchedulerToFlushAndYield([ + 'Hey user,', + 'Foo Updated', + ['with id ', '200', '!'], + ]); + assertFragmentResults([ + { + data: { + id: '200', + // Assert name is updated + name: 'Foo Updated', + profile_picture: null, + ...createFragmentRef('200', newQuery), + }, + }, + ]); + }); + }); + + it('should return the latest data when the hi-priority update happens at the same time as the low-priority store update', () => { + const startTransition = React.startTransition; + if (startTransition != null) { + internalAct(() => { + renderSingularFragment({ + isConcurrent: true, + }); + }); + assertFragmentResults([ + { + data: { + id: '1', + name: 'Alice', + profile_picture: null, + ...createFragmentRef('1', singularQuery), + }, + }, + ]); + + internalAct(() => { + // Trigger store update with the lower priority + startTransition(() => { + environment.commitUpdate(store => { + store.get('1')?.setValue('Alice Updated Name', 'name'); + }); + }); + // Trigger a hi-pri update with the higher priority, that should force component to re-render + forceSingularUpdate(); + }); + + // Assert that the component re-renders twice, both times with the latest data + assertFragmentResults([ + { + data: { + id: '1', + name: 'Alice Updated Name', + profile_picture: null, + ...createFragmentRef('1', singularQuery), + }, + }, + { + data: { + 
id: '1', + name: 'Alice Updated Name', + profile_picture: null, + ...createFragmentRef('1', singularQuery), + }, + }, + ]); + } + }); + it('should re-read and resubscribe to fragment when variables change', () => { renderSingularFragment(); assertFragmentResults([ @@ -1056,9 +1266,19 @@ describe.each([ }); it('should ignore updates to initially rendered data when variables change', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. + if ( + !TestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const Scheduler = require('scheduler'); const YieldChild = (props: any) => { - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; }; const YieldyUserComponent = ({user}: any) => ( @@ -1145,7 +1365,7 @@ describe.each([ ...createFragmentRef('1', newQuery), }, }; - if (isUsingReactCacheImplementation) { + if (isUsingNewImplementation) { // The new implementation simply finishes the render in progress. expectSchedulerToFlushAndYield([['with id ', '1', '!']]); assertFragmentResults([expectedData]); @@ -1385,9 +1605,19 @@ describe.each([ }); it('upon commit, it should pick up changes in data that happened before comitting', () => { + // Requires the `allowConcurrentByDefault` feature flag. Only run if + // we detect support for `unstable_concurrentUpdatesByDefault`. 
+ if ( + !TestRenderer.create + .toString() + .includes('unstable_concurrentUpdatesByDefault') + ) { + return; + } + const Scheduler = require('scheduler'); const YieldChild = (props: any) => { - Scheduler.unstable_yieldValue(props.children); + Scheduler.log(props.children); return props.children; }; const YieldyUserComponent = ({user}: any) => { diff --git a/packages/react-relay/relay-hooks/__tests__/useFragment_nullability-test.js b/packages/react-relay/relay-hooks/__tests__/useFragment_nullability-test.js new file mode 100644 index 0000000000000..f390450188086 --- /dev/null +++ b/packages/react-relay/relay-hooks/__tests__/useFragment_nullability-test.js @@ -0,0 +1,159 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + * @format + * @oncall relay + */ + +'use strict'; + +import type {useFragmentNullabilityTestFragmentWithFieldThatThrows$key} from './__generated__/useFragmentNullabilityTestFragmentWithFieldThatThrows.graphql'; + +const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); +const useClientQuery = require('../useClientQuery'); +const React = require('react'); +const TestRenderer = require('react-test-renderer'); +const {graphql} = require('relay-runtime'); +const {RelayFeatureFlags} = require('relay-runtime'); +const {readFragment} = require('relay-runtime/store/ResolverFragments'); +const {createMockEnvironment} = require('relay-test-utils'); + +/* + * @RelayResolver Query.field_that_throws: Int @semanticNonNull + */ +export function field_that_throws(): number { + throw new Error('There was an error!'); +} + +/* + * @RelayResolver Query.field_with_fragment_that_throws: Int @semanticNonNull + * @rootFragment useFragmentNullabilityTestFragmentWithFieldThatThrows + */ +export function field_with_fragment_that_throws( + rootKey: 
useFragmentNullabilityTestFragmentWithFieldThatThrows$key, +): number { + const {field_that_throws} = readFragment( + graphql` + fragment useFragmentNullabilityTestFragmentWithFieldThatThrows on Query + @throwOnFieldError { + field_that_throws + } + `, + rootKey, + ); + return field_that_throws; +} + +describe('useFragment_nullability-test.js', () => { + beforeEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; + jest.spyOn(console, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; + }); + + it('should throw when a resolver in throwOnFieldError-fragment throws', async () => { + const environment = createMockEnvironment(); + + const TestComponent = () => { + const data = useClientQuery( + graphql` + query useFragmentNullabilityTest1Query @throwOnFieldError { + field_that_throws + } + `, + {}, + ); + return
{data.field_that_throws}
; + }; + const renderer = TestRenderer.create( + + `Error: ${error}`}> + + + , + , + ); + await TestRenderer.act(() => jest.runAllTimers()); + expect( + String(renderer.toJSON()).includes('Unexpected resolver exception'), + ).toEqual(true); + }); + + it('should throw when a resolver in throwOnFieldError-fragment has a throwing throwOnFieldError-fragment', async () => { + const environment = createMockEnvironment(); + + const TestComponent = () => { + const data = useClientQuery( + graphql` + query useFragmentNullabilityTest2Query @throwOnFieldError { + field_with_fragment_that_throws + } + `, + {}, + ); + return
{data.field_with_fragment_that_throws}
; + }; + const renderer = TestRenderer.create( + + `Error: ${error}`}> + + + , + , + ); + await TestRenderer.act(() => jest.runAllTimers()); + expect( + String(renderer.toJSON()).includes('Unexpected resolver exception'), + ).toEqual(true); + }); + + it('should not throw when a resolver in non-throwing-fragment has a throwing throwOnFieldError-fragment', async () => { + const environment = createMockEnvironment(); + + const TestComponent = () => { + const data = useClientQuery( + graphql` + query useFragmentNullabilityTest3Query { + field_with_fragment_that_throws + } + `, + {}, + ); + return
{data.field_with_fragment_that_throws}
; + }; + const renderer = TestRenderer.create( + + `Error: ${error}`}> + + + , + , + ); + await TestRenderer.act(() => jest.runAllTimers()); + expect( + String(renderer.toJSON()).includes('Unexpected resolver exception'), + ).toEqual(false); + }); +}); + +class ErrorBoundary extends React.Component { + state: any | {error: null} = {error: null}; + componentDidCatch(error: Error) { + this.setState({error}); + } + render(): React.Node { + const {children, fallback: Fallback} = this.props; + const {error} = this.state; + if (error) { + return ; + } + return children; + } +} diff --git a/packages/react-relay/relay-hooks/__tests__/useIsParentQueryActive-test.js b/packages/react-relay/relay-hooks/__tests__/useIsParentQueryActive-test.js index 9d46d0340b891..8fe35fc0c1b9c 100644 --- a/packages/react-relay/relay-hooks/__tests__/useIsParentQueryActive-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useIsParentQueryActive-test.js @@ -431,7 +431,7 @@ it('should only update if the latest owner completes the query', () => { }, }); const snapshot = environment.lookup(newOperation.fragment); - const newFragmentRef = (snapshot.data?.node: $FlowFixMe); + const newFragmentRef: $FlowFixMe = snapshot.data?.node; expect(mockFn.mock.calls[0]).toEqual([true]); TestRenderer.act(() => { diff --git a/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-fast-refresh-test.js b/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-fast-refresh-test.js index e72a4a5de3e52..69258461f4409 100644 --- a/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-fast-refresh-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-fast-refresh-test.js @@ -10,6 +10,7 @@ */ 'use strict'; + import type {RelayMockEnvironment} from '../../../relay-test-utils/RelayModernMockEnvironment'; import type { OperationDescriptor, diff --git a/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-test.js 
b/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-test.js index 6b8a940d51c2b..1fe1a06d52e61 100644 --- a/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useLazyLoadQueryNode-test.js @@ -10,6 +10,7 @@ */ 'use strict'; + import type {LogEvent} from '../../../relay-runtime/store/RelayStoreTypes'; import type {RelayMockEnvironment} from '../../../relay-test-utils/RelayModernMockEnvironment'; import type { @@ -28,7 +29,7 @@ import type { import type {Query} from 'relay-runtime/util/RelayRuntimeTypes'; const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); -const useFragmentNode = require('../useFragmentNode'); +const useFragment = require('../useFragment'); const useLazyLoadQueryNode = require('../useLazyLoadQueryNode'); const React = require('react'); const ReactTestRenderer = require('react-test-renderer'); @@ -46,7 +47,7 @@ const { disallowWarnings, expectToWarn, expectWarningWillFire, -} = require('relay-test-utils-internal'); +} = (jest.requireActual('relay-test-utils-internal'): $FlowFixMe); const defaultFetchPolicy = 'network-only'; @@ -63,9 +64,6 @@ function expectToBeRendered( renderFn.mockClear(); } -disallowWarnings(); -disallowConsoleErrors(); - function expectToHaveFetched( environment: RelayMockEnvironment, query: OperationDescriptor, @@ -92,162 +90,140 @@ type Props = { extraData?: number, }; -describe('useLazyLoadQueryNode', () => { - let environment; - let gqlQuery: - | Query< - useLazyLoadQueryNodeTest1Query$variables, - useLazyLoadQueryNodeTest1Query$data, - > - | Query< - useLazyLoadQueryNodeTestUserQuery$variables, - useLazyLoadQueryNodeTestUserQuery$data, - >; - let renderFn; - let render; - let release; - let query; - let variables; - let Container; - let setProps; - let setKey; - let logs: Array; - let errorBoundaryDidCatchFn; - - beforeEach(() => { - errorBoundaryDidCatchFn = jest.fn<[Error], mixed>(); - - class ErrorBoundary extends 
React.Component { - state: any | {error: null} = {error: null}; - componentDidCatch(error: Error) { - errorBoundaryDidCatchFn(error); - this.setState({error}); - } - render(): any | React.Node { - const {children, fallback} = this.props; - const {error} = this.state; - if (error) { - return React.createElement(fallback, {error}); - } - return children; - } - } +let environment; +let gqlQuery: + | Query< + useLazyLoadQueryNodeTest1Query$variables, + useLazyLoadQueryNodeTest1Query$data, + > + | Query< + useLazyLoadQueryNodeTestUserQuery$variables, + useLazyLoadQueryNodeTestUserQuery$data, + >; +let renderFn; +let render; +let release; +let query; +let variables; +let Container; +let setProps; +let setKey; +let logs: Array; +let errorBoundaryDidCatchFn; +let useFragmentImpl: typeof useFragment; - const Renderer = (props: Props) => { - const _query = createOperationDescriptor(gqlQuery, props.variables); - const data = useLazyLoadQueryNode({ - query: _query, - fetchObservable: __internal.fetchQuery(environment, _query), - fetchPolicy: props.fetchPolicy || defaultFetchPolicy, - componentDisplayName: 'TestDisplayName', - }); - return renderFn(data); - }; +disallowWarnings(); +disallowConsoleErrors(); - Container = (props: Props, key?: number) => { - const [nextProps, setNextProps] = React.useState(props); - const [nextKey, setNextKey] = React.useState(key); - setProps = setNextProps; - setKey = setNextKey; - return ; - }; +beforeEach(() => { + jest.resetModules(); - render = (env: RelayMockEnvironment, children: React.Node) => { - return ReactTestRenderer.create( - - - `Error: ${error.message + ': ' + error.stack}` - }> - {children} - - , - ); - }; + useFragmentImpl = useFragment; - logs = []; - environment = createMockEnvironment({ - log: event => { - logs.push(event); - }, - store: new Store(new RecordSource(), {gcReleaseBufferSize: 0}), - }); - release = jest.fn<[mixed], mixed>(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - const 
originalRetain = environment.retain.bind(environment); - // $FlowFixMe[cannot-write] - // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode - environment.retain = jest.fn((...args) => { - const originalDisposable = originalRetain(...args); - return { - dispose: () => { - release(args[0].variables); - originalDisposable.dispose(); - }, - }; - }); + errorBoundaryDidCatchFn = jest.fn<[Error], mixed>(); - gqlQuery = graphql` - query useLazyLoadQueryNodeTestUserQuery($id: ID) { - node(id: $id) { - id - name - ...useLazyLoadQueryNodeTestUserFragment - } - } - `; - graphql` - fragment useLazyLoadQueryNodeTestUserFragment on User { - name + class ErrorBoundary extends React.Component { + state: any | {error: null} = {error: null}; + componentDidCatch(error: Error) { + errorBoundaryDidCatchFn(error); + this.setState({error}); + } + render(): any | React.Node { + const {children, fallback: Fallback} = this.props; + const {error} = this.state; + if (error) { + return ; } - `; + return children; + } + } + + const Renderer = (props: Props) => { + const _query = createOperationDescriptor(gqlQuery, props.variables); + const data = useLazyLoadQueryNode({ + query: _query, + fetchObservable: __internal.fetchQuery(environment, _query), + fetchPolicy: props.fetchPolicy || defaultFetchPolicy, + componentDisplayName: 'TestDisplayName', + }); + return renderFn(data); + }; + + Container = function TestContainer(props: Props, key?: number) { + // $FlowFixMe[react-rule-hook] + const [nextProps, setNextProps] = React.useState(props); + // $FlowFixMe[react-rule-hook] + const [nextKey, setNextKey] = React.useState(key); + setProps = setNextProps; + setKey = setNextKey; + return ; + }; + + render = (env: RelayMockEnvironment, children: React.Node) => { + return ReactTestRenderer.create( + + + `Error: ${error.message + ': ' + error.stack}` + }> + {children} + + , + ); + }; - variables = {id: '1'}; - query = createOperationDescriptor(gqlQuery, variables); - renderFn = 
jest.fn((result: any) => result?.node?.name ?? 'Empty'); + logs = []; + environment = createMockEnvironment({ + log: event => { + logs.push(event); + }, + store: new Store(new RecordSource(), {gcReleaseBufferSize: 0}), }); - - afterEach(() => { - environment.mockClear(); - jest.clearAllTimers(); + release = jest.fn<[mixed], mixed>(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + const originalRetain = environment.retain.bind(environment); + // $FlowFixMe[cannot-write] + // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode + environment.retain = jest.fn((...args) => { + const originalDisposable = originalRetain(...args); + return { + dispose: () => { + release(args[0].variables); + originalDisposable.dispose(); + }, + }; }); - it('fetches and renders the query data', () => { - const instance = render(environment, ); - - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, { - data: { - node: { - __typename: 'User', - id: variables.id, - name: 'Alice', - }, - }, - }); - jest.runAllImmediates(); - }); + gqlQuery = graphql` + query useLazyLoadQueryNodeTestUserQuery($id: ID) { + node(id: $id) { + id + name + ...useLazyLoadQueryNodeTestUserFragment + } + } + `; + graphql` + fragment useLazyLoadQueryNodeTestUserFragment on User { + name + } + `; - const data = environment.lookup(query.fragment).data; - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - }); + variables = {id: '1'}; + query = createOperationDescriptor(gqlQuery, variables); + renderFn = jest.fn((result: any) => result?.node?.name ?? 
'Empty'); +}); - it('subscribes to query fragment results and preserves object identity', () => { - const instance = render(environment, ); +it('fetches and renders the query data', () => { + const instance = render(environment, ); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + ReactTestRenderer.act(() => { environment.mock.resolve(gqlQuery, { data: { node: { @@ -257,408 +233,657 @@ describe('useLazyLoadQueryNode', () => { }, }, }); + jest.runAllImmediates(); + }); - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); - expect(renderFn).toBeCalledTimes(1); - const prevData = renderFn.mock.calls[0][0]; - expect(prevData.node.name).toBe('Alice'); - renderFn.mockClear(); - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); +}); - ReactTestRenderer.act(() => { - environment.commitUpdate(store => { - const alice = store.get('1'); - if (alice != null) { - alice.setValue('ALICE', 'name'); - } - }); - }); - expect(renderFn).toBeCalledTimes(1); - const nextData = renderFn.mock.calls[0][0]; - expect(nextData.node.name).toBe('ALICE'); - renderFn.mockClear(); +it('subscribes to query fragment results and preserves object identity', () => { + const instance = render(environment, ); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + 
expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); - // object identity is preserved for unchanged data such as fragment references - expect(nextData.node.__fragments).toBe(prevData.node.__fragments); + environment.mock.resolve(gqlQuery, { + data: { + node: { + __typename: 'User', + id: variables.id, + name: 'Alice', + }, + }, }); - it('fetches and renders correctly even if fetched query data still has missing data', () => { - // This scenario might happen if for example we are making selections on - // abstract types which the concrete type doesn't implement + ReactTestRenderer.act(() => { + jest.runAllImmediates(); + }); + expect(renderFn).toBeCalledTimes(1); + const prevData = renderFn.mock.calls[0][0]; + expect(prevData.node.name).toBe('Alice'); + renderFn.mockClear(); + ReactTestRenderer.act(() => { + jest.runAllImmediates(); + }); - const instance = render(environment, ); + ReactTestRenderer.act(() => { + environment.commitUpdate(store => { + const alice = store.get('1'); + if (alice != null) { + alice.setValue('ALICE', 'name'); + } + }); + }); + expect(renderFn).toBeCalledTimes(1); + const nextData = renderFn.mock.calls[0][0]; + expect(nextData.node.name).toBe('ALICE'); + renderFn.mockClear(); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); + // object identity is preserved for unchanged data such as fragment references + expect(nextData.node.__fragments).toBe(prevData.node.__fragments); +}); - expectWarningWillFire( - 'RelayResponseNormalizer: Payload did not contain a value for field `name: name`. 
Check that you are parsing with the same query that was used to fetch the payload.', - ); +it('fetches and renders correctly even if fetched query data still has missing data', () => { + // This scenario might happen if for example we are making selections on + // abstract types which the concrete type doesn't implement - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, { - data: { - node: { - __typename: 'User', - id: variables.id, - // name is missing in response - }, - }, - }); - }); + const instance = render(environment, ); - const data = environment.lookup(query.fragment).data; - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - }); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); - it('fetches and renders correctly if component unmounts before it can commit', () => { - const payload = { + expectWarningWillFire( + 'RelayResponseNormalizer: Payload did not contain a value for field `name: name`. 
Check that you are parsing with the same query that was used to fetch the payload.', + ); + + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, { data: { node: { __typename: 'User', id: variables.id, - name: 'Alice', + // name is missing in response }, }, - }; + }); + }); - let instance = render(environment, ); + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); +}); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); +it('fetches and renders correctly if component unmounts before it can commit', () => { + const payload = { + data: { + node: { + __typename: 'User', + id: variables.id, + name: 'Alice', + }, + }, + }; - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, payload); - }); + let instance = render(environment, ); - // Unmount the component before it gets to permanently retain the data - instance.unmount(); - expect(renderFn).not.toBeCalled(); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); - // Running all immediates makes sure all useEffects run and GC isn't - // Triggered by mistake - ReactTestRenderer.act(() => jest.runAllImmediates()); - // Trigger timeout and GC to clear all references - ReactTestRenderer.act(() => jest.runAllTimers()); - // Verify GC has run - expect(environment.getStore().getSource().toJSON()).toEqual({}); + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, payload); + }); - renderFn.mockClear(); - // $FlowFixMe[method-unbinding] added when 
improving typing for this parameters - environment.retain.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); + // Unmount the component before it gets to permanently retain the data + instance.unmount(); + expect(renderFn).not.toBeCalled(); - instance = render(environment, ); + // Running all immediates makes sure all useEffects run and GC isn't + // Triggered by mistake + ReactTestRenderer.act(() => jest.runAllImmediates()); + // Trigger timeout and GC to clear all references + ReactTestRenderer.act(() => jest.runAllTimers()); + // Verify GC has run + expect(environment.getStore().getSource().toJSON()).toEqual({}); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); + renderFn.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.retain.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, payload); - }); + instance = render(environment, ); - const data = environment.lookup(query.fragment).data; - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, payload); }); - it('fetches and renders correctly when switching between queries', () => { - // Render the component - const initialQuery = 
createOperationDescriptor(gqlQuery, { + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); +}); + +it('fetches and renders correctly when switching between queries', () => { + // Render the component + const initialQuery = createOperationDescriptor(gqlQuery, { + id: 'first-render', + }); + environment.commitPayload(initialQuery, { + node: { + __typename: 'User', id: 'first-render', - }); - environment.commitPayload(initialQuery, { - node: { - __typename: 'User', - id: 'first-render', - name: 'Bob', - }, - }); + name: 'Bob', + }, + }); - const instance = render( - environment, - , - ); - expect(instance.toJSON()).toEqual('Bob'); - renderFn.mockClear(); + const instance = render( + environment, + , + ); + expect(instance.toJSON()).toEqual('Bob'); + renderFn.mockClear(); - // Suspend on the first query - ReactTestRenderer.act(() => { - setProps({variables}); - }); + // Suspend on the first query + ReactTestRenderer.act(() => { + setProps({variables}); + }); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - renderFn.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.retain.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + renderFn.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.retain.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); - // Switch to the second query - const nextVariables = {id: '2'}; - const nextQuery = createOperationDescriptor(gqlQuery, nextVariables); 
- ReactTestRenderer.act(() => { - setProps({variables: nextVariables}); - }); + // Switch to the second query + const nextVariables = {id: '2'}; + const nextQuery = createOperationDescriptor(gqlQuery, nextVariables); + ReactTestRenderer.act(() => { + setProps({variables: nextVariables}); + }); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, nextQuery); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - renderFn.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.retain.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, nextQuery); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + renderFn.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.retain.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); - // Switch back to the first query, it shouldn't request again - ReactTestRenderer.act(() => { - setProps({variables}); - }); + // Switch back to the first query, it shouldn't request again + ReactTestRenderer.act(() => { + setProps({variables}); + }); - expect(instance.toJSON()).toEqual('Fallback'); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.execute).toBeCalledTimes(0); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(0); + 
expect(instance.toJSON()).toEqual('Fallback'); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.execute).toBeCalledTimes(0); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(0); + + const payload = { + data: { + node: { + __typename: 'User', + id: variables.id, + name: 'Alice', + }, + }, + }; + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, payload); + jest.runAllImmediates(); + }); + const data = environment.lookup(query.fragment).data; + expect(renderFn.mock.calls[0][0]).toEqual(data); + expect(instance.toJSON()).toEqual('Alice'); +}); - const payload = { +it('fetches and renders correctly when re-mounting the same query (even if GC runs synchronously)', () => { + const store = new Store(new RecordSource(), { + gcScheduler: run => run(), + gcReleaseBufferSize: 0, + }); + jest.spyOn(store, 'scheduleGC'); + environment = createMockEnvironment({ + store, + }); + // Render the component + const instance = render( + environment, + , + ); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); + renderFn.mockClear(); + + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, { data: { node: { __typename: 'User', - id: variables.id, - name: 'Alice', + id: '1', + name: 'Bob', }, }, - }; - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, payload); - jest.runAllImmediates(); }); - const data = environment.lookup(query.fragment).data; - expect(renderFn.mock.calls[0][0]).toEqual(data); - expect(instance.toJSON()).toEqual('Alice'); 
+ jest.runAllImmediates(); }); - it('fetches and renders correctly when re-mounting the same query (even if GC runs synchronously)', () => { - const store = new Store(new RecordSource(), { - gcScheduler: run => run(), - gcReleaseBufferSize: 0, + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + renderFn.mockClear(); + + ReactTestRenderer.act(() => { + // Pass a new key to force a re-mount + setProps({variables}); + setKey(1); + jest.runAllImmediates(); + }); + + // Assert that GC doesn't run since the query doesn't + // incorrectly get fully released (which would trigger GC) + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(store.scheduleGC).toHaveBeenCalledTimes(0); + + // Assert that a new request was not started + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.execute).toHaveBeenCalledTimes(0); + + // Expect to still be able to render the same data + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); +}); + +it('disposes the temporary retain when the component is re-rendered and switches to another query', () => { + // Render the component + const instance = render( + environment, + , + ); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + // $FlowFixMe[method-unbinding] added when improving typing for 
this parameters + environment.execute.mockClear(); + renderFn.mockClear(); + + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, { + data: { + node: { + __typename: 'User', + id: '1', + name: 'Bob', + }, + }, }); - jest.spyOn(store, 'scheduleGC'); - environment = createMockEnvironment({ - store, + jest.runAllImmediates(); + }); + + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + renderFn.mockClear(); + + ReactTestRenderer.act(() => { + // Update `extraData` to trigger a re-render + setProps({variables, extraData: 1}); + }); + + // Nothing to release here since variables didn't change + expect(release).toHaveBeenCalledTimes(0); + + ReactTestRenderer.act(() => { + // Update `variables` to fetch new data + setProps({variables: {id: '2'}, extraData: 1}); + }); + + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.execute).toHaveBeenCalledTimes(1); + ReactTestRenderer.act(() => { + environment.mock.resolve(gqlQuery, { + data: { + node: { + __typename: 'User', + id: '2', + name: 'Bob', + }, + }, }); - // Render the component + jest.runAllImmediates(); + }); + + // Variables were changed and the retain for the previous query + // should be released + expect(release).toHaveBeenCalledTimes(1); +}); + +it('does not cancel ongoing network request when component unmounts while suspended', () => { + const initialVariables = {id: 'first-render'}; + const initialQuery = createOperationDescriptor(gqlQuery, initialVariables); + environment.commitPayload(initialQuery, { + node: { + __typename: 'User', + id: 'first-render', + name: 'Bob', + }, + }); + + const instance = render( + environment, + , + ); + + expect(instance.toJSON()).toEqual('Bob'); + 
renderFn.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); + + // Suspend on the first query + ReactTestRenderer.act(() => { + setProps({variables, fetchPolicy: 'store-or-network'}); + }); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(2); + renderFn.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.retain.mockClear(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.execute.mockClear(); + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); + + ReactTestRenderer.act(() => { + instance.unmount(); + }); + + // Assert data is released + expect(release).toBeCalledTimes(1); + + // Assert request in flight is not cancelled + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); +}); + +it('does not cancel ongoing network request when component unmounts after committing', () => { + const instance = render(environment, ); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + + // Resolve a payload but don't complete the network request + environment.mock.nextValue(gqlQuery, { + data: { + node: { + __typename: 'User', + id: variables.id, + name: 'Alice', + }, + }, + }); + + // Assert that the component unsuspended and mounted + const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, 
data); + + // Assert request was created + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); + + ReactTestRenderer.act(() => { + instance.unmount(); + }); + + // Assert data is released + expect(release).toBeCalledTimes(1); + // Assert request in flight is not cancelled + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); +}); + +it('does not cancel network request when temporarily retained component that never commits is disposed of after timeout', () => { + const instance = render(environment, ); + + expect(instance.toJSON()).toEqual('Fallback'); + expectToHaveFetched(environment, query); + expect(renderFn).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toHaveBeenCalledTimes(1); + ReactTestRenderer.act(() => { + instance.unmount(); + }); + // Resolve a payload but don't complete the network request + environment.mock.nextValue(gqlQuery, { + data: { + node: { + __typename: 'User', + id: variables.id, + name: 'Alice', + }, + }, + }); + // Assert request in created + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); + + // Trigger releasing of the temporary retain + jest.runAllTimers(); + // Assert data is released + expect(release).toBeCalledTimes(1); + // Assert request in flight is not cancelled + expect(environment.mock.isLoading(query.request.node, variables, {})).toEqual( + true, + ); +}); + +describe('with @defer and re-rendering', () => { + beforeEach(() => { + graphql` + fragment useLazyLoadQueryNodeTestDeferFragment on User { + id + name + } + `; + gqlQuery = graphql` + query useLazyLoadQueryNodeTest1Query($id: ID) { + node(id: $id) { + ...useLazyLoadQueryNodeTestDeferFragment @defer + } + } + `; + variables = {id: 'user:1234'}; + query = createOperationDescriptor(gqlQuery, variables); + }); + + it('should handle errors ', () => { const instance = 
render( environment, - , + , ); expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); - renderFn.mockClear(); - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, { - data: { - node: { - __typename: 'User', - id: '1', - name: 'Bob', - }, - }, - }); - jest.runAllImmediates(); - }); + const payloadError = new Error('Invalid Payload'); + // $FlowFixMe[prop-missing] This will make react suppress error logging for this error + payloadError._suppressLogging = true; - const data = environment.lookup(query.fragment).data; - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - renderFn.mockClear(); + expect(errorBoundaryDidCatchFn).not.toBeCalled(); + + environment.mock.reject(query, payloadError); + // force re-rendering of the component, to read from the QueryResource + // by default, error responses do not trigger react updates ReactTestRenderer.act(() => { - // Pass a new key to force a re-mount setProps({variables}); setKey(1); - jest.runAllImmediates(); }); - // Assert that GC doesn't run since the query doesn't - // incorrectly get fully released (which would trigger GC) - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(store.scheduleGC).toHaveBeenCalledTimes(0); - - // Assert that a new request was not started - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.execute).toHaveBeenCalledTimes(0); - - // Expect to still be able to render the same 
data - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); + // This time, error boundary will render the error + expect(errorBoundaryDidCatchFn).toBeCalledWith(payloadError); + expect(renderFn).not.toBeCalled(); }); - it('disposes the temporary retain when the component is re-rendered and switches to another query', () => { - // Render the component + it('should render the query with defer payloads without errors for defer payloads', () => { const instance = render( environment, - , + , ); expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); - renderFn.mockClear(); ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, { + environment.mock.nextValue(query, { data: { node: { __typename: 'User', - id: '1', - name: 'Bob', + id: variables.id, }, }, }); - jest.runAllImmediates(); }); const data = environment.lookup(query.fragment).data; + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file expectToBeRendered(renderFn, data); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - renderFn.mockClear(); - ReactTestRenderer.act(() => { - // Update `extraData` to trigger a re-render - setProps({variables, extraData: 1}); - }); + expect(errorBoundaryDidCatchFn).not.toBeCalled(); - // Nothing to release here since variables didn't change - expect(release).toHaveBeenCalledTimes(0); + const payloadError = new Error('Invalid Payload'); + 
expectToWarn( + 'QueryResource: An incremental payload for query `useLazyLoadQueryNodeTest1Query` returned an error: `Invalid Payload`.', + () => { + environment.mock.reject(query, payloadError); + }, + ); + // force re-rendering of the component, to read from the QueryResource + // by default, error responses do not trigger react updates ReactTestRenderer.act(() => { - // Update `variables` to fetch new data - setProps({variables: {id: '2'}, extraData: 1}); + setProps({variables}); + setKey(1); }); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.execute).toHaveBeenCalledTimes(1); - ReactTestRenderer.act(() => { - environment.mock.resolve(gqlQuery, { - data: { - node: { - __typename: 'User', - id: '2', - name: 'Bob', - }, - }, - }); - jest.runAllImmediates(); - }); + // error boundary should not display that error + expect(errorBoundaryDidCatchFn).not.toBeCalled(); - // Variables were changed and the retain for the previous query - // should be released - expect(release).toHaveBeenCalledTimes(1); + // and we also should re-render the same view as for the initial response + // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file + expectToBeRendered(renderFn, data); }); +}); - it('does not cancel ongoing network request when component unmounts while suspended', () => { - const initialVariables = {id: 'first-render'}; - const initialQuery = createOperationDescriptor(gqlQuery, initialVariables); - environment.commitPayload(initialQuery, { - node: { - __typename: 'User', - id: 'first-render', - name: 'Bob', - }, - }); - - const instance = render( - environment, - , +describe('partial rendering', () => { + it('does not suspend at the root if query does not have direct data dependencies', () => { + const gqlFragment = graphql` + fragment useLazyLoadQueryNodeTestRootFragment on Query { + node(id: $id) { + id + name + } + } + `; + const gqlOnlyFragmentsQuery = graphql` + query 
useLazyLoadQueryNodeTestOnlyFragmentsQuery($id: ID) { + ...useLazyLoadQueryNodeTestRootFragment + } + `; + const onlyFragsQuery = createOperationDescriptor( + gqlOnlyFragmentsQuery, + variables, ); - expect(instance.toJSON()).toEqual('Bob'); - renderFn.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); - - // Suspend on the first query - ReactTestRenderer.act(() => { - setProps({variables, fetchPolicy: 'store-or-network'}); - }); - - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(2); - renderFn.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.retain.mockClear(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.execute.mockClear(); - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); - - ReactTestRenderer.act(() => { - instance.unmount(); - }); - - // Assert data is released - expect(release).toBeCalledTimes(1); + function FragmentComponent(props: {query: mixed}) { + const fragment = getFragment(gqlFragment); + // $FlowFixMe + const data = useFragmentImpl(fragment, props.query); + renderFn(data); + return null; + } - // Assert request in flight is not cancelled - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); - }); + const Renderer = (props: {variables: {id: string}}) => { + const _query = createOperationDescriptor( + gqlOnlyFragmentsQuery, + props.variables, + ); + const data = useLazyLoadQueryNode({ + componentDisplayName: 'TestDisplayName', + fetchObservable: __internal.fetchQuery(environment, _query), + fetchPolicy: 'store-or-network', + query: _query, + renderPolicy: 'partial', + }); + 
return ( + + + + ); + }; - it('does not cancel ongoing network request when component unmounts after committing', () => { - const instance = render(environment, ); + const instance = render(environment, ); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); + // Assert that we suspended at the fragment level and not at the root + expect(instance.toJSON()).toEqual('Fallback around fragment'); + expectToHaveFetched(environment, onlyFragsQuery); expect(renderFn).not.toBeCalled(); // $FlowFixMe[method-unbinding] added when improving typing for this parameters expect(environment.retain).toHaveBeenCalledTimes(1); - // Resolve a payload but don't complete the network request - environment.mock.nextValue(gqlQuery, { + environment.mock.resolve(gqlOnlyFragmentsQuery, { data: { node: { __typename: 'User', @@ -668,41 +893,21 @@ describe('useLazyLoadQueryNode', () => { }, }); - // Assert that the component unsuspended and mounted - const data = environment.lookup(query.fragment).data; // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - - // Assert request was created - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); - - ReactTestRenderer.act(() => { - instance.unmount(); + expectToBeRendered(renderFn, { + node: { + id: variables.id, + name: 'Alice', + }, }); - - // Assert data is released - expect(release).toBeCalledTimes(1); - // Assert request in flight is not cancelled - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); }); +}); - it('does not cancel network request when temporarily retained component that never commits is disposed of after timeout', () => { - const instance = render(environment, ); +describe('logging', () => { + test('simple fetch', () => { + render(environment, ); - expect(instance.toJSON()).toEqual('Fallback'); - expectToHaveFetched(environment, query); - 
expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); - ReactTestRenderer.act(() => { - instance.unmount(); - }); - // Resolve a payload but don't complete the network request - environment.mock.nextValue(gqlQuery, { + environment.mock.resolve(gqlQuery, { data: { node: { __typename: 'User', @@ -711,456 +916,239 @@ describe('useLazyLoadQueryNode', () => { }, }, }); - // Assert request in created - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); - - // Trigger releasing of the temporary retain - jest.runAllTimers(); - // Assert data is released - expect(release).toBeCalledTimes(1); - // Assert request in flight is not cancelled - expect( - environment.mock.isLoading(query.request.node, variables, {}), - ).toEqual(true); - }); - - describe('with @defer and re-rendering', () => { - beforeEach(() => { - graphql` - fragment useLazyLoadQueryNodeTestDeferFragment on User { - id - name - } - `; - gqlQuery = graphql` - query useLazyLoadQueryNodeTest1Query($id: ID) { - node(id: $id) { - ...useLazyLoadQueryNodeTestDeferFragment @defer - } - } - `; - variables = {id: 'user:1234'}; - query = createOperationDescriptor(gqlQuery, variables); - }); - - it('should handle errors ', () => { - const instance = render( - environment, - , - ); - - expect(instance.toJSON()).toEqual('Fallback'); - expect(renderFn).not.toBeCalled(); - - const payloadError = new Error('Invalid Payload'); - // $FlowFixMe[prop-missing] This will make react suppress error logging for this error - payloadError._suppressLogging = true; - - expect(errorBoundaryDidCatchFn).not.toBeCalled(); - - environment.mock.reject(query, payloadError); - - // force re-rendering of the component, to read from the QueryResource - // by default, error responses do not trigger react updates - ReactTestRenderer.act(() => { - setProps({variables}); - setKey(1); - }); - // 
This time, error boundary will render the error - expect(errorBoundaryDidCatchFn).toBeCalledWith(payloadError); - expect(renderFn).not.toBeCalled(); + ReactTestRenderer.act(() => { + jest.runAllImmediates(); }); - it('should render the query with defer payloads without errors for defer payloads', () => { - const instance = render( - environment, - , - ); - - expect(instance.toJSON()).toEqual('Fallback'); - expect(renderFn).not.toBeCalled(); - - ReactTestRenderer.act(() => { - environment.mock.nextValue(query, { - data: { - node: { - __typename: 'User', - id: variables.id, - }, + expect(logs).toMatchObject([ + { + name: 'execute.start', + executeId: 100001, + }, + { + name: 'network.start', + networkRequestId: 100000, + }, + { + name: 'queryresource.fetch', + resourceID: 200000, + profilerContext: expect.objectContaining({}), + }, + { + name: 'suspense.query', + fetchPolicy: 'network-only', + isPromiseCached: false, + operation: { + request: { + variables: variables, }, - }); - }); - - const data = environment.lookup(query.fragment).data; - - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); - - expect(errorBoundaryDidCatchFn).not.toBeCalled(); - - const payloadError = new Error('Invalid Payload'); - expectToWarn( - 'QueryResource: An incremental payload for query `useLazyLoadQueryNodeTest1Query` returned an error: `Invalid Payload`.', - () => { - environment.mock.reject(query, payloadError); }, - ); - - // force re-rendering of the component, to read from the QueryResource - // by default, error responses do not trigger react updates - ReactTestRenderer.act(() => { - setProps({variables}); - setKey(1); - }); - - // error boundary should not display that error - expect(errorBoundaryDidCatchFn).not.toBeCalled(); - - // and we also should re-render the same view as for the initial response - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, data); 
- }); + queryAvailability: {status: 'missing'}, + renderPolicy: 'partial', + }, + { + name: 'network.next', + networkRequestId: 100000, + }, + { + name: 'execute.next', + executeId: 100001, + }, + { + name: 'network.complete', + networkRequestId: 100000, + }, + { + name: 'execute.complete', + executeId: 100001, + }, + { + name: 'queryresource.retain', + resourceID: 200000, + profilerContext: expect.objectContaining({}), + }, + ]); }); - describe('partial rendering', () => { - it('does not suspend at the root if query does not have direct data dependencies', () => { - const gqlFragment = graphql` - fragment useLazyLoadQueryNodeTestRootFragment on Query { - node(id: $id) { - id - name - } - } - `; - const gqlOnlyFragmentsQuery = graphql` - query useLazyLoadQueryNodeTestOnlyFragmentsQuery($id: ID) { - ...useLazyLoadQueryNodeTestRootFragment - } - `; - const onlyFragsQuery = createOperationDescriptor( - gqlOnlyFragmentsQuery, - variables, - ); - - function FragmentComponent(props: {query: mixed}) { - const fragment = getFragment(gqlFragment); - const result: $FlowFixMe = useFragmentNode( - fragment, - props.query, - 'TestUseFragment', - ); - renderFn(result.data); - return null; - } - - const Renderer = (props: {variables: {id: string}}) => { - const _query = createOperationDescriptor( - gqlOnlyFragmentsQuery, - props.variables, - ); - const data = useLazyLoadQueryNode({ - componentDisplayName: 'TestDisplayName', - fetchObservable: __internal.fetchQuery(environment, _query), - fetchPolicy: 'store-or-network', - query: _query, - renderPolicy: 'partial', - }); - return ( - - - - ); - }; - - const instance = render(environment, ); + test('log when switching queries', () => { + const initialVariables = {id: 'first-render'}; + const variablesOne = {id: '1'}; + const variablesTwo = {id: '2'}; - // Assert that we suspended at the fragment level and not at the root - expect(instance.toJSON()).toEqual('Fallback around fragment'); - expectToHaveFetched(environment, 
onlyFragsQuery); - expect(renderFn).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toHaveBeenCalledTimes(1); + // Render the component + const initialQuery = createOperationDescriptor(gqlQuery, initialVariables); + environment.commitPayload(initialQuery, { + node: { + __typename: 'User', + id: 'first-render', + name: 'Bob', + }, + }); - environment.mock.resolve(gqlOnlyFragmentsQuery, { - data: { - node: { - __typename: 'User', - id: variables.id, - name: 'Alice', - }, - }, - }); + logs = []; + render( + environment, + , + ); - // $FlowFixMe[incompatible-call] Error found while enabling LTI on this file - expectToBeRendered(renderFn, { - node: { - id: variables.id, - name: 'Alice', - }, - }); + // Suspend on the first query + ReactTestRenderer.act(() => { + setProps({variables: variablesOne}); }); - }); - describe('logging', () => { - beforeEach(() => { - // we need to reset modules in order to test generated ID - jest.resetModules(); - disallowWarnings(); - disallowConsoleErrors(); + // Switch to the second query + ReactTestRenderer.act(() => { + setProps({variables: variablesTwo}); }); - test('simple fetch', () => { - render(environment, ); + // Switch back to the first query and it should not request again + ReactTestRenderer.act(() => { + setProps({variables: variablesOne}); + }); - environment.mock.resolve(gqlQuery, { + ReactTestRenderer.act(() => { + const queryOne = createOperationDescriptor(gqlQuery, variablesOne); + const payload = { data: { node: { __typename: 'User', - id: variables.id, + id: variablesOne.id, name: 'Alice', }, }, - }); - - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); - - expect(logs).toMatchObject([ - { - name: 'execute.start', - executeId: 100001, - }, - { - name: 'network.start', - networkRequestId: 100000, - }, - { - name: 'queryresource.fetch', - resourceID: 200000, - profilerContext: expect.objectContaining({}), - }, - { - name: 
'suspense.query', - fetchPolicy: 'network-only', - isPromiseCached: false, - operation: { - request: { - variables: variables, - }, - }, - queryAvailability: {status: 'missing'}, - renderPolicy: 'partial', - }, - { - name: 'network.next', - networkRequestId: 100000, - }, - { - name: 'execute.next', - executeId: 100001, - }, - { - name: 'network.complete', - networkRequestId: 100000, - }, - { - name: 'execute.complete', - executeId: 100001, - }, - { - name: 'queryresource.retain', - resourceID: 200000, - profilerContext: expect.objectContaining({}), - }, - ]); + }; + environment.mock.resolve(queryOne, payload); + jest.runAllImmediates(); }); - test('log when switching queries', () => { - const initialVariables = {id: 'first-render'}; - const variablesOne = {id: '1'}; - const variablesTwo = {id: '2'}; - - // Render the component - const initialQuery = createOperationDescriptor( - gqlQuery, - initialVariables, - ); - environment.commitPayload(initialQuery, { - node: { - __typename: 'User', - id: 'first-render', - name: 'Bob', - }, - }); - - logs = []; - render( - environment, - , - ); - - // Suspend on the first query - ReactTestRenderer.act(() => { - setProps({variables: variablesOne}); - }); - - // Switch to the second query - ReactTestRenderer.act(() => { - setProps({variables: variablesTwo}); - }); - - // Switch back to the first query and it should not request again - ReactTestRenderer.act(() => { - setProps({variables: variablesOne}); - }); - - ReactTestRenderer.act(() => { - const queryOne = createOperationDescriptor(gqlQuery, variablesOne); - const payload = { - data: { - node: { - __typename: 'User', - id: variablesOne.id, - name: 'Alice', - }, + expect(logs).toMatchObject([ + { + // initial fetch + name: 'queryresource.fetch', + resourceID: 200000, + profilerContext: expect.objectContaining({}), + shouldFetch: false, + operation: { + request: { + variables: initialVariables, }, - }; - environment.mock.resolve(queryOne, payload); - jest.runAllImmediates(); - 
}); - - expect(logs).toMatchObject([ - { - // initial fetch - name: 'queryresource.fetch', - resourceID: 200000, - profilerContext: expect.objectContaining({}), - shouldFetch: false, - operation: { - request: { - variables: initialVariables, - }, - }, - }, - { - // initial fetch completes, since it was fulfilled from cache - name: 'queryresource.retain', - resourceID: 200000, - profilerContext: expect.objectContaining({}), - }, - { - // execution for variables one starts - name: 'execute.start', - executeId: 100002, - variables: variablesOne, - }, - { - // request for variables one starts - name: 'network.start', - networkRequestId: 100001, - variables: variablesOne, }, - { - // fetch event for variables one - name: 'queryresource.fetch', - resourceID: 200001, - profilerContext: expect.objectContaining({}), - shouldFetch: true, - operation: { - request: { - variables: variablesOne, - }, + }, + { + // initial fetch completes, since it was fulfilled from cache + name: 'queryresource.retain', + resourceID: 200000, + profilerContext: expect.objectContaining({}), + }, + { + // execution for variables one starts + name: 'execute.start', + executeId: 100002, + variables: variablesOne, + }, + { + // request for variables one starts + name: 'network.start', + networkRequestId: 100001, + variables: variablesOne, + }, + { + // fetch event for variables one + name: 'queryresource.fetch', + resourceID: 200001, + profilerContext: expect.objectContaining({}), + shouldFetch: true, + operation: { + request: { + variables: variablesOne, }, }, - { - name: 'suspense.query', - fetchPolicy: 'network-only', - isPromiseCached: false, - operation: { - request: { - variables: variablesOne, - }, + }, + { + name: 'suspense.query', + fetchPolicy: 'network-only', + isPromiseCached: false, + operation: { + request: { + variables: variablesOne, }, - queryAvailability: {status: 'missing'}, - renderPolicy: 'partial', - }, - { - // execution for variables two starts - name: 'execute.start', - 
executeId: 100004, - variables: variablesTwo, }, - { - // request for variables two starts - name: 'network.start', - networkRequestId: 100003, - variables: variablesTwo, - }, - { - // fetch event for variables two - name: 'queryresource.fetch', - resourceID: 200002, - profilerContext: expect.objectContaining({}), - shouldFetch: true, - operation: { - request: { - variables: variablesTwo, - }, + queryAvailability: {status: 'missing'}, + renderPolicy: 'partial', + }, + { + // execution for variables two starts + name: 'execute.start', + executeId: 100004, + variables: variablesTwo, + }, + { + // request for variables two starts + name: 'network.start', + networkRequestId: 100003, + variables: variablesTwo, + }, + { + // fetch event for variables two + name: 'queryresource.fetch', + resourceID: 200002, + profilerContext: expect.objectContaining({}), + shouldFetch: true, + operation: { + request: { + variables: variablesTwo, }, }, - { - name: 'suspense.query', - fetchPolicy: 'network-only', - isPromiseCached: false, - operation: { - request: { - variables: variablesTwo, - }, + }, + { + name: 'suspense.query', + fetchPolicy: 'network-only', + isPromiseCached: false, + operation: { + request: { + variables: variablesTwo, }, - queryAvailability: {status: 'missing'}, - renderPolicy: 'partial', }, - { - name: 'suspense.query', - fetchPolicy: 'network-only', - isPromiseCached: true, - operation: { - request: { - variables: variablesOne, - }, + queryAvailability: {status: 'missing'}, + renderPolicy: 'partial', + }, + { + name: 'suspense.query', + fetchPolicy: 'network-only', + isPromiseCached: true, + operation: { + request: { + variables: variablesOne, }, - queryAvailability: {status: 'missing'}, - renderPolicy: 'partial', }, - // fetch event for variables one is skipped - // since it's already cached and reused - { - name: 'network.next', - networkRequestId: 100001, - }, - { - name: 'execute.next', - executeId: 100002, - }, - { - name: 'network.complete', - 
networkRequestId: 100001, - }, - { - name: 'execute.complete', - executeId: 100002, - }, - // retain event for variables one - { - name: 'queryresource.retain', - resourceID: 200001, - profilerContext: expect.objectContaining({}), - }, - ]); - }); + queryAvailability: {status: 'missing'}, + renderPolicy: 'partial', + }, + // fetch event for variables one is skipped + // since it's already cached and reused + { + name: 'network.next', + networkRequestId: 100001, + }, + { + name: 'execute.next', + executeId: 100002, + }, + { + name: 'network.complete', + networkRequestId: 100001, + }, + { + name: 'execute.complete', + executeId: 100002, + }, + // retain event for variables one + { + name: 'queryresource.retain', + resourceID: 200001, + profilerContext: expect.objectContaining({}), + }, + ]); }); }); diff --git a/packages/react-relay/relay-hooks/__tests__/useMutation-fast-refresh-test.js b/packages/react-relay/relay-hooks/__tests__/useMutation-fast-refresh-test.js index 8dc8780d862cb..7413cd3ab02f0 100644 --- a/packages/react-relay/relay-hooks/__tests__/useMutation-fast-refresh-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useMutation-fast-refresh-test.js @@ -42,7 +42,7 @@ describe('useLazyLoadQueryNode', () => { const variables = { input: { - commentId: '', + feedbackId: '', }, }; beforeEach(() => { @@ -82,9 +82,7 @@ describe('useLazyLoadQueryNode', () => { ReactRefreshRuntime.injectIntoGlobalHook(global); let commit; const V1 = function (props: {}) { - const [commitFn, isMutationInFlight] = useMutation( - CommentCreateMutation, - ); + const [commitFn, isMutationInFlight] = useMutation(CommentCreateMutation); commit = commitFn; return isInFlightFn(isMutationInFlight); }; @@ -118,9 +116,7 @@ describe('useLazyLoadQueryNode', () => { // Trigger a fast fresh function V2(props: any) { - const [commitFn, isMutationInFlight] = useMutation( - CommentCreateMutation, - ); + const [commitFn, isMutationInFlight] = useMutation(CommentCreateMutation); commit = commitFn; 
return isInFlightFn(isMutationInFlight); } diff --git a/packages/react-relay/relay-hooks/__tests__/useMutation-test.js b/packages/react-relay/relay-hooks/__tests__/useMutation-test.js index dd280b182fe8d..8d26d52a1f710 100644 --- a/packages/react-relay/relay-hooks/__tests__/useMutation-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useMutation-test.js @@ -10,6 +10,7 @@ */ 'use strict'; + import type {RelayMockEnvironment} from '../../../relay-test-utils/RelayModernMockEnvironment'; import type { useMutationTest1Mutation$data, @@ -94,7 +95,7 @@ beforeEach(() => { }) { const [mutation, setMutationFn] = useState(initialMutation); setMutation = setMutationFn; - const [commitFn, isMutationInFlight] = useMutation(mutation); + const [commitFn, isMutationInFlight] = useMutation(mutation); commit = (config: any) => ReactTestRenderer.act(() => { disposable = commitFn(config); diff --git a/packages/react-relay/relay-hooks/__tests__/usePaginationFragment-test.js b/packages/react-relay/relay-hooks/__tests__/usePaginationFragment-test.js index bda1910e7f0ba..5e82adb84742e 100644 --- a/packages/react-relay/relay-hooks/__tests__/usePaginationFragment-test.js +++ b/packages/react-relay/relay-hooks/__tests__/usePaginationFragment-test.js @@ -10,6 +10,7 @@ */ 'use strict'; + import type {Sink} from '../../../relay-runtime/network/RelayObservable'; import type {RequestParameters} from '../../../relay-runtime/util/RelayConcreteNode'; import type {CacheConfig} from '../../../relay-runtime/util/RelayRuntimeTypes'; @@ -32,8 +33,7 @@ import type { import type {Direction, OperationDescriptor, Variables} from 'relay-runtime'; import type {Query} from 'relay-runtime/util/RelayRuntimeTypes'; -const usePaginationFragmentInternal_REACT_CACHE = require('../react-cache/usePaginationFragment_REACT_CACHE'); -const usePaginationFragment_LEGACY = require('../usePaginationFragment'); +const usePaginationFragmentImpl = require('../usePaginationFragment'); const areEqual = require('areEqual'); 
const invariant = require('invariant'); const React = require('react'); @@ -49,7 +49,6 @@ const { Network, Observable, RecordSource, - RelayFeatureFlags, Store, createOperationDescriptor, graphql, @@ -57,468 +56,417 @@ const { const {useMemo, useState} = React; -describe.each([ - ['React Cache', usePaginationFragmentInternal_REACT_CACHE], - ['Legacy', usePaginationFragment_LEGACY], -])('usePaginationFragment (%s)', (_hookName, usePaginationFragmentOriginal) => { - let isUsingReactCacheImplementation; - let originalReactCacheFeatureFlag; - beforeEach(() => { - isUsingReactCacheImplementation = - usePaginationFragmentOriginal === - usePaginationFragmentInternal_REACT_CACHE; - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = isUsingReactCacheImplementation; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); - - let environment; - let initialUser; - let gqlQuery: - | Query< - usePaginationFragmentTestStoryQuery$variables, - usePaginationFragmentTestStoryQuery$data, - > - | Query< - usePaginationFragmentTestUserQuery$variables, - usePaginationFragmentTestUserQuery$data, - >; - let gqlQueryNestedFragment; - let gqlQueryWithoutID; - let gqlQueryWithLiteralArgs; - let gqlQueryWithStreaming; - let gqlPaginationQuery: - | Query< - usePaginationFragmentTestStoryFragmentRefetchQuery$variables, - usePaginationFragmentTestStoryFragmentRefetchQuery$data, - > - | Query< - usePaginationFragmentTestUserFragmentPaginationQuery$variables, - usePaginationFragmentTestUserFragmentPaginationQuery$data, - >; - let gqlFragment; - let gqlFragmentWithStreaming; - let query; - let queryNestedFragment; - let queryWithoutID; - let queryWithLiteralArgs; - let queryWithStreaming; - let paginationQuery; - let variables; - let variablesNestedFragment; - let variablesWithoutID; - let setEnvironment; - let setOwner; - let renderFragment; - let renderSpy; - let loadNext; - let refetch; - let 
Renderer; - let fetch; - let dataSource; - let unsubscribe; - - class ErrorBoundary extends React.Component { - state: {error: ?Error} = {error: null}; - componentDidCatch(error: Error) { - this.setState({error}); - } - render(): React.Node { - const {children, fallback} = this.props; - const {error} = this.state; - if (error) { - return React.createElement(fallback, {error}); - } - return children; - } +let environment; +let initialUser; +let gqlQuery: + | Query< + usePaginationFragmentTestStoryQuery$variables, + usePaginationFragmentTestStoryQuery$data, + > + | Query< + usePaginationFragmentTestUserQuery$variables, + usePaginationFragmentTestUserQuery$data, + >; +let gqlQueryNestedFragment; +let gqlQueryWithoutID; +let gqlQueryWithLiteralArgs; +let gqlQueryWithStreaming; +let gqlPaginationQuery: + | Query< + usePaginationFragmentTestStoryFragmentRefetchQuery$variables, + usePaginationFragmentTestStoryFragmentRefetchQuery$data, + > + | Query< + usePaginationFragmentTestUserFragmentPaginationQuery$variables, + usePaginationFragmentTestUserFragmentPaginationQuery$data, + >; +let gqlFragment; +let gqlFragmentWithStreaming; +let query; +let queryNestedFragment; +let queryWithoutID; +let queryWithLiteralArgs; +let queryWithStreaming; +let paginationQuery; +let variables; +let variablesNestedFragment; +let variablesWithoutID; +let setEnvironment; +let setOwner; +let renderFragment; +let renderSpy; +let loadNext; +let refetch; +let Renderer; +let fetch; +let dataSource; +let unsubscribe; + +class ErrorBoundary extends React.Component { + state: {error: ?Error} = {error: null}; + componentDidCatch(error: Error) { + this.setState({error}); } - - function usePaginationFragment(fragmentNode: any, fragmentRef: any) { - /* $FlowFixMe[underconstrained-implicit-instantiation] error found when - * enabling Flow LTI mode */ - const {data, ...result} = usePaginationFragmentOriginal( - fragmentNode, - fragmentRef, - ); - loadNext = result.loadNext; - refetch = result.refetch; - 
renderSpy(data, result); - return {data, ...result}; + render(): React.Node { + const {children, fallback: Fallback} = this.props; + const {error} = this.state; + if (error) { + return ; + } + return children; } - - function assertCall( - expected: { - data: any, - hasNext: boolean, - hasPrevious: boolean, - isLoadingNext: boolean, - isLoadingPrevious: boolean, +} + +hook usePaginationFragment(fragmentNode: any, fragmentRef: any) { + /* $FlowFixMe[underconstrained-implicit-instantiation] error found when + * enabling Flow LTI mode */ + const {data, ...result} = usePaginationFragmentImpl( + fragmentNode, + fragmentRef, + ); + loadNext = result.loadNext; + refetch = result.refetch; + renderSpy(data, result); + return {data, ...result}; +} + +function assertCall( + expected: { + data: any, + hasNext: boolean, + hasPrevious: boolean, + isLoadingNext: boolean, + isLoadingPrevious: boolean, + }, + idx: number, +) { + const actualData = renderSpy.mock.calls[idx][0]; + const actualResult = renderSpy.mock.calls[idx][1]; + const actualIsLoadingNext = actualResult.isLoadingNext; + const actualIsLoadingPrevious = actualResult.isLoadingPrevious; + const actualHasNext = actualResult.hasNext; + const actualHasPrevious = actualResult.hasPrevious; + + expect(actualData).toEqual(expected.data); + expect(actualIsLoadingNext).toEqual(expected.isLoadingNext); + expect(actualIsLoadingPrevious).toEqual(expected.isLoadingPrevious); + expect(actualHasNext).toEqual(expected.hasNext); + expect(actualHasPrevious).toEqual(expected.hasPrevious); +} + +function expectFragmentResults( + expectedCalls: $ReadOnlyArray<{ + data: $FlowFixMe, + isLoadingNext: boolean, + isLoadingPrevious: boolean, + hasNext: boolean, + hasPrevious: boolean, + }>, +) { + // This ensures that useEffect runs + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderSpy).toBeCalledTimes(expectedCalls.length); + expectedCalls.forEach((expected, idx) => assertCall(expected, idx)); + renderSpy.mockClear(); +} + 
+function resolveQuery(payload: mixed) { + dataSource.next(payload); + dataSource.complete(); +} + +function createFragmentRef( + id: + | $TEMPORARY$string<'node:1'> + | $TEMPORARY$string<'node:100'> + | $TEMPORARY$string<'node:2'> + | $TEMPORARY$string<'node:200'>, + owner: OperationDescriptor, + fragmentName: string = 'usePaginationFragmentTestNestedUserFragment', +) { + return { + [ID_KEY]: id, + [FRAGMENTS_KEY]: { + [fragmentName]: {}, }, - idx: number, - ) { - const actualData = renderSpy.mock.calls[idx][0]; - const actualResult = renderSpy.mock.calls[idx][1]; - const actualIsLoadingNext = actualResult.isLoadingNext; - const actualIsLoadingPrevious = actualResult.isLoadingPrevious; - const actualHasNext = actualResult.hasNext; - const actualHasPrevious = actualResult.hasPrevious; - - expect(actualData).toEqual(expected.data); - expect(actualIsLoadingNext).toEqual(expected.isLoadingNext); - expect(actualIsLoadingPrevious).toEqual(expected.isLoadingPrevious); - expect(actualHasNext).toEqual(expected.hasNext); - expect(actualHasPrevious).toEqual(expected.hasPrevious); - } - - function expectFragmentResults( - expectedCalls: $ReadOnlyArray<{ - data: $FlowFixMe, - isLoadingNext: boolean, - isLoadingPrevious: boolean, - hasNext: boolean, - hasPrevious: boolean, - }>, - ) { - // This ensures that useEffect runs - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderSpy).toBeCalledTimes(expectedCalls.length); - expectedCalls.forEach((expected, idx) => assertCall(expected, idx)); - renderSpy.mockClear(); - } - - function resolveQuery(payload: mixed) { - dataSource.next(payload); - dataSource.complete(); - } - - function createFragmentRef( - id: - | $TEMPORARY$string<'node:1'> - | $TEMPORARY$string<'node:100'> - | $TEMPORARY$string<'node:2'> - | $TEMPORARY$string<'node:200'>, - owner: OperationDescriptor, - fragmentName: string = 'usePaginationFragmentTestNestedUserFragment', - ) { - return { - [ID_KEY]: id, - [FRAGMENTS_KEY]: { - [fragmentName]: {}, - }, - 
[FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, - }; - } - - function createMockEnvironment() { - const source = RecordSource.create(); - const store = new Store(source); - const fetchFn = jest.fn( - ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => { - return Observable.create((sink: Sink) => { - dataSource = sink; - unsubscribe = jest.fn<[], mixed>(); - // $FlowFixMe[incompatible-call] - return unsubscribe; - }); - }, - ); - const environment = new Environment({ - getDataID: (data: {[string]: mixed}, typename: string) => { - // This is the default, but making it explicit in case we need to override - return data.id; - }, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode - network: Network.create(fetchFn), - store, - handlerProvider: _name => { - return ConnectionHandler; - }, - }); - // $FlowFixMe[method-unbinding] - const originalRetain = environment.retain; - // $FlowFixMe[cannot-write] - environment.retain = jest.fn((...args: any) => - originalRetain.apply(environment, args), - ); - return [environment, fetchFn]; - } - - beforeEach(() => { - // Set up mocks - jest.spyOn(console, 'warn').mockImplementationOnce(() => {}); - jest.mock('warning'); - /* $FlowFixMe[underconstrained-implicit-instantiation] error found when - * enabling Flow LTI mode */ - renderSpy = jest.fn<_, mixed>(); - // Set up environment and base data - [environment, fetch] = createMockEnvironment(); - - variablesWithoutID = { - after: null, - first: 1, - before: null, - last: null, - isViewerFriend: false, - orderby: ['name'], - }; - variables = { - ...variablesWithoutID, - id: '1', - }; - variablesNestedFragment = { - ...variablesWithoutID, - id: '', - }; - graphql` - fragment usePaginationFragmentTestNestedUserFragment on User { - username + [FRAGMENT_OWNER_KEY]: owner.request, + }; +} + +function 
createMockEnvironment() { + const source = RecordSource.create(); + const store = new Store(source); + const fetchFn = jest.fn( + ( + _query: RequestParameters, + _variables: Variables, + _cacheConfig: CacheConfig, + ) => { + return Observable.create((sink: Sink) => { + dataSource = sink; + unsubscribe = jest.fn<[], mixed>(); + // $FlowFixMe[incompatible-call] + return unsubscribe; + }); + }, + ); + const environment = new Environment({ + getDataID: (data: {[string]: mixed}, typename: string) => { + // This is the default, but making it explicit in case we need to override + return data.id; + }, + // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file + // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode + network: Network.create(fetchFn), + store, + handlerProvider: _name => { + return ConnectionHandler; + }, + }); + // $FlowFixMe[method-unbinding] + const originalRetain = environment.retain; + // $FlowFixMe[cannot-write] + environment.retain = jest.fn((...args: any) => + originalRetain.apply(environment, args), + ); + return [environment, fetchFn]; +} + +beforeEach(() => { + // Set up mocks + jest.spyOn(console, 'warn').mockImplementationOnce(() => {}); + jest.mock('warning'); + /* $FlowFixMe[underconstrained-implicit-instantiation] error found when + * enabling Flow LTI mode */ + renderSpy = jest.fn<_, mixed>(); + // Set up environment and base data + [environment, fetch] = createMockEnvironment(); + + variablesWithoutID = { + after: null, + first: 1, + before: null, + last: null, + isViewerFriend: false, + orderby: ['name'], + }; + variables = { + ...variablesWithoutID, + id: '1', + }; + variablesNestedFragment = { + ...variablesWithoutID, + id: '', + }; + graphql` + fragment usePaginationFragmentTestNestedUserFragment on User { + username + } + `; + + gqlQuery = graphql` + query usePaginationFragmentTestUserQuery( + $id: ID! 
+ $after: ID + $first: Int + $before: ID + $last: Int + $orderby: [String] + $isViewerFriend: Boolean + ) { + node(id: $id) { + ...usePaginationFragmentTestUserFragment + @arguments(isViewerFriendLocal: $isViewerFriend, orderby: $orderby) } - `; - - gqlQuery = graphql` - query usePaginationFragmentTestUserQuery( - $id: ID! - $after: ID - $first: Int - $before: ID - $last: Int - $orderby: [String] - $isViewerFriend: Boolean - ) { - node(id: $id) { + } + `; + gqlQueryNestedFragment = graphql` + query usePaginationFragmentTestUserQueryNestedFragmentQuery( + $id: ID! + $after: ID + $first: Int + $before: ID + $last: Int + $orderby: [String] + $isViewerFriend: Boolean + ) { + node(id: $id) { + actor { ...usePaginationFragmentTestUserFragment @arguments(isViewerFriendLocal: $isViewerFriend, orderby: $orderby) } } - `; - gqlQueryNestedFragment = graphql` - query usePaginationFragmentTestUserQueryNestedFragmentQuery( - $id: ID! - $after: ID - $first: Int - $before: ID - $last: Int - $orderby: [String] - $isViewerFriend: Boolean - ) { - node(id: $id) { - actor { - ...usePaginationFragmentTestUserFragment - @arguments( - isViewerFriendLocal: $isViewerFriend - orderby: $orderby - ) - } - } - } - `; - gqlQueryWithoutID = graphql` - query usePaginationFragmentTestUserQueryWithoutIDQuery( - $after: ID - $first: Int - $before: ID - $last: Int - $orderby: [String] - $isViewerFriend: Boolean - ) { - viewer { - actor { - ...usePaginationFragmentTestUserFragment - @arguments( - isViewerFriendLocal: $isViewerFriend - orderby: $orderby - ) - } - } - } - `; - gqlQueryWithLiteralArgs = graphql` - query usePaginationFragmentTestUserQueryWithLiteralArgsQuery( - $id: ID! 
- $after: ID - $first: Int - $before: ID - $last: Int - ) { - node(id: $id) { + } + `; + gqlQueryWithoutID = graphql` + query usePaginationFragmentTestUserQueryWithoutIDQuery( + $after: ID + $first: Int + $before: ID + $last: Int + $orderby: [String] + $isViewerFriend: Boolean + ) { + viewer { + actor { ...usePaginationFragmentTestUserFragment - @arguments(isViewerFriendLocal: true, orderby: ["name"]) - } - } - `; - gqlQueryWithStreaming = graphql` - query usePaginationFragmentTestUserQueryWithStreamingQuery( - $id: ID! - $after: ID - $first: Int - $before: ID - $last: Int - $orderby: [String] - $isViewerFriend: Boolean - ) { - node(id: $id) { - ...usePaginationFragmentTestUserFragmentWithStreaming @arguments(isViewerFriendLocal: $isViewerFriend, orderby: $orderby) } } - `; - gqlFragment = graphql` - fragment usePaginationFragmentTestUserFragment on User - @refetchable( - queryName: "usePaginationFragmentTestUserFragmentPaginationQuery" + } + `; + gqlQueryWithLiteralArgs = graphql` + query usePaginationFragmentTestUserQueryWithLiteralArgsQuery( + $id: ID! + $after: ID + $first: Int + $before: ID + $last: Int + ) { + node(id: $id) { + ...usePaginationFragmentTestUserFragment + @arguments(isViewerFriendLocal: true, orderby: ["name"]) + } + } + `; + gqlQueryWithStreaming = graphql` + query usePaginationFragmentTestUserQueryWithStreamingQuery( + $id: ID! 
+ $after: ID + $first: Int + $before: ID + $last: Int + $orderby: [String] + $isViewerFriend: Boolean + ) { + node(id: $id) { + ...usePaginationFragmentTestUserFragmentWithStreaming + @arguments(isViewerFriendLocal: $isViewerFriend, orderby: $orderby) + } + } + `; + gqlFragment = graphql` + fragment usePaginationFragmentTestUserFragment on User + @refetchable( + queryName: "usePaginationFragmentTestUserFragmentPaginationQuery" + ) + @argumentDefinitions( + isViewerFriendLocal: {type: "Boolean", defaultValue: false} + orderby: {type: "[String]"} + scale: {type: "Float"} + ) { + id + name + friends( + after: $after + first: $first + before: $before + last: $last + orderby: $orderby + isViewerFriend: $isViewerFriendLocal + scale: $scale ) - @argumentDefinitions( - isViewerFriendLocal: {type: "Boolean", defaultValue: false} - orderby: {type: "[String]"} - scale: {type: "Float"} - ) { - id - name - friends( - after: $after - first: $first - before: $before - last: $last - orderby: $orderby - isViewerFriend: $isViewerFriendLocal - scale: $scale - ) - @connection( - key: "UserFragment_friends" - filters: ["orderby", "isViewerFriend"] - ) { - edges { - node { - id - name - ...usePaginationFragmentTestNestedUserFragment - } + @connection( + key: "UserFragment_friends" + filters: ["orderby", "isViewerFriend"] + ) { + edges { + node { + id + name + ...usePaginationFragmentTestNestedUserFragment } } } - `; - gqlFragmentWithStreaming = graphql` - fragment usePaginationFragmentTestUserFragmentWithStreaming on User - @refetchable( - queryName: "usePaginationFragmentTestUserFragmentStreamingPaginationQuery" + } + `; + gqlFragmentWithStreaming = graphql` + fragment usePaginationFragmentTestUserFragmentWithStreaming on User + @refetchable( + queryName: "usePaginationFragmentTestUserFragmentStreamingPaginationQuery" + ) + @argumentDefinitions( + isViewerFriendLocal: {type: "Boolean", defaultValue: false} + orderby: {type: "[String]"} + scale: {type: "Float"} + ) { + id + name + 
friends( + after: $after + first: $first + before: $before + last: $last + orderby: $orderby + isViewerFriend: $isViewerFriendLocal + scale: $scale ) - @argumentDefinitions( - isViewerFriendLocal: {type: "Boolean", defaultValue: false} - orderby: {type: "[String]"} - scale: {type: "Float"} - ) { - id - name - friends( - after: $after - first: $first - before: $before - last: $last - orderby: $orderby - isViewerFriend: $isViewerFriendLocal - scale: $scale - ) - @stream_connection( - initial_count: 1 - key: "UserFragment_friends" - filters: ["orderby", "isViewerFriend"] - ) { - edges { - node { - id - name - ...usePaginationFragmentTestNestedUserFragment - } + @stream_connection( + initial_count: 1 + key: "UserFragment_friends" + filters: ["orderby", "isViewerFriend"] + ) { + edges { + node { + id + name + ...usePaginationFragmentTestNestedUserFragment } } } - `; - gqlPaginationQuery = require('./__generated__/usePaginationFragmentTestUserFragmentPaginationQuery.graphql'); - - query = createOperationDescriptor(gqlQuery, variables); - queryNestedFragment = createOperationDescriptor( - gqlQueryNestedFragment, - variablesNestedFragment, - ); - queryWithoutID = createOperationDescriptor( - gqlQueryWithoutID, - variablesWithoutID, - ); - queryWithLiteralArgs = createOperationDescriptor( - gqlQueryWithLiteralArgs, - variables, - ); - queryWithStreaming = createOperationDescriptor( - gqlQueryWithStreaming, - variables, - ); - paginationQuery = createOperationDescriptor(gqlPaginationQuery, variables, { - force: true, - }); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }, - }); - environment.commitPayload(queryWithoutID, { - viewer: { - 
actor: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', + } + `; + gqlPaginationQuery = require('./__generated__/usePaginationFragmentTestUserFragmentPaginationQuery.graphql'); + + query = createOperationDescriptor(gqlQuery, variables); + queryNestedFragment = createOperationDescriptor( + gqlQueryNestedFragment, + variablesNestedFragment, + ); + queryWithoutID = createOperationDescriptor( + gqlQueryWithoutID, + variablesWithoutID, + ); + queryWithLiteralArgs = createOperationDescriptor( + gqlQueryWithLiteralArgs, + variables, + ); + queryWithStreaming = createOperationDescriptor( + gqlQueryWithStreaming, + variables, + ); + paginationQuery = createOperationDescriptor(gqlPaginationQuery, variables, { + force: true, + }); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', }, }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, }, - }); - environment.commitPayload(queryWithLiteralArgs, { - node: { + }, + }); + environment.commitPayload(queryWithoutID, { + viewer: { + actor: { __typename: 'User', id: '1', name: 'Alice', @@ -542,89 +490,11 @@ describe.each([ }, }, }, - }); - - // Set up renderers - Renderer = (props: {user: any}) => null; - - const Container = (props: { - userRef?: {...}, - owner: $FlowFixMe, - fragment?: $FlowFixMe, - ... - }) => { - // We need a render a component to run a Hook - const [owner, _setOwner] = useState(props.owner); - const fragment = props.fragment ?? 
gqlFragment; - const nodeUserRef = useMemo( - () => environment.lookup(owner.fragment).data?.node, - [owner], - ); - const ownerOperationRef = useMemo( - () => ({ - [ID_KEY]: - owner.request.variables.id ?? owner.request.variables.nodeID, - [FRAGMENTS_KEY]: { - // $FlowFixMe[invalid-computed-prop] Error found while enabling LTI on this file - [fragment.name]: {}, - }, - [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, - }), - [owner, fragment.name], - ); - const userRef = props.hasOwnProperty('userRef') - ? props.userRef - : nodeUserRef ?? ownerOperationRef; - - setOwner = _setOwner; - - const {data: userData} = usePaginationFragment( - fragment, - (userRef: $FlowFixMe), - ); - return ; - }; - - const ContextProvider = ({children}: {children: React.Node}) => { - const [env, _setEnv] = useState(environment); - const relayContext = useMemo(() => ({environment: env}), [env]); - - setEnvironment = _setEnv; - - return ( - - {children} - - ); - }; - - renderFragment = (args?: { - isConcurrent?: boolean, - owner?: $FlowFixMe, - userRef?: $FlowFixMe, - fragment?: $FlowFixMe, - ... - }): $FlowFixMe => { - const {isConcurrent = false, ...props} = args ?? 
{}; - let renderer; - TestRenderer.act(() => { - renderer = TestRenderer.create( - `Error: ${error.message}`}> - - - - - - , - // $FlowFixMe[prop-missing] - error revealed when flow-typing ReactTestRenderer - {unstable_isConcurrent: isConcurrent}, - ); - }); - return renderer; - }; - - initialUser = { + }, + }); + environment.commitPayload(queryWithLiteralArgs, { + node: { + __typename: 'User', id: '1', name: 'Alice', friends: { @@ -635,7 +505,7 @@ describe.each([ __typename: 'User', id: 'node:1', name: 'name:node:1', - ...createFragmentRef('node:1', query), + username: 'username:node:1', }, }, ], @@ -646,115 +516,384 @@ describe.each([ startCursor: 'cursor:1', }, }, - }; - }); - - afterEach(() => { - renderSpy.mockClear(); + }, }); - describe('initial render', () => { - // The bulk of initial render behavior is covered in useFragmentNode-test, - // so this suite covers the basic cases as a sanity check. - it('should throw error if fragment is plural', () => { - jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + // Set up renderers + Renderer = (props: {user: any}) => null; + + const Container = (props: { + userRef?: {...}, + owner: $FlowFixMe, + fragment?: $FlowFixMe, + ... + }) => { + // We need a render a component to run a Hook + const [owner, _setOwner] = useState(props.owner); + const fragment = props.fragment ?? gqlFragment; + const nodeUserRef = useMemo( + () => environment.lookup(owner.fragment).data?.node, + [owner], + ); + const ownerOperationRef = useMemo( + () => ({ + [ID_KEY]: owner.request.variables.id ?? owner.request.variables.nodeID, + [FRAGMENTS_KEY]: { + // $FlowFixMe[invalid-computed-prop] Error found while enabling LTI on this file + [fragment.name]: {}, + }, + [FRAGMENT_OWNER_KEY]: owner.request, + }), + [owner, fragment.name], + ); + const userRef = props.hasOwnProperty('userRef') + ? props.userRef + : nodeUserRef ?? 
ownerOperationRef; - const UserFragment = graphql` - fragment usePaginationFragmentTest1Fragment on User - @relay(plural: true) { - id - } - `; - const renderer = renderFragment({fragment: UserFragment}); - expect( - renderer - .toJSON() - .includes('Remove `@relay(plural: true)` from fragment'), - ).toEqual(true); - }); + setOwner = _setOwner; - it('should throw error if fragment is missing @refetchable directive', () => { - jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + const {data: userData} = usePaginationFragment( + fragment, + (userRef: $FlowFixMe), + ); + return ; + }; - const UserFragment = graphql` - fragment usePaginationFragmentTest2Fragment on User { - id - } - `; - const renderer = renderFragment({fragment: UserFragment}); - expect( - renderer - .toJSON() - .includes( - 'Did you forget to add a @refetchable directive to the fragment?', - ), - ).toEqual(true); - }); + const ContextProvider = ({children}: {children: React.Node}) => { + const [env, _setEnv] = useState(environment); + const relayContext = useMemo(() => ({environment: env}), [env]); - it('should throw error if fragment is missing @connection directive', () => { - jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + setEnvironment = _setEnv; - const UserFragment = graphql` - fragment usePaginationFragmentTest3Fragment on User - @refetchable( - queryName: "usePaginationFragmentTest3FragmentRefetchQuery" - ) { - id - } - `; - const renderer = renderFragment({fragment: UserFragment}); - expect( - renderer - .toJSON() - .includes( - 'Did you forget to add a @connection directive to the connection field in the fragment?', - ), - ).toEqual(true); + return ( + + {children} + + ); + }; + + renderFragment = (args?: { + isConcurrent?: boolean, + owner?: $FlowFixMe, + userRef?: $FlowFixMe, + fragment?: $FlowFixMe, + ... + }): $FlowFixMe => { + const {isConcurrent = false, ...props} = args ?? 
{}; + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create( + `Error: ${error.message}`}> + + + + + + , + // $FlowFixMe[prop-missing] - error revealed when flow-typing ReactTestRenderer + {unstable_isConcurrent: isConcurrent}, + ); }); - - it('should render fragment without error when data is available', () => { - renderFragment(); - expectFragmentResults([ + return renderer; + }; + + initialUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - - hasNext: true, - hasPrevious: false, + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), + }, }, - ]); - }); + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }; +}); - it('should render fragment without error when ref is null', () => { - renderFragment({userRef: null}); - expectFragmentResults([ - { - data: null, - isLoadingNext: false, - isLoadingPrevious: false, +afterEach(() => { + renderSpy.mockClear(); +}); - hasNext: false, - hasPrevious: false, - }, - ]); - }); +describe('initial render', () => { + // The bulk of initial render behavior is covered in useFragmentNode-test, + // so this suite covers the basic cases as a sanity check. 
+ it('should throw error if fragment is plural', () => { + jest.spyOn(console, 'error').mockImplementationOnce(() => {}); - it('should render fragment without error when ref is undefined', () => { - renderFragment({userRef: undefined}); - expectFragmentResults([ - { - data: null, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: false, - hasPrevious: false, - }, - ]); + const UserFragment = graphql` + fragment usePaginationFragmentTest1Fragment on User @relay(plural: true) { + id + } + `; + const renderer = renderFragment({fragment: UserFragment}); + expect( + renderer.toJSON().includes('Remove `@relay(plural: true)` from fragment'), + ).toEqual(true); + }); + + it('should throw error if fragment is missing @refetchable directive', () => { + jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + + const UserFragment = graphql` + fragment usePaginationFragmentTest2Fragment on User { + id + } + `; + const renderer = renderFragment({fragment: UserFragment}); + expect( + renderer + .toJSON() + .includes( + 'Did you forget to add a @refetchable directive to the fragment?', + ), + ).toEqual(true); + }); + + it('should throw error if fragment is missing @connection directive', () => { + jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + + const UserFragment = graphql` + fragment usePaginationFragmentTest3Fragment on User + @refetchable( + queryName: "usePaginationFragmentTest3FragmentRefetchQuery" + ) { + id + } + `; + const renderer = renderFragment({fragment: UserFragment}); + expect( + renderer + .toJSON() + .includes( + 'Did you forget to add a @connection directive to the connection field in the fragment?', + ), + ).toEqual(true); + }); + + it('should render fragment without error when data is available', () => { + renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + + hasNext: true, + hasPrevious: false, + }, + ]); + }); + + it('should render fragment without 
error when ref is null', () => { + renderFragment({userRef: null}); + expectFragmentResults([ + { + data: null, + isLoadingNext: false, + isLoadingPrevious: false, + + hasNext: false, + hasPrevious: false, + }, + ]); + }); + + it('should render fragment without error when ref is undefined', () => { + renderFragment({userRef: undefined}); + expectFragmentResults([ + { + data: null, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: false, + hasPrevious: false, + }, + ]); + }); + + it('should update when fragment data changes', () => { + renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + // Update parent record + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + // Update name + name: 'Alice in Wonderland', + }, }); + expectFragmentResults([ + { + data: { + ...initialUser, + // Assert that name is updated + name: 'Alice in Wonderland', + }, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); - it('should update when fragment data changes', () => { - renderFragment(); + // Update edge + environment.commitPayload(query, { + node: { + __typename: 'User', + id: 'node:1', + // Update name + name: 'name:node:1-updated', + }, + }); + expectFragmentResults([ + { + data: { + ...initialUser, + name: 'Alice in Wonderland', + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + // Assert that name is updated + name: 'name:node:1-updated', + ...createFragmentRef('node:1', query), + }, + }, + ], + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + }); + + it('should throw a promise if data is missing for fragment and request is in flight', () => { + // This prevents console.error output in the test, which is expected + jest.spyOn(console, 
'error').mockImplementationOnce(() => {}); + + const missingDataVariables = {...variables, id: '4'}; + const missingDataQuery = createOperationDescriptor( + gqlQuery, + missingDataVariables, + ); + + // Commit a payload with name and profile_picture are missing + environment.commitPayload(missingDataQuery, { + node: { + __typename: 'User', + id: '4', + }, + }); + + // Make sure query is in flight + fetchQuery(environment, missingDataQuery).subscribe({}); + + const renderer = renderFragment({owner: missingDataQuery}); + expect(renderer.toJSON()).toEqual('Fallback'); + }); +}); + +describe('pagination', () => { + let release; + + beforeEach(() => { + release = jest.fn<$ReadOnlyArray, mixed>(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + environment.retain.mockImplementation((...args) => { + return { + dispose: release, + }; + }); + }); + + function expectRequestIsInFlight(expected: any) { + expect(fetch).toBeCalledTimes(expected.requestCount); + const fetchCall = fetch.mock.calls.find(call => { + return ( + call[0] === + (expected.gqlPaginationQuery ?? 
gqlPaginationQuery).params && + areEqual(call[1], expected.paginationVariables) && + areEqual(call[2], {force: true}) + ); + }); + const isInFlight = fetchCall != null; + expect(isInFlight).toEqual(expected.inFlight); + } + + function expectFragmentIsLoadingMore( + renderer: any, + direction: Direction, + expected: { + data: mixed, + hasNext: boolean, + hasPrevious: boolean, + paginationVariables: Variables, + gqlPaginationQuery?: $FlowFixMe, + }, + ) { + // Assert fragment sets isLoading to true + expect(renderSpy).toBeCalledTimes(1); + assertCall( + { + data: expected.data, + isLoadingNext: direction === 'forward', + isLoadingPrevious: direction === 'backward', + hasNext: expected.hasNext, + hasPrevious: expected.hasPrevious, + }, + 0, + ); + renderSpy.mockClear(); + + // Assert refetch query was fetched + expectRequestIsInFlight({...expected, inFlight: true, requestCount: 1}); + } + + // TODO + // - backward pagination + // - simultaneous pagination + // - TODO(T41131846): Fetch/Caching policies for loadMore / when network + // returns or errors synchronously + // - TODO(T41140071): Handle loadMore while refetch is in flight and vice-versa + + describe('loadNext', () => { + const direction = 'forward'; + + it('does not load more if component has unmounted', () => { + const warning = require('warning'); + // $FlowFixMe[prop-missing] + warning.mockClear(); + + const renderer = renderFragment(); expectFragmentResults([ { data: initialUser, @@ -765,299 +904,266 @@ describe.each([ }, ]); - // Update parent record - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - // Update name - name: 'Alice in Wonderland', - }, + TestRenderer.act(() => { + renderer.unmount(); + }); + TestRenderer.act(() => { + loadNext(1); }); + + expect(warning).toHaveBeenCalledTimes(2); + expect( + (warning: $FlowFixMe).mock.calls[1][1].includes( + 'Relay: Unexpected fetch on unmounted component', + ), + ).toEqual(true); + expect(fetch).toHaveBeenCalledTimes(0); + 
}); + + it('does not load more if fragment ref passed to usePaginationFragment() was null', () => { + const warning = require('warning'); + // $FlowFixMe[prop-missing] + warning.mockClear(); + + renderFragment({userRef: null}); expectFragmentResults([ { - data: { - ...initialUser, - // Assert that name is updated - name: 'Alice in Wonderland', - }, + data: null, isLoadingNext: false, isLoadingPrevious: false, - hasNext: true, + hasNext: false, hasPrevious: false, }, ]); - // Update edge - environment.commitPayload(query, { - node: { - __typename: 'User', - id: 'node:1', - // Update name - name: 'name:node:1-updated', - }, + TestRenderer.act(() => { + loadNext(1); }); + + expect(warning).toHaveBeenCalledTimes(2); + expect( + (warning: $FlowFixMe).mock.calls[1][1].includes( + 'Relay: Unexpected fetch while using a null fragment ref', + ), + ).toEqual(true); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(fetch).toHaveBeenCalledTimes(0); + }); + + it('does not load more if request is already in flight', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); expectFragmentResults([ { - data: { - ...initialUser, - name: 'Alice in Wonderland', - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - // Assert that name is updated - name: 'name:node:1-updated', - ...createFragmentRef('node:1', query), - }, - }, - ], - }, - }, + data: initialUser, isLoadingNext: false, isLoadingPrevious: false, hasNext: true, hasPrevious: false, }, ]); - }); - it('should throw a promise if data is missing for fragment and request is in flight', () => { - // This prevents console.error output in the test, which is expected - jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + expect(callback).toBeCalledTimes(0); - const missingDataVariables = 
{...variables, id: '4'}; - const missingDataQuery = createOperationDescriptor( - gqlQuery, - missingDataVariables, - ); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); - // Commit a payload with name and profile_picture are missing - environment.commitPayload(missingDataQuery, { - node: { - __typename: 'User', - id: '4', - }, + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); + expect(fetch).toBeCalledTimes(1); + expect(callback).toBeCalledTimes(1); + expect(renderSpy).toBeCalledTimes(0); + }); - // Make sure query is in flight - fetchQuery(environment, missingDataQuery).subscribe({}); + it('does not load more if parent query is already active (i.e. during streaming)', () => { + // This prevents console.error output in the test, which is expected + jest.spyOn(console, 'error').mockImplementationOnce(() => {}); + const { + __internal: {fetchQuery}, + } = require('relay-runtime'); - const renderer = renderFragment({owner: missingDataQuery}); - expect(renderer.toJSON()).toEqual('Fallback'); - }); - }); + fetchQuery(environment, query).subscribe({}); - describe('pagination', () => { - let release; + const callback = jest.fn<[Error | null], void>(); + fetch.mockClear(); + renderFragment(); - beforeEach(() => { - release = jest.fn<$ReadOnlyArray, mixed>(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - environment.retain.mockImplementation((...args) => { - return { - dispose: release, - }; + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); + 
expect(fetch).toBeCalledTimes(0); + expect(callback).toBeCalledTimes(1); + expect(renderSpy).toBeCalledTimes(0); }); - function expectRequestIsInFlight(expected: any) { - expect(fetch).toBeCalledTimes(expected.requestCount); - const fetchCall = fetch.mock.calls.find(call => { - return ( - call[0] === - (expected.gqlPaginationQuery ?? gqlPaginationQuery).params && - areEqual(call[1], expected.paginationVariables) && - areEqual(call[2], {force: true}) - ); + it('attempts to load more even if there are no more items to load', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', + }, + }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: false, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }, }); - const isInFlight = fetchCall != null; - expect(isInFlight).toEqual(expected.inFlight); - } + const callback = jest.fn<[Error | null], void>(); - function expectFragmentIsLoadingMore( - renderer: any, - direction: Direction, - expected: { - data: mixed, - hasNext: boolean, - hasPrevious: boolean, - paginationVariables: Variables, - gqlPaginationQuery?: $FlowFixMe, - }, - ) { - // Assert fragment sets isLoading to true - expect(renderSpy).toBeCalledTimes(1); - assertCall( + const renderer = renderFragment(); + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({hasNextPage: false}), + }, + }; + expectFragmentResults([ { - data: expected.data, - isLoadingNext: direction === 'forward', - isLoadingPrevious: direction === 'backward', - hasNext: expected.hasNext, - hasPrevious: expected.hasPrevious, + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: false, + hasPrevious: false, }, - 0, - ); - 
renderSpy.mockClear(); - - // Assert refetch query was fetched - expectRequestIsInFlight({...expected, inFlight: true, requestCount: 1}); - } - - // TODO - // - backward pagination - // - simultaneous pagination - // - TODO(T41131846): Fetch/Caching policies for loadMore / when network - // returns or errors synchronously - // - TODO(T41140071): Handle loadMore while refetch is in flight and vice-versa - - describe('loadNext', () => { - const direction = 'forward'; + ]); - it('does not load more if component has unmounted', () => { - const warning = require('warning'); - // $FlowFixMe[prop-missing] - warning.mockClear(); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: expectedUser, + hasNext: false, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); - TestRenderer.act(() => { - renderer.unmount(); - }); - TestRenderer.act(() => { - loadNext(1); - }); - - expect(warning).toHaveBeenCalledTimes(2); - expect( - (warning: $FlowFixMe).mock.calls[1][1].includes( - 'Relay: Unexpected fetch on unmounted component', - ), - ).toEqual(true); - expect(fetch).toHaveBeenCalledTimes(0); - }); - - it('does not load more if fragment ref passed to usePaginationFragment() was null', () => { - const warning = require('warning'); - // $FlowFixMe[prop-missing] - warning.mockClear(); - - renderFragment({userRef: null}); - expectFragmentResults([ - { - data: null, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: false, - hasPrevious: false, + resolveQuery({ + data: { + 
node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + // $FlowFixMe[missing-empty-array-annot] + edges: [], + pageInfo: { + startCursor: null, + endCursor: null, + hasNextPage: null, + hasPreviousPage: null, + }, + }, }, - ]); - - TestRenderer.act(() => { - loadNext(1); - }); - - expect(warning).toHaveBeenCalledTimes(2); - expect( - (warning: $FlowFixMe).mock.calls[1][1].includes( - 'Relay: Unexpected fetch while using a null fragment ref', - ), - ).toEqual(true); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(fetch).toHaveBeenCalledTimes(0); + }, }); + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: false, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); - it('does not load more if request is already in flight', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - expect(callback).toBeCalledTimes(0); - - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + it('loads and renders next items in connection', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); + }, + ]); - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - expect(fetch).toBeCalledTimes(1); - expect(callback).toBeCalledTimes(1); - 
expect(renderSpy).toBeCalledTimes(0); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); - - it('does not load more if parent query is already active (i.e. during streaming)', () => { - // This prevents console.error output in the test, which is expected - jest.spyOn(console, 'error').mockImplementationOnce(() => {}); - const { - __internal: {fetchQuery}, - } = require('relay-runtime'); - - fetchQuery(environment, query).subscribe({}); - - const callback = jest.fn<[Error | null], void>(); - fetch.mockClear(); - renderFragment(); - - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - expect(fetch).toBeCalledTimes(0); - expect(callback).toBeCalledTimes(1); - expect(renderSpy).toBeCalledTimes(0); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, }); + expect(callback).toBeCalledTimes(0); - it('attempts to load more even if there are no more items to load', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { + resolveQuery({ + data: { node: { __typename: 'User', id: '1', @@ -1065,722 +1171,719 @@ describe.each([ friends: { edges: [ { - cursor: 'cursor:1', + cursor: 'cursor:2', node: { __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, }, ], pageInfo: { - endCursor: 'cursor:1', - hasNextPage: false, - hasPreviousPage: false, - startCursor: 'cursor:1', + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + 
hasPreviousPage: true, }, }, }, - }); - const callback = jest.fn<[Error | null], void>(); + }, + }); - const renderer = renderFragment(); - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({hasNextPage: false}), - }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: false, - hasPrevious: false, + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), + }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', query), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + }, + }; + expectFragmentResults([ + { + // First update has updated connection data: expectedUser, - hasNext: false, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - // $FlowFixMe[missing-empty-array-annot] - edges: [], - pageInfo: { - startCursor: null, - endCursor: null, - 
hasNextPage: null, - hasPreviousPage: null, - }, + it('loads more correctly using fragment variables from literal @argument values', () => { + let expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithLiteralArgs), }, }, - }, - }); - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: false, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); + ], + }, + }; + + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment({owner: queryWithLiteralArgs}); + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: true, + orderby: ['name'], + scale: null, + }; + expect(paginationVariables.isViewerFriendLocal).not.toBe( + variables.isViewerFriend, + ); + expectFragmentIsLoadingMore(renderer, direction, { + data: expectedUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, }); + expect(callback).toBeCalledTimes(0); - it('loads and renders next items in connection', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - 
expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: true, }, }, }, - }); + }, + }); - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', query), - }, - }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', query), - }, + expectedUser = { + ...expectedUser, + friends: { + ...expectedUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithLiteralArgs), }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, - }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: 
false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); - - it('loads more correctly using fragment variables from literal @argument values', () => { - let expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithLiteralArgs), - }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', queryWithLiteralArgs), }, - ], - }, - }; - - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment({owner: queryWithLiteralArgs}); - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: true, - orderby: ['name'], - scale: null, - }; - expect(paginationVariables.isViewerFriendLocal).not.toBe( - variables.isViewerFriend, - ); - expectFragmentIsLoadingMore(renderer, direction, { + }, + }; + expectFragmentResults([ + { + // First update has updated connection data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, - }, - ], - pageInfo: { - startCursor: 
'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, - }, - }, - }, - }, - }); - - expectedUser = { - ...expectedUser, - friends: { - ...expectedUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithLiteralArgs), - }, - }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', queryWithLiteralArgs), - }, - }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); - - it('loads more correctly when original variables do not include an id', () => { - const callback = jest.fn<[Error | null], void>(); - const viewer = environment.lookup(queryWithoutID.fragment).data?.viewer; - const userRef = - typeof viewer === 'object' && viewer != null ? 
viewer?.actor : null; - invariant(userRef != null, 'Expected to have cached test data'); + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); - let expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithoutID), - }, - }, - ], - }, - }; + it('loads more correctly when original variables do not include an id', () => { + const callback = jest.fn<[Error | null], void>(); + const viewer = environment.lookup(queryWithoutID.fragment).data?.viewer; + const userRef = + typeof viewer === 'object' && viewer != null ? viewer?.actor : null; + invariant(userRef != null, 'Expected to have cached test data'); - const renderer = renderFragment({owner: queryWithoutID, userRef}); - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + let expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithoutID), + }, + }, + ], + }, + }; - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + const renderer = renderFragment({owner: queryWithoutID, userRef}); + expectFragmentResults([ + { data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - 
expect(callback).toBeCalledTimes(0); + }, + ]); - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: expectedUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: true, }, }, }, - }); + }, + }); - expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithoutID), - }, + expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithoutID), }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', queryWithoutID), - }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + 
...createFragmentRef('node:2', queryWithoutID), }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); - it('loads more with correct id from refetchable fragment when using a nested fragment', () => { - const callback = jest.fn<[Error | null], void>(); + it('loads more with correct id from refetchable fragment when using a nested fragment', () => { + const callback = jest.fn<[Error | null], void>(); - // Populate store with data for query using nested fragment - environment.commitPayload(queryNestedFragment, { - node: { - __typename: 'Feedback', - id: '', - actor: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, + // Populate store with data for query using nested fragment + environment.commitPayload(queryNestedFragment, { + node: { + __typename: 'Feedback', + id: '', + actor: { + __typename: 
'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, }, }, - }); + }, + }); - // Get fragment ref for user using nested fragment - const userRef = (environment.lookup(queryNestedFragment.fragment) - .data: $FlowFixMe)?.node?.actor; + // Get fragment ref for user using nested fragment + const userRef = (environment.lookup(queryNestedFragment.fragment) + .data: $FlowFixMe)?.node?.actor; - initialUser = { - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryNestedFragment), - }, + initialUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryNestedFragment), }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, - }, - }; - - const renderer = renderFragment({ - owner: queryNestedFragment, - userRef, - }); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - // The id here should correspond to the user id, and not the - // feedback id from the query variables (i.e. 
``) - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }; + + const renderer = renderFragment({ + owner: queryNestedFragment, + userRef, + }); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); + }, + ]); - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + // The id here should correspond to the user id, and not the + // feedback id from the query variables (i.e. 
``) + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: true, }, }, }, - }); + }, + }); - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryNestedFragment), - }, + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryNestedFragment), }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', queryNestedFragment), - }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', queryNestedFragment), }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: 
expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); + + it('calls callback with error when error occurs during fetch', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, }); + expect(callback).toBeCalledTimes(0); - it('calls callback with error when error occurs during fetch', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + const error = new Error('Oops'); + dataSource.error(error); - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables 
= { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + // We pass the error in the callback, but do not throw during render + // since we want to continue rendering the existing items in the + // connection + expect(callback).toBeCalledTimes(1); + expect(callback).toBeCalledWith(error); + }); + + it('preserves pagination request if re-rendered with same fragment ref', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); + }, + ]); - const error = new Error('Oops'); - dataSource.error(error); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); - // We pass the error in the callback, but do not throw during render - // since we want to continue rendering the existing items in the - // connection - expect(callback).toBeCalledTimes(1); - expect(callback).toBeCalledWith(error); + TestRenderer.act(() => { + setOwner({...query}); + }); + + // Assert that request is still in flight after re-rendering + // with new fragment ref that points to the same data. 
+ expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', + }, + }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: true, + }, + }, + }, + }, }); - it('preserves pagination request if re-rendered with same fragment ref', () => { + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), + }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', query), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); + + describe('extra variables', () => { + it('loads and renders the next items in the connection when passing extra variables', () => { const callback = jest.fn<[Error | null], void>(); const renderer = renderFragment(); expectFragmentResults([ @@ -1794,7 +1897,11 @@ describe.each([ ]); TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); + loadNext(1, { + onComplete: 
callback, + // Pass extra variables that are different from original request + UNSTABLE_extraVariables: {scale: 2.0}, + }); }); const paginationVariables = { id: '1', @@ -1804,7 +1911,8 @@ describe.each([ last: null, isViewerFriendLocal: false, orderby: ['name'], - scale: null, + // Assert that value from extra variables is used + scale: 2.0, }; expectFragmentIsLoadingMore(renderer, direction, { data: initialUser, @@ -1815,21 +1923,6 @@ describe.each([ }); expect(callback).toBeCalledTimes(0); - TestRenderer.act(() => { - setOwner({...query}); - }); - - // Assert that request is still in flight after re-rendering - // with new fragment ref that points to the same data. - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - resolveQuery({ data: { node: { @@ -1912,489 +2005,48 @@ describe.each([ expect(callback).toBeCalledTimes(1); }); - describe('extra variables', () => { - it('loads and renders the next items in the connection when passing extra variables', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, { - onComplete: callback, - // Pass extra variables that are different from original request - UNSTABLE_extraVariables: {scale: 2.0}, - }); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - // Assert that value from extra variables is used - scale: 2.0, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - 
resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, - }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, - }, - }, - }, - }, - }); - - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', query), - }, - }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', query), - }, - }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); - - it('loads the next items in the connection and ignores any pagination vars passed as extra vars', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, { - onComplete: callback, - // Pass pagination vars as extra variables - UNSTABLE_extraVariables: {first: 100, after: 'foo'}, - }); - }); - const paginationVariables = { - id: '1', - // Assert that pagination vars from extra variables are ignored - after: 'cursor:1', - first: 
1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, - }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, - }, - }, - }, - }, - }); - - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', query), - }, - }, - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', query), - }, - }, - ], - pageInfo: { - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); - }); - - describe('disposing', () => { - it('cancels load more if component unmounts', () => { - unsubscribe.mockClear(); - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - 
]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(unsubscribe).toHaveBeenCalledTimes(0); - - TestRenderer.act(() => { - renderer.unmount(); - jest.runAllTimers(); - }); - expect(unsubscribe).toHaveBeenCalledTimes(1); - expect(fetch).toBeCalledTimes(1); - expect(callback).toBeCalledTimes(0); - expect(renderSpy).toBeCalledTimes(0); - }); - - it('cancels load more if refetch is called', () => { - unsubscribe.mockClear(); - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + it('loads the next items in the connection and ignores any pagination vars passed as extra vars', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(unsubscribe).toHaveBeenCalledTimes(0); - const loadNextUnsubscribe = unsubscribe; + }, + ]); - TestRenderer.act(() => { - refetch({id: '4'}); + TestRenderer.act(() => { + loadNext(1, { + onComplete: callback, + // Pass pagination vars as extra variables + 
UNSTABLE_extraVariables: {first: 100, after: 'foo'}, }); - expect(fetch).toBeCalledTimes(2); // loadNext and refetch - expect(loadNextUnsubscribe).toHaveBeenCalledTimes(1); // loadNext is cancelled - expect(unsubscribe).toHaveBeenCalledTimes(0); // refetch is not cancelled - expect(callback).toBeCalledTimes(0); - expect(renderSpy).toBeCalledTimes(0); }); - - it('disposes ongoing request if environment changes', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - - // Assert request is started - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - const loadNextUnsubscribe = unsubscribe; - expect(callback).toBeCalledTimes(0); - - // Set new environment - const [newEnvironment, newFetch] = createMockEnvironment(); - fetch.mockClear(); - fetch = newFetch; - newEnvironment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice in a different environment', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }, - }); - TestRenderer.act(() => { - setEnvironment(newEnvironment); - }); - - // Assert request was canceled - expect(loadNextUnsubscribe).toBeCalledTimes(1); - // changing environments resets, we don't try to auto-paginate just bc a request was pending 
- expect(fetch).toBeCalledTimes(0); - - // Assert newly rendered data - expectFragmentResults([ - { - data: { - ...initialUser, - name: 'Alice in a different environment', - }, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: { - ...initialUser, - name: 'Alice in a different environment', - }, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + const paginationVariables = { + id: '1', + // Assert that pagination vars from extra variables are ignored + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, }); + expect(callback).toBeCalledTimes(0); - it('disposes ongoing request if fragment ref changes', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - - // Assert request is started - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - // Pass new parent fragment ref with different variables - const newVariables = {...variables, isViewerFriend: true}; - const newQuery = createOperationDescriptor(gqlQuery, newVariables); - environment.commitPayload(newQuery, { + resolveQuery({ + data: { node: { __typename: 'User', id: '1', @@ 
-2402,362 +2054,221 @@ describe.each([ friends: { edges: [ { - cursor: 'cursor:1', + cursor: 'cursor:2', node: { __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, }, ], pageInfo: { - endCursor: 'cursor:1', + startCursor: 'cursor:2', + endCursor: 'cursor:2', hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', + hasPreviousPage: true, }, }, }, - }); - fetch.mockClear(); - TestRenderer.act(() => { - setOwner(newQuery); - }); - - // Assert request was canceled - expect(unsubscribe).toBeCalledTimes(1); - // changing fragment ref resets, we don't try to auto-paginate just bc a request was pending - expect(fetch).toBeCalledTimes(0); - - // Assert newly rendered data - const expectedUser = { - ...initialUser, - friends: { - ...initialUser.friends, - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - // Assert fragment ref points to owner with new variables - ...createFragmentRef('node:1', newQuery), - }, - }, - ], - }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + }, }); - it('disposes ongoing request on unmount', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), + }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 
'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', query), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); + }); - // Assert request is started - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + describe('disposing', () => { + it('cancels load more if component unmounts', () => { + unsubscribe.mockClear(); + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - TestRenderer.act(() => { - renderer.unmount(); - }); + }, + ]); - // Assert request was canceled - expect(unsubscribe).toBeCalledTimes(1); - expect(fetch).toBeCalledTimes(1); // the loadNext call + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + 
gqlPaginationQuery, + }); + expect(unsubscribe).toHaveBeenCalledTimes(0); - it('disposes ongoing request if it is manually disposed', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - let disposable; - TestRenderer.act(() => { - disposable = loadNext(1, {onComplete: callback}); - }); + TestRenderer.act(() => { + renderer.unmount(); + jest.runAllTimers(); + }); + expect(unsubscribe).toHaveBeenCalledTimes(1); + expect(fetch).toBeCalledTimes(1); + expect(callback).toBeCalledTimes(0); + expect(renderSpy).toBeCalledTimes(0); + }); - // Assert request is started - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { + it('cancels load more if refetch is called', () => { + unsubscribe.mockClear(); + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - expect(disposable).toBeTruthy(); - disposable?.dispose(); + }, + ]); - // Assert request was canceled - expect(unsubscribe).toBeCalledTimes(1); - expect(fetch).toBeCalledTimes(1); // the loadNext call - expect(renderSpy).toHaveBeenCalledTimes(0); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); - }); - - describe('when parent query is streaming', () => { - beforeEach(() => { - [environment, fetch] = createMockEnvironment(); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - }, - }); + const paginationVariables = { + id: '1', + 
after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, }); + expect(unsubscribe).toHaveBeenCalledTimes(0); + const loadNextUnsubscribe = unsubscribe; - it('does not start pagination request even if query is no longer active but loadNext is bound to snapshot of data while query was active', () => { - const { - __internal: {fetchQuery}, - } = require('relay-runtime'); - - // Start parent query and assert it is active - fetchQuery(environment, queryWithStreaming).subscribe({}); - expect( - environment.isRequestActive(queryWithStreaming.request.identifier), - ).toEqual(true); - - // Render initial fragment - const instance = renderFragment({ - fragment: gqlFragmentWithStreaming, - owner: queryWithStreaming, - }); - expect(instance.toJSON()).toEqual(null); - renderSpy.mockClear(); - - // Resolve first payload - TestRenderer.act(() => { - dataSource.next({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], - }, - }, - }, - extensions: { - is_final: false, - }, - }); - }); - // Ensure request is still active - expect( - environment.isRequestActive(queryWithStreaming.request.identifier), - ).toEqual(true); - - // Assert fragment rendered with correct data - expectFragmentResults([ - { - data: { - ...initialUser, - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithStreaming), - }, - }, - ], - // Assert pageInfo is currently null - pageInfo: { - endCursor: null, - hasNextPage: false, - hasPreviousPage: false, - startCursor: null, - }, - }, - }, - 
isLoadingNext: false, - isLoadingPrevious: false, - hasNext: false, - hasPrevious: false, - }, - ]); - - // Capture the value of loadNext at this moment, which will - // would use the page info from the current fragment snapshot. - // At the moment of this snapshot the parent request is still active, - // so calling `capturedLoadNext` should be a no-op, otherwise it - // would attempt a pagination with the incorrect cursor as null. - const capturedLoadNext = loadNext; - - // Resolve page info - TestRenderer.act(() => { - resolveQuery({ - data: { - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - }, - }, - label: - 'usePaginationFragmentTestUserFragmentWithStreaming$defer$UserFragment_friends$pageInfo', - path: ['node', 'friends'], - extensions: { - is_final: true, - }, - }); - }); - // Ensure request is no longer active since final payload has been - // received - expect( - environment.isRequestActive(queryWithStreaming.request.identifier), - ).toEqual(false); - - // Assert fragment rendered with correct data - expectFragmentResults([ - { - data: { - ...initialUser, - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryWithStreaming), - }, - }, - ], - // Assert pageInfo is updated - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: null, - }, - }, - }, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - fetch.mockClear(); - renderSpy.mockClear(); - // Call `capturedLoadNext`, which should be a no-op since it's - // bound to the snapshot of the fragment taken while the query is - // still active and pointing to incomplete page info. 
- TestRenderer.act(() => { - capturedLoadNext(1); - }); - - // Assert that calling `capturedLoadNext` is a no-op - expect(fetch).toBeCalledTimes(0); - expect(renderSpy).toBeCalledTimes(0); + TestRenderer.act(() => { + refetch({id: '4'}); + }); + expect(fetch).toBeCalledTimes(2); // loadNext and refetch + expect(loadNextUnsubscribe).toHaveBeenCalledTimes(1); // loadNext is cancelled + expect(unsubscribe).toHaveBeenCalledTimes(0); // refetch is not cancelled + expect(callback).toBeCalledTimes(0); + expect(renderSpy).toBeCalledTimes(0); + }); - // Calling `loadNext`, should be fine since it's bound to the - // latest fragment snapshot with the latest page info and when - // the request is no longer active - TestRenderer.act(() => { - loadNext(1); - }); + it('disposes ongoing request if environment changes', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); - // Assert that calling `loadNext` starts the request - expect(fetch).toBeCalledTimes(1); - expect(renderSpy).toBeCalledTimes(1); + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); }); - }); - }); - describe('hasNext', () => { - const direction = 'forward'; + // Assert request is started + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + const loadNextUnsubscribe = unsubscribe; + expect(callback).toBeCalledTimes(0); - it('returns true if it has more items', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { + // Set new environment + const [newEnvironment, 
newFetch] = createMockEnvironment(); + fetch.mockClear(); + fetch = newFetch; + newEnvironment.commitPayload(query, { node: { __typename: 'User', id: '1', - name: 'Alice', + name: 'Alice in a different environment', friends: { edges: [ { @@ -2779,107 +2290,81 @@ describe.each([ }, }, }); + TestRenderer.act(() => { + setEnvironment(newEnvironment); + }); - renderFragment(); + // Assert request was canceled + expect(loadNextUnsubscribe).toBeCalledTimes(1); + // changing environments resets, we don't try to auto-paginate just bc a request was pending + expect(fetch).toBeCalledTimes(0); + + // Assert newly rendered data expectFragmentResults([ { data: { ...initialUser, - friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({hasNextPage: true}), - }, + name: 'Alice in a different environment', }, - isLoadingNext: false, + isLoadingNext: true, isLoadingPrevious: false, - // Assert hasNext is true hasNext: true, hasPrevious: false, }, - ]); - }); - - it('returns false if edges are null', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: null, - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }, - }); - - renderFragment(); - expectFragmentResults([ { data: { ...initialUser, - friends: { - ...initialUser.friends, - edges: null, - pageInfo: expect.objectContaining({hasNextPage: true}), - }, + name: 'Alice in a different environment', }, isLoadingNext: false, isLoadingPrevious: false, - // Assert hasNext is false - hasNext: false, + hasNext: true, hasPrevious: false, }, ]); }); - it('returns false if edges are undefined', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: undefined, - pageInfo: { - endCursor: 
'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', - }, - }, - }, - }); - - renderFragment(); + it('disposes ongoing request if fragment ref changes', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); expectFragmentResults([ { - data: { - ...initialUser, - friends: { - ...initialUser.friends, - edges: undefined, - pageInfo: expect.objectContaining({hasNextPage: true}), - }, - }, + data: initialUser, isLoadingNext: false, isLoadingPrevious: false, - // Assert hasNext is false - hasNext: false, + hasNext: true, hasPrevious: false, }, ]); - }); - it('returns false if end cursor is null', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + + // Assert request is started + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + // Pass new parent fragment ref with different variables + const newVariables = {...variables, isViewerFriend: true}; + const newQuery = createOperationDescriptor(gqlQuery, newVariables); + environment.commitPayload(newQuery, { node: { __typename: 'User', id: '1', @@ -2897,261 +2382,479 @@ describe.each([ }, ], pageInfo: { - // endCursor is null - endCursor: null, - // but hasNextPage is still true + endCursor: 'cursor:1', hasNextPage: true, hasPreviousPage: false, - startCursor: null, + startCursor: 'cursor:1', }, }, }, }); + fetch.mockClear(); + TestRenderer.act(() => { + setOwner(newQuery); + }); - renderFragment(); + // Assert request was canceled + expect(unsubscribe).toBeCalledTimes(1); + // changing fragment ref 
resets, we don't try to auto-paginate just bc a request was pending + expect(fetch).toBeCalledTimes(0); + + // Assert newly rendered data + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + // Assert fragment ref points to owner with new variables + ...createFragmentRef('node:1', newQuery), + }, + }, + ], + }, + }; expectFragmentResults([ { - data: { - ...initialUser, - friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({ - endCursor: null, - hasNextPage: true, - }), - }, - }, + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + data: expectedUser, isLoadingNext: false, isLoadingPrevious: false, - // Assert hasNext is false - hasNext: false, + hasNext: true, + hasPrevious: false, + }, + ]); + }); + + it('disposes ongoing request on unmount', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + + // Assert request is started + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + TestRenderer.act(() => { + renderer.unmount(); + }); + + // Assert request was canceled + expect(unsubscribe).toBeCalledTimes(1); + expect(fetch).toBeCalledTimes(1); // the loadNext call + }); + + it('disposes ongoing request if it is manually disposed', () => { + const 
callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, hasPrevious: false, }, ]); + + let disposable; + TestRenderer.act(() => { + disposable = loadNext(1, {onComplete: callback}); + }); + + // Assert request is started + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + expect(disposable).toBeTruthy(); + disposable?.dispose(); + + // Assert request was canceled + expect(unsubscribe).toBeCalledTimes(1); + expect(fetch).toBeCalledTimes(1); // the loadNext call + expect(renderSpy).toHaveBeenCalledTimes(0); }); + }); - it('returns false if end cursor is undefined', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); + describe('when parent query is streaming', () => { + beforeEach(() => { + [environment, fetch] = createMockEnvironment(); environment.commitPayload(query, { node: { __typename: 'User', id: '1', name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], - pageInfo: { - // endCursor is undefined - endCursor: undefined, - // but hasNextPage is still true - hasNextPage: true, - hasPreviousPage: false, - startCursor: undefined, + }, + }); + }); + + it('does not start pagination request even if query is no longer active but loadNext is bound to snapshot of data while query was active', () => { + const { + __internal: {fetchQuery}, + } = require('relay-runtime'); + + // Start parent query and assert it is active + fetchQuery(environment, 
queryWithStreaming).subscribe({}); + expect( + environment.isRequestActive(queryWithStreaming.request.identifier), + ).toEqual(true); + + // Render initial fragment + const instance = renderFragment({ + fragment: gqlFragmentWithStreaming, + owner: queryWithStreaming, + }); + expect(instance.toJSON()).toEqual(null); + renderSpy.mockClear(); + + // Resolve first payload + TestRenderer.act(() => { + dataSource.next({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', + }, + }, + ], + }, }, }, - }, + extensions: { + is_final: false, + }, + }); }); + // Ensure request is still active + expect( + environment.isRequestActive(queryWithStreaming.request.identifier), + ).toEqual(true); - renderFragment(); + // Assert fragment rendered with correct data expectFragmentResults([ { data: { ...initialUser, friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({ + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithStreaming), + }, + }, + ], + // Assert pageInfo is currently null + pageInfo: { endCursor: null, - hasNextPage: true, - }), + hasNextPage: false, + hasPreviousPage: false, + startCursor: null, + }, }, }, isLoadingNext: false, isLoadingPrevious: false, - // Assert hasNext is false hasNext: false, hasPrevious: false, }, ]); - }); - it('returns false if pageInfo.hasNextPage is false-ish', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, - }, - ], + // Capture the value of loadNext at this moment, which will + // would use 
the page info from the current fragment snapshot. + // At the moment of this snapshot the parent request is still active, + // so calling `capturedLoadNext` should be a no-op, otherwise it + // would attempt a pagination with the incorrect cursor as null. + const capturedLoadNext = loadNext; + + // Resolve page info + TestRenderer.act(() => { + resolveQuery({ + data: { pageInfo: { endCursor: 'cursor:1', - hasNextPage: null, - hasPreviousPage: false, - startCursor: 'cursor:1', + hasNextPage: true, }, }, - }, + label: + 'usePaginationFragmentTestUserFragmentWithStreaming$defer$UserFragment_friends$pageInfo', + path: ['node', 'friends'], + extensions: { + is_final: true, + }, + }); }); + // Ensure request is no longer active since final payload has been + // received + expect( + environment.isRequestActive(queryWithStreaming.request.identifier), + ).toEqual(false); - renderFragment(); + // Assert fragment rendered with correct data expectFragmentResults([ { data: { ...initialUser, friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({ - hasNextPage: null, - }), + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryWithStreaming), + }, + }, + ], + // Assert pageInfo is updated + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: null, + }, }, }, isLoadingNext: false, isLoadingPrevious: false, - // Assert hasNext is false - hasNext: false, + hasNext: true, hasPrevious: false, }, ]); + + fetch.mockClear(); + renderSpy.mockClear(); + // Call `capturedLoadNext`, which should be a no-op since it's + // bound to the snapshot of the fragment taken while the query is + // still active and pointing to incomplete page info. 
+ TestRenderer.act(() => { + capturedLoadNext(1); + }); + + // Assert that calling `capturedLoadNext` is a no-op + expect(fetch).toBeCalledTimes(0); + expect(renderSpy).toBeCalledTimes(0); + + // Calling `loadNext`, should be fine since it's bound to the + // latest fragment snapshot with the latest page info and when + // the request is no longer active + TestRenderer.act(() => { + loadNext(1); + }); + + // Assert that calling `loadNext` starts the request + expect(fetch).toBeCalledTimes(1); + expect(renderSpy).toBeCalledTimes(1); }); + }); + }); - it('returns false if pageInfo.hasNextPage is false', () => { - (environment.getStore().getSource(): $FlowFixMe).clear(); - environment.commitPayload(query, { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, + describe('hasNext', () => { + const direction = 'forward'; + + it('returns true if it has more items', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: false, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, }, - }); + }, + }); - renderFragment(); - expectFragmentResults([ - { - data: { - ...initialUser, - friends: { - ...initialUser.friends, - pageInfo: expect.objectContaining({ - hasNextPage: false, - }), - }, + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({hasNextPage: true}), }, - 
isLoadingNext: false, - isLoadingPrevious: false, - // Assert hasNext is false - hasNext: false, - hasPrevious: false, }, - ]); - }); - - it('updates after pagination if more results are available', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is true + hasNext: true, + hasPrevious: false, + }, + ]); + }); - hasNext: true, - hasPrevious: false, + it('returns false if edges are null', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: null, + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, }, - ]); + }, + }); - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + edges: null, + pageInfo: expect.objectContaining({hasNextPage: true}), + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); + }, + ]); + }); - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, - }, - ], - pageInfo: { - startCursor: 
'cursor:2', - endCursor: 'cursor:2', - hasNextPage: true, - hasPreviousPage: true, - }, - }, + it('returns false if edges are undefined', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: undefined, + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, }, - }); + }, + }); - const expectedUser = { - ...initialUser, + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + edges: undefined, + pageInfo: expect.objectContaining({hasNextPage: true}), + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, + hasPrevious: false, + }, + ]); + }); + + it('returns false if end cursor is null', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', friends: { - ...initialUser.friends, edges: [ { cursor: 'cursor:1', @@ -3159,118 +2862,105 @@ describe.each([ __typename: 'User', id: 'node:1', name: 'name:node:1', - ...createFragmentRef('node:1', query), + username: 'username:node:1', }, }, + ], + pageInfo: { + // endCursor is null + endCursor: null, + // but hasNextPage is still true + hasNextPage: true, + hasPreviousPage: false, + startCursor: null, + }, + }, + }, + }); + + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({ + endCursor: null, + hasNextPage: true, + }), + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, + hasPrevious: false, + }, + ]); + }); + + it('returns false if end cursor is undefined', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + 
environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ { - cursor: 'cursor:2', + cursor: 'cursor:1', node: { __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', query), + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', }, }, ], pageInfo: { - endCursor: 'cursor:2', + // endCursor is undefined + endCursor: undefined, + // but hasNextPage is still true hasNextPage: true, hasPreviousPage: false, - startCursor: 'cursor:1', + startCursor: undefined, }, }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - // Assert hasNext reflects server response - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - // Assert hasNext reflects server response - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); + }, }); - it('updates after pagination if no more results are available', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: '1', - after: 'cursor:1', - first: 1, - before: null, - last: null, - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, direction, { - data: initialUser, - hasNext: true, - hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); - - resolveQuery({ + renderFragment(); + expectFragmentResults([ + { data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - 
friends: { - edges: [ - { - cursor: 'cursor:2', - node: { - __typename: 'User', - id: 'node:2', - name: 'name:node:2', - username: 'username:node:2', - }, - }, - ], - pageInfo: { - startCursor: 'cursor:2', - endCursor: 'cursor:2', - hasNextPage: false, - hasPreviousPage: true, - }, - }, + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({ + endCursor: null, + hasNextPage: true, + }), }, }, - }); + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, + hasPrevious: false, + }, + ]); + }); - const expectedUser = { - ...initialUser, + it('returns false if pageInfo.hasNextPage is false-ish', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', friends: { - ...initialUser.friends, edges: [ { cursor: 'cursor:1', @@ -3278,825 +2968,1080 @@ describe.each([ __typename: 'User', id: 'node:1', name: 'name:node:1', - ...createFragmentRef('node:1', query), + username: 'username:node:1', }, }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: null, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }, + }); + + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({ + hasNextPage: null, + }), + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, + hasPrevious: false, + }, + ]); + }); + + it('returns false if pageInfo.hasNextPage is false', () => { + (environment.getStore().getSource(): $FlowFixMe).clear(); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ { - cursor: 'cursor:2', + cursor: 'cursor:1', node: { __typename: 'User', - id: 'node:2', - name: 'name:node:2', - ...createFragmentRef('node:2', query), + id: 'node:1', + name: 'name:node:1', 
+ username: 'username:node:1', }, }, ], pageInfo: { - endCursor: 'cursor:2', + endCursor: 'cursor:1', hasNextPage: false, hasPreviousPage: false, startCursor: 'cursor:1', }, }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedUser, - isLoadingNext: true, - isLoadingPrevious: false, - // Assert hasNext reflects server response - hasNext: false, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - // Assert hasNext reflects server response - hasNext: false, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); + }, }); + + renderFragment(); + expectFragmentResults([ + { + data: { + ...initialUser, + friends: { + ...initialUser.friends, + pageInfo: expect.objectContaining({ + hasNextPage: false, + }), + }, + }, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext is false + hasNext: false, + hasPrevious: false, + }, + ]); }); - describe('refetch', () => { - // The bulk of refetch behavior is covered in useRefetchableFragmentNode-test, - // so this suite covers the pagination-related test cases. - function expectRefetchRequestIsInFlight(expected: { - data: mixed, - gqlRefetchQuery?: any, - hasNext: boolean, - hasPrevious: boolean, - inFlight: boolean, - refetchQuery?: OperationDescriptor, - refetchVariables: Variables, - requestCount: number, - }) { - expect(fetch).toBeCalledTimes(expected.requestCount); - const fetchCall = fetch.mock.calls.find(call => { - return ( - call[0] === - (expected.gqlRefetchQuery ?? 
gqlPaginationQuery).params && - areEqual(call[1], expected.refetchVariables) && - areEqual(call[2], {force: true}) - ); - }); - const isInFlight = fetchCall != null; - expect(isInFlight).toEqual(expected.inFlight); - } + it('updates after pagination if more results are available', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, - function expectFragmentIsRefetching( - renderer: any, - expected: { - data: mixed, - hasNext: boolean, - hasPrevious: boolean, - refetchVariables: Variables, - refetchQuery?: OperationDescriptor, - gqlRefetchQuery?: $FlowFixMe, + hasNext: true, + hasPrevious: false, }, - ) { - expect(renderSpy).toBeCalledTimes(0); - renderSpy.mockClear(); - - // Assert refetch query was fetched - expectRefetchRequestIsInFlight({ - ...expected, - inFlight: true, - requestCount: 1, - }); + ]); - // Assert component suspended - expect(renderSpy).toBeCalledTimes(0); - expect(renderer.toJSON()).toEqual('Fallback'); - - // Assert query is retained by loadQuery and - // tentatively retained while component is suspended - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toBeCalledTimes(2); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain.mock.calls[0][0]).toEqual( - expected.refetchQuery ?? 
paginationQuery, - ); - } + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); - it('refetches new variables correctly when refetching new id', () => { - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', + }, + }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: true, + }, + }, }, - ]); + }, + }); - TestRenderer.act(() => { - refetch({id: '4'}); - }); + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), + }, + }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', query), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', + }, + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + // Assert hasNext reflects server response + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + 
data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext reflects server response + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); - // Assert that fragment is refetching with the right variables and - // suspends upon refetch - const refetchVariables = { - after: null, - first: 1, - before: null, - last: null, - id: '4', - isViewerFriendLocal: false, - orderby: ['name'], - scale: null, - }; - paginationQuery = createOperationDescriptor( - gqlPaginationQuery, - refetchVariables, - {force: true}, - ); - expectFragmentIsRefetching(renderer, { + it('updates after pagination if no more results are available', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - refetchVariables, - refetchQuery: paginationQuery, - }); + }, + ]); - // Mock network response - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '4', - name: 'Mark', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - username: 'username:node:100', - }, + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: '1', + after: 'cursor:1', + first: 1, + before: null, + last: null, + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, direction, { + data: initialUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + username: 'username:node:2', }, - ], - pageInfo: { - 
endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + startCursor: 'cursor:2', + endCursor: 'cursor:2', + hasNextPage: false, + hasPreviousPage: true, }, }, }, - }); + }, + }); - // Assert fragment is rendered with new data - const expectedUser = { - id: '4', - name: 'Mark', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - ...createFragmentRef('node:100', paginationQuery), - }, + const expectedUser = { + ...initialUser, + friends: { + ...initialUser.friends, + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', query), }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + { + cursor: 'cursor:2', + node: { + __typename: 'User', + id: 'node:2', + name: 'name:node:2', + ...createFragmentRef('node:2', query), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:2', + hasNextPage: false, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedUser, + isLoadingNext: true, + isLoadingPrevious: false, + // Assert hasNext reflects server response + hasNext: false, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + // Assert hasNext reflects server response + hasNext: false, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); + }); + }); - // Assert refetch query was 
retained by loadQuery and the component - expect(release).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toBeCalledTimes(2); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + describe('refetch', () => { + // The bulk of refetch behavior is covered in useRefetchableFragmentNode-test, + // so this suite covers the pagination-related test cases. + function expectRefetchRequestIsInFlight(expected: { + data: mixed, + gqlRefetchQuery?: any, + hasNext: boolean, + hasPrevious: boolean, + inFlight: boolean, + refetchQuery?: OperationDescriptor, + refetchVariables: Variables, + requestCount: number, + }) { + expect(fetch).toBeCalledTimes(expected.requestCount); + const fetchCall = fetch.mock.calls.find(call => { + return ( + call[0] === (expected.gqlRefetchQuery ?? gqlPaginationQuery).params && + areEqual(call[1], expected.refetchVariables) && + areEqual(call[2], {force: true}) + ); }); + const isInFlight = fetchCall != null; + expect(isInFlight).toEqual(expected.inFlight); + } - it('refetches new variables correctly when refetching same id', () => { - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + function expectFragmentIsRefetching( + renderer: any, + expected: { + data: mixed, + hasNext: boolean, + hasPrevious: boolean, + refetchVariables: Variables, + refetchQuery?: OperationDescriptor, + gqlRefetchQuery?: $FlowFixMe, + }, + ) { + expect(renderSpy).toBeCalledTimes(0); + renderSpy.mockClear(); + + // Assert refetch query was fetched + expectRefetchRequestIsInFlight({ + ...expected, + inFlight: true, + requestCount: 1, + }); - TestRenderer.act(() => { - refetch({isViewerFriendLocal: true, orderby: ['lastname']}); - }); + // Assert component 
suspended + expect(renderSpy).toBeCalledTimes(0); + expect(renderer.toJSON()).toEqual('Fallback'); - // Assert that fragment is refetching with the right variables and - // suspends upon refetch - const refetchVariables = { - after: null, - first: 1, - before: null, - last: null, - id: '1', - isViewerFriendLocal: true, - orderby: ['lastname'], - scale: null, - }; - paginationQuery = createOperationDescriptor( - gqlPaginationQuery, - refetchVariables, - {force: true}, - ); - expectFragmentIsRefetching(renderer, { + // Assert query is retained by loadQuery and + // tentatively retained while component is suspended + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toBeCalledTimes(2); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain.mock.calls[0][0]).toEqual( + expected.refetchQuery ?? paginationQuery, + ); + } + + it('refetches new variables correctly when refetching new id', () => { + const renderer = renderFragment(); + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - refetchVariables, - refetchQuery: paginationQuery, - }); + }, + ]); - // Mock network response - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - username: 'username:node:100', - }, + TestRenderer.act(() => { + refetch({id: '4'}); + }); + + // Assert that fragment is refetching with the right variables and + // suspends upon refetch + const refetchVariables = { + after: null, + first: 1, + before: null, + last: null, + id: '4', + isViewerFriendLocal: false, + orderby: ['name'], + scale: null, + }; + paginationQuery = createOperationDescriptor( + gqlPaginationQuery, + refetchVariables, + {force: true}, + ); + 
expectFragmentIsRefetching(renderer, { + data: initialUser, + hasNext: true, + hasPrevious: false, + refetchVariables, + refetchQuery: paginationQuery, + }); + + // Mock network response + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '4', + name: 'Mark', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + username: 'username:node:100', }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, }, }, - }); + }, + }); - // Assert fragment is rendered with new data - const expectedUser = { - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - ...createFragmentRef('node:100', paginationQuery), - }, + // Assert fragment is rendered with new data + const expectedUser = { + id: '4', + name: 'Mark', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + ...createFragmentRef('node:100', paginationQuery), }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + }, + }; + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + data: expectedUser, + 
isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + // Assert refetch query was retained by loadQuery and the component + expect(release).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toBeCalledTimes(2); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + }); + + it('refetches new variables correctly when refetching same id', () => { + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + + TestRenderer.act(() => { + refetch({isViewerFriendLocal: true, orderby: ['lastname']}); + }); - // Assert refetch query was retained by loadQuery and the component - expect(release).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toBeCalledTimes(2); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + // Assert that fragment is refetching with the right variables and + // suspends upon refetch + const refetchVariables = { + after: null, + first: 1, + before: null, + last: null, + id: '1', + isViewerFriendLocal: true, + orderby: ['lastname'], + scale: null, + }; + paginationQuery = createOperationDescriptor( + gqlPaginationQuery, + refetchVariables, + {force: true}, + ); + expectFragmentIsRefetching(renderer, { + data: initialUser, + hasNext: true, + hasPrevious: false, + refetchVariables, + refetchQuery: paginationQuery, }); - it('refetches with correct id from refetchable fragment when using nested fragment', () => { - // Populate store with data for query using nested fragment - environment.commitPayload(queryNestedFragment, { + 
// Mock network response + resolveQuery({ + data: { node: { - __typename: 'Feedback', - id: '', - actor: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - username: 'username:node:1', - }, + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + username: 'username:node:100', }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, }, }, - }); - - // Get fragment ref for user using nested fragment - const userRef = (environment.lookup(queryNestedFragment.fragment) - .data: $FlowFixMe)?.node?.actor; + }, + }); - initialUser = { - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:1', - node: { - __typename: 'User', - id: 'node:1', - name: 'name:node:1', - ...createFragmentRef('node:1', queryNestedFragment), - }, + // Assert fragment is rendered with new data + const expectedUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + ...createFragmentRef('node:100', paginationQuery), }, - ], - pageInfo: { - endCursor: 'cursor:1', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:1', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, - }; - - const renderer = renderFragment({ - owner: queryNestedFragment, - userRef, - }); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - 
refetch({isViewerFriendLocal: true, orderby: ['lastname']}); - }); - - // Assert that fragment is refetching with the right variables and - // suspends upon refetch - const refetchVariables = { - after: null, - first: 1, - before: null, - last: null, - // The id here should correspond to the user id, and not the - // feedback id from the query variables (i.e. ``) - id: '1', - isViewerFriendLocal: true, - orderby: ['lastname'], - scale: null, - }; - paginationQuery = createOperationDescriptor( - gqlPaginationQuery, - refetchVariables, - {force: true}, - ); - expectFragmentIsRefetching(renderer, { - data: initialUser, + }, + }; + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - refetchVariables, - refetchQuery: paginationQuery, - }); + }, + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); - // Mock network response - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - username: 'username:node:100', - }, + // Assert refetch query was retained by loadQuery and the component + expect(release).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toBeCalledTimes(2); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + }); + + it('refetches with correct id from refetchable fragment when using nested fragment', () => { + // Populate store with data for query using nested fragment + environment.commitPayload(queryNestedFragment, { + node: { + __typename: 'Feedback', + id: '', + actor: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 
'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + username: 'username:node:1', }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, }, }, - }); + }, + }); - // Assert fragment is rendered with new data - const expectedUser = { - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - ...createFragmentRef('node:100', paginationQuery), - }, + // Get fragment ref for user using nested fragment + const userRef = (environment.lookup(queryNestedFragment.fragment) + .data: $FlowFixMe)?.node?.actor; + + initialUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:1', + node: { + __typename: 'User', + id: 'node:1', + name: 'name:node:1', + ...createFragmentRef('node:1', queryNestedFragment), }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:1', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:1', }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + }, + }; - // Assert refetch query was retained by loadQuery and the component - expect(release).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toBeCalledTimes(2); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + const renderer 
= renderFragment({ + owner: queryNestedFragment, + userRef, }); - - it('loads more items correctly after refetching', () => { - const renderer = renderFragment(); - expectFragmentResults([ - { - data: initialUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - - TestRenderer.act(() => { - refetch({isViewerFriendLocal: true, orderby: ['lastname']}); - }); - - // Assert that fragment is refetching with the right variables and - // suspends upon refetch - const refetchVariables = { - after: null, - first: 1, - before: null, - last: null, - id: '1', - isViewerFriendLocal: true, - orderby: ['lastname'], - scale: null, - }; - paginationQuery = createOperationDescriptor( - gqlPaginationQuery, - refetchVariables, - {force: true}, - ); - expectFragmentIsRefetching(renderer, { + expectFragmentResults([ + { data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - refetchVariables, - refetchQuery: paginationQuery, - }); + }, + ]); - // Mock network response - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - username: 'username:node:100', - }, + TestRenderer.act(() => { + refetch({isViewerFriendLocal: true, orderby: ['lastname']}); + }); + + // Assert that fragment is refetching with the right variables and + // suspends upon refetch + const refetchVariables = { + after: null, + first: 1, + before: null, + last: null, + // The id here should correspond to the user id, and not the + // feedback id from the query variables (i.e. 
``) + id: '1', + isViewerFriendLocal: true, + orderby: ['lastname'], + scale: null, + }; + paginationQuery = createOperationDescriptor( + gqlPaginationQuery, + refetchVariables, + {force: true}, + ); + expectFragmentIsRefetching(renderer, { + data: initialUser, + hasNext: true, + hasPrevious: false, + refetchVariables, + refetchQuery: paginationQuery, + }); + + // Mock network response + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + username: 'username:node:100', }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, }, }, - }); + }, + }); - // Assert fragment is rendered with new data - const expectedUser = { - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - ...createFragmentRef('node:100', paginationQuery), - }, + // Assert fragment is rendered with new data + const expectedUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + ...createFragmentRef('node:100', paginationQuery), }, - ], - pageInfo: { - endCursor: 'cursor:100', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, - }; - expectFragmentResults([ - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - data: expectedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, 
- }, - ]); + }, + }; + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); - // Assert refetch query was retained by loadQuery and the component - expect(release).not.toBeCalled(); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain).toBeCalledTimes(2); - // $FlowFixMe[method-unbinding] added when improving typing for this parameters - expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + // Assert refetch query was retained by loadQuery and the component + expect(release).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toBeCalledTimes(2); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); + }); - // Paginate after refetching - fetch.mockClear(); - TestRenderer.act(() => { - loadNext(1); - }); - const paginationVariables = { - id: '1', - after: 'cursor:100', - first: 1, - before: null, - last: null, - isViewerFriendLocal: true, - orderby: ['lastname'], - scale: null, - }; - expectFragmentIsLoadingMore(renderer, 'forward', { - data: expectedUser, + it('loads more items correctly after refetching', () => { + const renderer = renderFragment(); + expectFragmentResults([ + { + data: initialUser, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); + }, + ]); - resolveQuery({ - data: { - node: { - __typename: 'User', - id: '1', - name: 'Alice', - friends: { - edges: [ - { - cursor: 'cursor:200', - node: { - __typename: 'User', - id: 'node:200', - name: 'name:node:200', - username: 'username:node:200', - }, + 
TestRenderer.act(() => { + refetch({isViewerFriendLocal: true, orderby: ['lastname']}); + }); + + // Assert that fragment is refetching with the right variables and + // suspends upon refetch + const refetchVariables = { + after: null, + first: 1, + before: null, + last: null, + id: '1', + isViewerFriendLocal: true, + orderby: ['lastname'], + scale: null, + }; + paginationQuery = createOperationDescriptor( + gqlPaginationQuery, + refetchVariables, + {force: true}, + ); + expectFragmentIsRefetching(renderer, { + data: initialUser, + hasNext: true, + hasPrevious: false, + refetchVariables, + refetchQuery: paginationQuery, + }); + + // Mock network response + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + username: 'username:node:100', }, - ], - pageInfo: { - startCursor: 'cursor:200', - endCursor: 'cursor:200', - hasNextPage: true, - hasPreviousPage: true, }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, }, }, - }); + }, + }); - const paginatedUser = { - ...expectedUser, - friends: { - ...expectedUser.friends, - edges: [ - { - cursor: 'cursor:100', - node: { - __typename: 'User', - id: 'node:100', - name: 'name:node:100', - ...createFragmentRef('node:100', paginationQuery), - }, - }, - { - cursor: 'cursor:200', - node: { - __typename: 'User', - id: 'node:200', - name: 'name:node:200', - ...createFragmentRef('node:200', paginationQuery), - }, + // Assert fragment is rendered with new data + const expectedUser = { + id: '1', + name: 'Alice', + friends: { + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + ...createFragmentRef('node:100', paginationQuery), }, - ], - pageInfo: { - endCursor: 'cursor:200', - hasNextPage: true, - hasPreviousPage: false, - startCursor: 
'cursor:100', }, + ], + pageInfo: { + endCursor: 'cursor:100', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: paginatedUser, - isLoadingNext: true, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: paginatedUser, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - }); - }); + }, + }; + expectFragmentResults([ + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + data: expectedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); - describe('paginating @fetchable types', () => { - beforeEach(() => { - const fetchVariables = {id: 'a'}; - gqlQuery = graphql` - query usePaginationFragmentTestStoryQuery($id: ID!) { - nonNodeStory(id: $id) { - ...usePaginationFragmentTestStoryFragment - } - } - `; - - // $FlowFixMe[prop-missing] - // $FlowFixMe[incompatible-type-arg] - gqlFragment = graphql` - fragment usePaginationFragmentTestStoryFragment on NonNodeStory - @argumentDefinitions( - count: {type: "Int", defaultValue: 10} - cursor: {type: "ID"} - ) - @refetchable( - queryName: "usePaginationFragmentTestStoryFragmentRefetchQuery" - ) { - comments(first: $count, after: $cursor) - @connection(key: "StoryFragment_comments") { - edges { - node { - id - } - } - } - } - `; - gqlPaginationQuery = require('./__generated__/usePaginationFragmentTestStoryFragmentRefetchQuery.graphql'); + // Assert refetch query was retained by loadQuery and the component + expect(release).not.toBeCalled(); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + expect(environment.retain).toBeCalledTimes(2); + // $FlowFixMe[method-unbinding] added when improving typing for this parameters + 
expect(environment.retain.mock.calls[0][0]).toEqual(paginationQuery); - query = createOperationDescriptor(gqlQuery, fetchVariables); + // Paginate after refetching + fetch.mockClear(); + TestRenderer.act(() => { + loadNext(1); + }); + const paginationVariables = { + id: '1', + after: 'cursor:100', + first: 1, + before: null, + last: null, + isViewerFriendLocal: true, + orderby: ['lastname'], + scale: null, + }; + expectFragmentIsLoadingMore(renderer, 'forward', { + data: expectedUser, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); - environment.commitPayload(query, { - nonNodeStory: { - __typename: 'NonNodeStory', - id: 'a', - fetch_id: 'fetch:a', - comments: { + resolveQuery({ + data: { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + friends: { edges: [ { - cursor: 'edge:0', + cursor: 'cursor:200', node: { - __typename: 'Comment', - id: 'comment:0', + __typename: 'User', + id: 'node:200', + name: 'name:node:200', + username: 'username:node:200', }, }, ], pageInfo: { - endCursor: 'edge:0', + startCursor: 'cursor:200', + endCursor: 'cursor:200', hasNextPage: true, + hasPreviousPage: true, }, }, }, - }); + }, }); - it('loads and renders next items in connection', () => { - const callback = jest.fn<[Error | null], void>(); - const renderer = renderFragment(); - const initialData = { + const paginatedUser = { + ...expectedUser, + friends: { + ...expectedUser.friends, + edges: [ + { + cursor: 'cursor:100', + node: { + __typename: 'User', + id: 'node:100', + name: 'name:node:100', + ...createFragmentRef('node:100', paginationQuery), + }, + }, + { + cursor: 'cursor:200', + node: { + __typename: 'User', + id: 'node:200', + name: 'name:node:200', + ...createFragmentRef('node:200', paginationQuery), + }, + }, + ], + pageInfo: { + endCursor: 'cursor:200', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'cursor:100', + }, + }, + }; + expectFragmentResults([ + { + // First update has updated connection + 
data: paginatedUser, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: paginatedUser, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + }); + }); + + describe('paginating @fetchable types', () => { + beforeEach(() => { + const fetchVariables = {id: 'a'}; + gqlQuery = graphql` + query usePaginationFragmentTestStoryQuery($id: ID!) { + nonNodeStory(id: $id) { + ...usePaginationFragmentTestStoryFragment + } + } + `; + + // $FlowFixMe[prop-missing] + // $FlowFixMe[incompatible-type-arg] + gqlFragment = graphql` + fragment usePaginationFragmentTestStoryFragment on NonNodeStory + @argumentDefinitions( + count: {type: "Int", defaultValue: 10} + cursor: {type: "ID"} + ) + @refetchable( + queryName: "usePaginationFragmentTestStoryFragmentRefetchQuery" + ) { + comments(first: $count, after: $cursor) + @connection(key: "StoryFragment_comments") { + edges { + node { + id + } + } + } + } + `; + gqlPaginationQuery = require('./__generated__/usePaginationFragmentTestStoryFragmentRefetchQuery.graphql'); + + query = createOperationDescriptor(gqlQuery, fetchVariables); + + environment.commitPayload(query, { + nonNodeStory: { + __typename: 'NonNodeStory', + id: 'a', fetch_id: 'fetch:a', comments: { edges: [ @@ -4113,97 +4058,120 @@ describe.each([ hasNextPage: true, }, }, - }; - expectFragmentResults([ - { - data: initialData, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); + }, + }); + }); - TestRenderer.act(() => { - loadNext(1, {onComplete: callback}); - }); - const paginationVariables = { - id: 'fetch:a', - cursor: 'edge:0', - count: 1, - }; - expectFragmentIsLoadingMore(renderer, 'forward', { + it('loads and renders next items in connection', () => { + const callback = jest.fn<[Error | null], void>(); + const renderer = renderFragment(); + const initialData = { + 
fetch_id: 'fetch:a', + comments: { + edges: [ + { + cursor: 'edge:0', + node: { + __typename: 'Comment', + id: 'comment:0', + }, + }, + ], + pageInfo: { + endCursor: 'edge:0', + hasNextPage: true, + }, + }, + }; + expectFragmentResults([ + { data: initialData, + isLoadingNext: false, + isLoadingPrevious: false, hasNext: true, hasPrevious: false, - paginationVariables, - gqlPaginationQuery, - }); - expect(callback).toBeCalledTimes(0); + }, + ]); - resolveQuery({ - data: { - fetch__NonNodeStory: { - id: 'a', - fetch_id: 'fetch:a', - comments: { - edges: [ - { - cursor: 'edge:1', - node: { - __typename: 'Comment', - id: 'comment:1', - }, + TestRenderer.act(() => { + loadNext(1, {onComplete: callback}); + }); + const paginationVariables = { + id: 'fetch:a', + cursor: 'edge:0', + count: 1, + }; + expectFragmentIsLoadingMore(renderer, 'forward', { + data: initialData, + hasNext: true, + hasPrevious: false, + paginationVariables, + gqlPaginationQuery, + }); + expect(callback).toBeCalledTimes(0); + + resolveQuery({ + data: { + fetch__NonNodeStory: { + id: 'a', + fetch_id: 'fetch:a', + comments: { + edges: [ + { + cursor: 'edge:1', + node: { + __typename: 'Comment', + id: 'comment:1', }, - ], - pageInfo: { - endCursor: 'edge:1', - hasNextPage: true, }, + ], + pageInfo: { + endCursor: 'edge:1', + hasNextPage: true, }, }, }, - }); + }, + }); - const expectedData = { - ...initialData, - comments: { - edges: [ - ...initialData.comments.edges, - { - cursor: 'edge:1', - node: { - __typename: 'Comment', - id: 'comment:1', - }, + const expectedData = { + ...initialData, + comments: { + edges: [ + ...initialData.comments.edges, + { + cursor: 'edge:1', + node: { + __typename: 'Comment', + id: 'comment:1', }, - ], - pageInfo: { - endCursor: 'edge:1', - hasNextPage: true, }, + ], + pageInfo: { + endCursor: 'edge:1', + hasNextPage: true, }, - }; - expectFragmentResults([ - { - // First update has updated connection - data: expectedData, - isLoadingNext: true, - isLoadingPrevious: false, 
- hasNext: true, - hasPrevious: false, - }, - { - // Second update sets isLoading flag back to false - data: expectedData, - isLoadingNext: false, - isLoadingPrevious: false, - hasNext: true, - hasPrevious: false, - }, - ]); - expect(callback).toBeCalledTimes(1); - }); + }, + }; + expectFragmentResults([ + { + // First update has updated connection + data: expectedData, + isLoadingNext: true, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + { + // Second update sets isLoading flag back to false + data: expectedData, + isLoadingNext: false, + isLoadingPrevious: false, + hasNext: true, + hasPrevious: false, + }, + ]); + expect(callback).toBeCalledTimes(1); }); }); }); diff --git a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-provided-variables-test.js b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-provided-variables-test.js index b3aae63293c68..b2ebce318726a 100644 --- a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-provided-variables-test.js +++ b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-provided-variables-test.js @@ -20,10 +20,9 @@ import type {GraphQLResponse} from 'relay-runtime/network/RelayNetworkTypes'; const {loadQuery} = require('../loadQuery'); const preloadQuery_DEPRECATED = require('../preloadQuery_DEPRECATED'); -const usePreloadedQuery_REACT_CACHE = require('../react-cache/usePreloadedQuery_REACT_CACHE'); const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); const useFragment = require('../useFragment'); -const usePreloadedQuery_LEGACY = require('../usePreloadedQuery'); +const usePreloadedQuery = require('../usePreloadedQuery'); const RelayProvider_impure = require('./RelayProvider_impure'); const React = require('react'); const TestRenderer = require('react-test-renderer'); @@ -33,7 +32,6 @@ const { Observable, PreloadableQueryRegistry, RecordSource, - RelayFeatureFlags, Store, graphql, } = require('relay-runtime'); @@ -103,211 +101,187 @@ 
const responsePV = { }, }; -describe.each([ - ['React Cache', usePreloadedQuery_REACT_CACHE], - ['Legacy', usePreloadedQuery_LEGACY], -])( - 'usePreloadedQuery provided variables (%s)', - (_hookName, usePreloadedQuery) => { - const usingReactCache = usePreloadedQuery === usePreloadedQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - let originalReactCacheFeatureFlag; - beforeEach(() => { - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = - usePreloadedQuery === usePreloadedQuery_REACT_CACHE; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; +describe('usePreloadedQuery provided variables (%s)', () => { + let data; + let dataSource: ?Sink; + let environment; + let fetch; + const Component = function (props: any) { + const queryData = usePreloadedQuery(queryPV, props.prefetched); + data = useFragment(fragmentPV, queryData.node); + return [ + data?.name ?? 'MISSING NAME', + data?.firstName ?? 'skipped firstName', + data?.lastName ?? 'MISSING LASTNAME', + data?.username ?? 
'skipped username', + ].join(', '); + }; + beforeEach(() => { + dataSource = undefined; + fetch = jest.fn( + ( + _query: RequestParameters, + _variables: Variables, + _cacheConfig: CacheConfig, + ) => + Observable.create((sink: Sink) => { + dataSource = sink; + }), + ); + environment = new Environment({ + // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file + network: Network.create(fetch), + store: new Store(new RecordSource()), }); + RelayProvider_impure.test_reset(); + if (withProvidedVariables.tests_only_resetDebugCache !== undefined) { + withProvidedVariables.tests_only_resetDebugCache(); + } + }); - let data; - let dataSource: ?Sink; - let environment; - let fetch; - const Component = function (props: any) { - const queryData = usePreloadedQuery(queryPV, props.prefetched); - data = useFragment(fragmentPV, queryData.node); - return [ - data?.name ?? 'MISSING NAME', - data?.firstName ?? 'skipped firstName', - data?.lastName ?? 'MISSING LASTNAME', - data?.username ?? 
'skipped username', - ].join(', '); - }; - beforeEach(() => { - dataSource = undefined; - fetch = jest.fn( - ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => - Observable.create((sink: Sink) => { - dataSource = sink; - }), + describe('using preloadQuery_DEPRECATED', () => { + it('renders synchronously with provided variables', () => { + const prefetched = preloadQuery_DEPRECATED( + environment, + preloadableConcreteRequestPV, + { + id: '4', + }, ); - environment = new Environment({ - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - network: Network.create(fetch), - store: new Store(new RecordSource()), - }); - RelayProvider_impure.test_reset(); - if (withProvidedVariables.tests_only_resetDebugCache !== undefined) { - withProvidedVariables.tests_only_resetDebugCache(); - } - }); - - describe('using preloadQuery_DEPRECATED', () => { - it('renders synchronously with provided variables', () => { - const prefetched = preloadQuery_DEPRECATED( - environment, - preloadableConcreteRequestPV, - { - id: '4', - }, - ); - expect(dataSource).toBeDefined(); - if (dataSource) { - dataSource.next(responsePV); - } + expect(dataSource).toBeDefined(); + if (dataSource) { + dataSource.next(responsePV); + } - const renderer = TestRenderer.create( - - - - - , - ); - TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual( - 'testName, skipped firstName, testLastName, skipped username', - ); - expect(data).toEqual({ - name: 'testName', - lastName: 'testLastName', - }); + const renderer = TestRenderer.create( + + + + + , + ); + TestRenderer.act(() => jest.runAllImmediates()); + expect(renderer.toJSON()).toEqual( + 'testName, skipped firstName, testLastName, skipped username', + ); + expect(data).toEqual({ + name: 'testName', + lastName: 'testLastName', }); }); - describe('using loadQuery', () => { - it('renders synchronously when passed a preloadableConcreteRequest', () => { - const 
prefetched = loadQuery( - environment, - preloadableConcreteRequestPV, - { - id: '4', - }, - ); + }); + describe('using loadQuery', () => { + it('renders synchronously when passed a preloadableConcreteRequest', () => { + const prefetched = loadQuery( + environment, + preloadableConcreteRequestPV, + { + id: '4', + }, + ); - PreloadableQueryRegistry.set(IdPV, queryPV); + PreloadableQueryRegistry.set(IdPV, queryPV); - expect(dataSource).toBeDefined(); - if (dataSource) { - dataSource.next(responsePV); - } - TestRenderer.act(() => jest.runAllImmediates()); + expect(dataSource).toBeDefined(); + if (dataSource) { + dataSource.next(responsePV); + } + TestRenderer.act(() => jest.runAllImmediates()); - const renderer = TestRenderer.create( - - - - - , - ); - TestRenderer.act(() => jest.runAllImmediates()); + const renderer = TestRenderer.create( + + + + + , + ); + TestRenderer.act(() => jest.runAllImmediates()); - expect(renderer.toJSON()).toEqual( - 'testName, skipped firstName, testLastName, skipped username', - ); - expect(data).toEqual({ - name: 'testName', - lastName: 'testLastName', - }); + expect(renderer.toJSON()).toEqual( + 'testName, skipped firstName, testLastName, skipped username', + ); + expect(data).toEqual({ + name: 'testName', + lastName: 'testLastName', }); + }); - it('renders synchronously when passed a query AST', () => { - const prefetched = loadQuery(environment, queryPV, { - id: '4', - }); - expect(dataSource).toBeDefined(); - if (dataSource) { - dataSource.next(responsePV); - } - TestRenderer.act(() => jest.runAllImmediates()); + it('renders synchronously when passed a query AST', () => { + const prefetched = loadQuery(environment, queryPV, { + id: '4', + }); + expect(dataSource).toBeDefined(); + if (dataSource) { + dataSource.next(responsePV); + } + TestRenderer.act(() => jest.runAllImmediates()); - const renderer = TestRenderer.create( - - - - - , - ); + const renderer = TestRenderer.create( + + + + + , + ); - expect(renderer.toJSON()).toEqual( - 
'testName, skipped firstName, testLastName, skipped username', - ); - expect(data).toEqual({ - name: 'testName', - lastName: 'testLastName', - }); + expect(renderer.toJSON()).toEqual( + 'testName, skipped firstName, testLastName, skipped username', + ); + expect(data).toEqual({ + name: 'testName', + lastName: 'testLastName', }); }); + }); - it('warns when variable provider is an impure function', () => { - graphql` - fragment usePreloadedQueryProvidedVariablesTest_badFragment on User - @argumentDefinitions( - impureProvider: { - type: "Float!" - provider: "./RelayProvider_impure.relayprovider" - } - ) { - profile_picture(scale: $impureProvider) { - uri - } + it('warns when variable provider is an impure function', () => { + graphql` + fragment usePreloadedQueryProvidedVariablesTest_badFragment on User + @argumentDefinitions( + impureProvider: { + type: "Float!" + provider: "./RelayProvider_impure.relayprovider" } - `; - const queryPVBad = graphql` - query usePreloadedQueryProvidedVariablesTest_badQuery($id: ID!) { - node(id: $id) { - ...usePreloadedQueryProvidedVariablesTest_badFragment - } + ) { + profile_picture(scale: $impureProvider) { + uri } - `; - - const preloadWithFetchKey = (fetchKey: string | number) => { - return preloadQuery_DEPRECATED( - environment, - { - kind: 'PreloadableConcreteRequest', - params: queryPVBad.params, - }, - { - id: '4', - }, - { - fetchKey, - }, - ); - }; - - preloadWithFetchKey('fetchKey0'); + } + `; + const queryPVBad = graphql` + query usePreloadedQueryProvidedVariablesTest_badQuery($id: ID!) 
{ + node(id: $id) { + ...usePreloadedQueryProvidedVariablesTest_badFragment + } + } + `; - expectToWarn( - 'Relay: Expected function `get` for provider ' + - '`__relay_internal__pv__RelayProvider_impurerelayprovider` ' + - 'to be a pure function, but got conflicting return values', - () => { - preloadWithFetchKey('fetchKey1'); + const preloadWithFetchKey = (fetchKey: string | number) => { + return preloadQuery_DEPRECATED( + environment, + { + kind: 'PreloadableConcreteRequest', + params: queryPVBad.params, + }, + { + id: '4', + }, + { + fetchKey, }, ); - }); - }, -); + }; + + preloadWithFetchKey('fetchKey0'); + + expectToWarn( + 'Relay: Expected function `get` for provider ' + + '`__relay_internal__pv__RelayProvider_impurerelayprovider` ' + + 'to be a pure function, but got conflicting return values', + () => { + preloadWithFetchKey('fetchKey1'); + }, + ); + }); +}); diff --git a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-react-double-effects-test.js b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-react-double-effects-test.js index 2d76e13387a66..78cb799ca293d 100644 --- a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-react-double-effects-test.js +++ b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-react-double-effects-test.js @@ -157,10 +157,11 @@ describe.skip('usePreloadedQuery-react-double-effects', () => { }; renderLogs = []; - QueryComponent = function (props: any) { + QueryComponent = function TestQueryComponent(props: any) { const result = usePreloadedQuery(props.queryInput, props.queryRef); const name = result?.node?.name ?? 
'Empty'; + // $FlowFixMe[react-rule-hook] useEffect(() => { renderLogs.push(`commit: ${name}`); return () => { diff --git a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-test.js b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-test.js index 386486e23c4ab..7c14a2feadbb2 100644 --- a/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-test.js +++ b/packages/react-relay/relay-hooks/__tests__/usePreloadedQuery-test.js @@ -11,14 +11,15 @@ 'use strict'; -import type {Sink} from '../../../relay-runtime/network/RelayObservable'; +import type {PreloadableConcreteRequest} from '../EntryPointTypes.flow'; +import type {usePreloadedQueryTestQuery} from './__generated__/usePreloadedQueryTestQuery.graphql'; +import type {Sink} from 'relay-runtime'; import type {GraphQLResponse} from 'relay-runtime/network/RelayNetworkTypes'; const {loadQuery} = require('../loadQuery'); const preloadQuery_DEPRECATED = require('../preloadQuery_DEPRECATED'); -const usePreloadedQuery_REACT_CACHE = require('../react-cache/usePreloadedQuery_REACT_CACHE'); const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); -const usePreloadedQuery_LEGACY = require('../usePreloadedQuery'); +const usePreloadedQuery = require('../usePreloadedQuery'); const React = require('react'); const TestRenderer = require('react-test-renderer'); const { @@ -27,7 +28,6 @@ const { Observable, PreloadableQueryRegistry, RecordSource, - RelayFeatureFlags, Store, graphql, } = require('relay-runtime'); @@ -51,10 +51,11 @@ const query = graphql` const ID = '12345'; (query.params: $FlowFixMe).id = ID; -const preloadableConcreteRequest = { - kind: 'PreloadableConcreteRequest', - params: query.params, -}; +const preloadableConcreteRequest: PreloadableConcreteRequest = + { + kind: 'PreloadableConcreteRequest', + params: query.params, + }; const response = { data: { @@ -113,28 +114,7 @@ afterAll(() => { jest.clearAllMocks(); }); -describe.each([ - ['React Cache', 
usePreloadedQuery_REACT_CACHE], - ['Legacy', usePreloadedQuery_LEGACY], -])('usePreloadedQuery (%s)', (_hookName, usePreloadedQuery) => { - const usingReactCache = usePreloadedQuery === usePreloadedQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - let originalReactCacheFeatureFlag; - beforeEach(() => { - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = - usePreloadedQuery === usePreloadedQuery_REACT_CACHE; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); - +describe('usePreloadedQuery', () => { beforeEach(() => { dataSource = undefined; // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode @@ -559,13 +539,9 @@ describe.each([ describe('if loadQuery is passed a preloadableConcreteRequest which is not available synchronously', () => { it('does not suspend while the query is pending until the query AST and network response are available', () => { - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - { - id: '4', - }, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '4', + }); let data; function Component(props: any) { data = usePreloadedQuery(query, props.prefetched); @@ -606,13 +582,9 @@ describe.each([ }); it('does not suspend while the query is pending until the network response and the query AST are available', () => { - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - { - id: '4', - }, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '4', + }); let data; function Component(props: any) { data = usePreloadedQuery(query, props.prefetched); @@ -653,13 +625,9 @@ describe.each([ }); it('renders 
synchronously if the query has already completed', () => { - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - { - id: '4', - }, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '4', + }); let data; PreloadableQueryRegistry.set(ID, query); expect(dataSource).toBeDefined(); @@ -690,13 +658,9 @@ describe.each([ }); it('renders an error synchronously if the query has already errored', () => { - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - { - id: '4', - }, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '4', + }); let data; PreloadableQueryRegistry.set(ID, query); expect(dataSource).toBeDefined(); @@ -726,7 +690,7 @@ describe.each([ describe('when loadQuery is passed a query AST', () => { describe('when the network response is available before usePreloadedQuery is rendered', () => { it('should synchronously render successfully', () => { - const prefetched = loadQuery(environment, query, { + const prefetched = loadQuery(environment, query, { id: '4', }); let data; @@ -757,7 +721,7 @@ describe.each([ }); }); it('should synchronously render errors', () => { - const prefetched = loadQuery(environment, query, { + const prefetched = loadQuery(environment, query, { id: '4', }); let data; @@ -787,7 +751,7 @@ describe.each([ describe('when the network response occurs after usePreloadedQuery is rendered', () => { it('should suspend, and then render', () => { - const prefetched = loadQuery(environment, query, { + const prefetched = loadQuery(environment, query, { id: '4', }); let data; @@ -821,7 +785,7 @@ describe.each([ }); }); it('should suspend, then render and error', () => { - const prefetched = loadQuery(environment, query, { + const prefetched = loadQuery(environment, query, { id: '4', }); let data; @@ -864,7 +828,7 @@ describe.each([ }); describe('when the network response is available before usePreloadedQuery is rendered', () => { 
it('should synchronously render successfully', () => { - const prefetched = loadQuery( + const prefetched = loadQuery( environment, preloadableConcreteRequest, { @@ -899,7 +863,7 @@ describe.each([ }); }); it('should synchronously render errors', () => { - const prefetched = loadQuery( + const prefetched = loadQuery( environment, preloadableConcreteRequest, { @@ -933,7 +897,7 @@ describe.each([ describe('when the network response occurs after usePreloadedQuery is rendered', () => { it('should suspend, and then render', () => { - const prefetched = loadQuery( + const prefetched = loadQuery( environment, preloadableConcreteRequest, { @@ -971,7 +935,7 @@ describe.each([ }); }); it('should suspend, then render and error', () => { - const prefetched = loadQuery( + const prefetched = loadQuery( environment, preloadableConcreteRequest, { @@ -1022,13 +986,9 @@ describe.each([ network: Network.create(altFetch), store: new Store(new RecordSource()), }); - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - { - id: '4', - }, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '4', + }); let data; expect(dataSource).toBeDefined(); if (dataSource) { @@ -1066,11 +1026,9 @@ describe.each([ const expectWarningMessage = expect.stringMatching( /^usePreloadedQuery\(\): Expected preloadedQuery to not be disposed/, ); - const prefetched = loadQuery( - environment, - preloadableConcreteRequest, - {}, - ); + const prefetched = loadQuery(environment, preloadableConcreteRequest, { + id: '1', + }); function Component(props: any) { const data = usePreloadedQuery(query, props.prefetched); @@ -1107,7 +1065,7 @@ describe.each([ describe('refetching', () => { it('renders updated data correctly when refetching same query and variables', () => { - const loadedFirst = loadQuery( + const loadedFirst = loadQuery( environment, preloadableConcreteRequest, { @@ -1119,7 +1077,7 @@ describe.each([ ); let data; function Component(props: any) { - 
data = usePreloadedQuery(query, props.prefetched); + data = usePreloadedQuery(query, props.prefetched); return data.node?.name; } const renderer = TestRenderer.create( @@ -1155,7 +1113,7 @@ describe.each([ // Refetch data = undefined; dataSource = undefined; - const loadedSecond = loadQuery( + const loadedSecond = loadQuery( environment, preloadableConcreteRequest, { @@ -1208,7 +1166,7 @@ describe.each([ }); it('renders updated data correctly when refetching different variables', () => { - const loadedFirst = loadQuery( + const loadedFirst = loadQuery( environment, preloadableConcreteRequest, { @@ -1220,7 +1178,7 @@ describe.each([ ); let data; function Component(props: any) { - data = usePreloadedQuery(query, props.prefetched); + data = usePreloadedQuery(query, props.prefetched); return data.node?.name; } const renderer = TestRenderer.create( @@ -1256,7 +1214,7 @@ describe.each([ // Refetch data = undefined; dataSource = undefined; - const loadedSecond = loadQuery( + const loadedSecond = loadQuery( environment, preloadableConcreteRequest, { diff --git a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-live-query-test.js b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-live-query-test.js index ba679969d93b4..9a9195cd7dfff 100644 --- a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-live-query-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-live-query-test.js @@ -297,7 +297,7 @@ describe('when an initial preloaded query is passed', () => { }); beforeEach(() => { - jest.mock('scheduler', () => require('scheduler/unstable_mock')); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); }); afterEach(() => { diff --git a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-react-double-effects-test.js b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-react-double-effects-test.js index 125f4a790b14f..183feff6ac13f 100644 --- 
a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-react-double-effects-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-react-double-effects-test.js @@ -76,7 +76,7 @@ describe.skip('useQueryLoader-react-double-effects', () => { let loaderRenderLogs: Array; beforeEach(() => { - jest.mock('scheduler', () => require('scheduler/unstable_mock')); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); environment = createMockEnvironment(); @@ -128,10 +128,11 @@ describe.skip('useQueryLoader-react-double-effects', () => { query = createOperationDescriptor(gqlQuery, variables); queryRenderLogs = []; - QueryComponent = function (props: any) { + QueryComponent = function TestQueryComponent(props: any) { const result = usePreloadedQuery(gqlQuery, (props.queryRef: $FlowFixMe)); const name = result?.node?.name ?? 'Empty'; + // $FlowFixMe[react-rule-hook] useEffect(() => { queryRenderLogs.push(`commit: ${name}`); return () => { @@ -144,10 +145,11 @@ describe.skip('useQueryLoader-react-double-effects', () => { }; loaderRenderLogs = []; - LoaderComponent = function (props: any) { + LoaderComponent = function TestLoaderComponent(props: any) { const [queryRef] = useQueryLoader(gqlQuery, props.initialQueryRef); const queryRefId = queryRef == null ? 'null' : queryRef.id ?? 
'Unknown'; + // $FlowFixMe[react-rule-hook] useEffect(() => { loaderRenderLogs.push(`commit: ${queryRefId}`); return () => { diff --git a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-test.js b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-test.js index da99d0edf72be..9cadad9e48a0e 100644 --- a/packages/react-relay/relay-hooks/__tests__/useQueryLoader-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useQueryLoader-test.js @@ -347,7 +347,7 @@ describe('useQueryLoader', () => { }); beforeEach(() => { - jest.mock('scheduler', () => require('scheduler/unstable_mock')); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); }); afterEach(() => { diff --git a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragment-test.js b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragment-test.js index 088f8d6cd7ede..89d95e192d86a 100644 --- a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragment-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragment-test.js @@ -74,7 +74,6 @@ describe('useRefetchableFragment', () => { useRefetchableFragmentTestNestedUserFragment: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } @@ -143,7 +142,6 @@ describe('useRefetchableFragment', () => { [gqlFragment.name]: {}, }, [FRAGMENT_OWNER_KEY]: query.request, - __isWithinUnmatchedTypeRefinement: false, }), [], ); diff --git a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-test.js b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-test.js index db698f58ecf4b..418f6c11c572e 100644 --- a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-test.js @@ -39,8 +39,8 @@ import type {OperationDescriptor, Variables} from 'relay-runtime'; import type {Query} from 'relay-runtime/util/RelayRuntimeTypes'; const 
{useTrackLoadQueryInRender} = require('../loadQuery'); -const useRefetchableFragmentInternal_REACT_CACHE = require('../react-cache/useRefetchableFragmentInternal_REACT_CACHE'); -const useRefetchableFragmentNode_LEGACY = require('../useRefetchableFragmentNode'); +const RelayEnvironmentProvider = require('../RelayEnvironmentProvider'); +const useRefetchableFragmentInternal = require('../useRefetchableFragmentInternal'); const invariant = require('invariant'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); @@ -51,7 +51,6 @@ const { FRAGMENTS_KEY, ID_KEY, Observable, - RelayFeatureFlags, createOperationDescriptor, graphql, } = require('relay-runtime'); @@ -63,25 +62,11 @@ const Scheduler = require('scheduler'); const {useMemo, useState, useEffect} = React; -describe.each([ - ['React Cache', useRefetchableFragmentInternal_REACT_CACHE], - ['Legacy', useRefetchableFragmentNode_LEGACY], -])( +describe.each([['New', useRefetchableFragmentInternal]])( 'useRefetchableFragmentNode (%s)', (_hookName, useRefetchableFragmentNodeOriginal) => { - let isUsingReactCacheImplementation; - let originalReactCacheFeatureFlag; - beforeEach(() => { - isUsingReactCacheImplementation = - useRefetchableFragmentNodeOriginal === - useRefetchableFragmentInternal_REACT_CACHE; - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = isUsingReactCacheImplementation; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); - + const isUsingNewImplementation = + useRefetchableFragmentNodeOriginal === useRefetchableFragmentInternal; let environment; let gqlQuery: | Query< @@ -143,10 +128,10 @@ describe.each([ this.setState({error}); } render(): React.Node { - const {children, fallback} = this.props; + const {children, fallback: Fallback} = this.props; const {error} = this.state; if (error) { - return React.createElement(fallback, {error}); + return ; } 
return children; } @@ -192,7 +177,6 @@ describe.each([ [fragmentName]: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } @@ -200,9 +184,7 @@ describe.each([ // Set up mocks jest.spyOn(console, 'warn').mockImplementationOnce(() => {}); jest.mock('warning'); - jest.mock('scheduler', () => - jest.requireActual('scheduler/unstable_mock'), - ); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); /* $FlowFixMe[underconstrained-implicit-instantiation] error found when * enabling Flow LTI mode */ commitSpy = jest.fn<_, mixed>(); @@ -356,7 +338,6 @@ describe.each([ [fragment.name]: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }), [owner, fragment.name], ); @@ -396,7 +377,7 @@ describe.each([ const Fallback = () => { useEffect(() => { - Scheduler.unstable_yieldValue('Fallback'); + Scheduler.log('Fallback'); }); return 'Fallback'; @@ -1397,7 +1378,7 @@ describe.each([ ]); }); - it('warns if data retured has different __typename', () => { + it('warns if data returned has different __typename', () => { const warning = require('warning'); // $FlowFixMe[prop-missing] warning.mockClear(); @@ -1463,12 +1444,13 @@ describe.each([ const warningCalls = warning.mock.calls.filter( call => call[0] === false, ); - expect(warningCalls.length).toEqual(2); // the other warnings are from FragmentResource.js expect( - warningCalls[1][1].includes( - 'Relay: Call to `refetch` returned data with a different __typename:', + warningCalls.some(([_condition, format, ..._args]) => + format.includes( + 'Relay: Call to `refetch` returned data with a different __typename:', + ), ), - ).toEqual(true); + ).toBe(true); }); it('warns if a different id is returned', () => { @@ -1535,9 +1517,7 @@ describe.each([ const warningCalls = warning.mock.calls.filter( call => call[0] === false, ); - expect(warningCalls.length).toEqual( - isUsingReactCacheImplementation ? 
2 : 1, - ); + expect(warningCalls.length).toEqual(isUsingNewImplementation ? 2 : 1); expect( warningCalls[0][1].includes( 'Relay: Call to `refetch` returned a different id, expected', @@ -1988,7 +1968,7 @@ describe.each([ {force: true}, ); - // Assert we suspend on intial refetch request + // Assert we suspend on initial refetch request expectFragmentIsRefetching(renderer, { refetchQuery: refetchQuery1, refetchVariables: refetchVariables1, @@ -2157,6 +2137,75 @@ describe.each([ expect(fetchSpy).toBeCalledTimes(4); }); + + it('preserves referential equality after refetch if data & variables have not changed', async () => { + let refetchCount = 0; + const ComponentWithUseEffectRefetch = (props: { + fragmentKey: any, + }): null => { + const {fragmentData, refetch} = useRefetchableFragmentNode( + graphql` + fragment useRefetchableFragmentNodeTestIdentityTestFragment on User + @refetchable( + queryName: "useRefetchableFragmentNodeTestIdentityTestFragmentRefetchQuery" + ) { + id + name + profile_picture(scale: $scale) { + uri + } + } + `, + props.fragmentKey, + ); + if (refetchCount > 2) { + throw new Error('Detected refetch loop.'); + } + useEffect(() => { + refetchCount++; + refetch(fragmentData.id); + }, [fragmentData, refetch]); + + return null; + }; + const variables = {id: '1', scale: 16}; + const query = createOperationDescriptor( + gqlRefetchQuery, + variables, + {}, + ); + environment.commitPayload(query, { + node: { + __typename: 'User', + id: '1', + name: 'Alice', + profile_picture: null, + }, + }); + let renderer; + TestRenderer.act(() => { + renderer = TestRenderer.create( + `Error: ${error.message}`}> + + + + + + , + // $FlowFixMe[prop-missing] - error revealed when flow-typing ReactTestRenderer + {unstable_isConcurrent: true}, + ); + jest.runAllImmediates(); + }); + expect(refetchCount).toBe(2); + expect(renderer?.toJSON()).toBe(null); + }); }); describe('fetchPolicy', () => { diff --git 
a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-with-suspense-transition-test.js b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-with-suspense-transition-test.js index f8911dc62fdd4..d8fa23787d21a 100644 --- a/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-with-suspense-transition-test.js +++ b/packages/react-relay/relay-hooks/__tests__/useRefetchableFragmentNode-with-suspense-transition-test.js @@ -14,7 +14,7 @@ import type {RelayMockEnvironment} from '../../../relay-test-utils/RelayModernMo import type {OperationDescriptor, Variables} from 'relay-runtime'; import type {Disposable} from 'relay-runtime/util/RelayRuntimeTypes'; -const useRefetchableFragmentNodeOriginal = require('../useRefetchableFragmentNode'); +const useRefetchableFragmentNodeOriginal = require('../legacy/useRefetchableFragmentNode'); const React = require('react'); const ReactRelayContext = require('react-relay/ReactRelayContext'); const TestRenderer = require('react-test-renderer'); @@ -72,14 +72,14 @@ describe('useRefetchableFragmentNode with useTransition', () => { }; useLayoutEffect(() => { - Scheduler.unstable_yieldValue({data, isPending}); + Scheduler.log({data, isPending}); }); return {data, ...result}; } function assertYieldsWereCleared() { - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); if (actualYields.length !== 0) { throw new Error( 'Log of yielded values is not empty. 
' + @@ -104,7 +104,7 @@ describe('useRefetchableFragmentNode with useTransition', () => { ) { assertYieldsWereCleared(); Scheduler.unstable_flushAllWithoutAsserting(); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields.length).toEqual(expectedYields.length); expectedYields.forEach((expected, idx) => assertYield(expected, actualYields[idx]), @@ -114,7 +114,7 @@ describe('useRefetchableFragmentNode with useTransition', () => { function expectNoYields() { assertYieldsWereCleared(); Scheduler.unstable_flushAllWithoutAsserting(); - const actualYields = Scheduler.unstable_clearYields(); + const actualYields = Scheduler.unstable_clearLog(); expect(actualYields.length).toEqual(0); } @@ -172,7 +172,6 @@ describe('useRefetchableFragmentNode with useTransition', () => { {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }; } @@ -180,9 +179,7 @@ describe('useRefetchableFragmentNode with useTransition', () => { // Set up mocks jest.resetModules(); jest.mock('warning'); - jest.mock('scheduler', () => { - return jest.requireActual('scheduler/unstable_mock'); - }); + jest.mock('scheduler', () => require('../../__tests__/mockScheduler')); // Supress `act` warnings since we are intentionally not // using it for most tests here. 
`act` currently always @@ -266,7 +263,6 @@ describe('useRefetchableFragmentNode with useTransition', () => { [fragment.name]: {}, }, [FRAGMENT_OWNER_KEY]: owner.request, - __isWithinUnmatchedTypeRefinement: false, }), [owner, fragment.name], ); @@ -295,7 +291,7 @@ describe('useRefetchableFragmentNode with useTransition', () => { const Fallback = () => { useLayoutEffect(() => { - Scheduler.unstable_yieldValue('Fallback'); + Scheduler.log('Fallback'); }); return 'Fallback'; diff --git a/packages/react-relay/relay-hooks/FragmentResource.js b/packages/react-relay/relay-hooks/legacy/FragmentResource.js similarity index 86% rename from packages/react-relay/relay-hooks/FragmentResource.js rename to packages/react-relay/relay-hooks/legacy/FragmentResource.js index 72b2fee1aa288..0fad4a48a78b2 100644 --- a/packages/react-relay/relay-hooks/FragmentResource.js +++ b/packages/react-relay/relay-hooks/legacy/FragmentResource.js @@ -11,8 +11,8 @@ 'use strict'; -import type {Cache} from './LRUCache'; -import type {QueryResource, QueryResult} from './QueryResource'; +import type {Cache} from '../LRUCache'; +import type {QueryResource, QueryResult} from '../QueryResource'; import type { ConcreteRequest, DataID, @@ -24,9 +24,9 @@ import type { } from 'relay-runtime'; import type {MissingLiveResolverField} from 'relay-runtime/store/RelayStoreTypes'; -const LRUCache = require('./LRUCache'); -const {getQueryResourceForEnvironment} = require('./QueryResource'); -const SuspenseResource = require('./SuspenseResource'); +const LRUCache = require('../LRUCache'); +const {getQueryResourceForEnvironment} = require('../QueryResource'); +const SuspenseResource = require('../SuspenseResource'); const invariant = require('invariant'); const { __internal: {fetchQuery, getPromiseForActiveRequest}, @@ -50,7 +50,12 @@ type FragmentResourceCache = Cache< promise: Promise, result: FragmentResult, } - | {kind: 'done', result: FragmentResult}, + | {kind: 'done', result: FragmentResult} + | { + kind: 
'missing', + result: FragmentResult, + snapshot: SingularOrPluralSnapshot, + }, >; const WEAKMAP_SUPPORTED = typeof WeakMap === 'function'; @@ -201,11 +206,9 @@ class FragmentResourceImpl { constructor(environment: IEnvironment) { this._environment = environment; this._cache = LRUCache.create(CACHE_CAPACITY); - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - this._clientEdgeQueryResultsCache = new ClientEdgeQueryResultsCache( - environment, - ); - } + this._clientEdgeQueryResultsCache = new ClientEdgeQueryResultsCache( + environment, + ); } /** @@ -348,16 +351,28 @@ class FragmentResourceImpl { componentDisplayName, ); - const snapshot = - fragmentSelector.kind === 'PluralReaderSelector' - ? fragmentSelector.selectors.map(s => environment.lookup(s)) - : environment.lookup(fragmentSelector); + let fragmentResult = null; + let snapshot = null; + // Fall through to existing logic if it's 'missing' state so it would check and save promise into cache. + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + cachedValue != null && + cachedValue.kind === 'missing' + ) { + fragmentResult = cachedValue.result; + snapshot = cachedValue.snapshot; + } else { + snapshot = + fragmentSelector.kind === 'PluralReaderSelector' + ? 
fragmentSelector.selectors.map(s => environment.lookup(s)) + : environment.lookup(fragmentSelector); - const fragmentResult = getFragmentResult( - fragmentIdentifier, - snapshot, - storeEpoch, - ); + fragmentResult = getFragmentResult( + fragmentIdentifier, + snapshot, + storeEpoch, + ); + } if (!fragmentResult.isMissingData) { this._throwOrLogErrorsInSnapshot(snapshot); @@ -380,7 +395,6 @@ class FragmentResourceImpl { // First, initiate a query for any client edges that were missing data: let clientEdgeRequests: ?Array = null; if ( - RelayFeatureFlags.ENABLE_CLIENT_EDGES && fragmentNode.metadata?.hasClientEdges === true && hasMissingClientEdges(snapshot) ) { @@ -415,7 +429,7 @@ class FragmentResourceImpl { ); } let clientEdgePromises: Array> = []; - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES && clientEdgeRequests) { + if (clientEdgeRequests) { clientEdgePromises = clientEdgeRequests .map(request => getPromiseForActiveRequest(this._environment, request)) .filter(Boolean); @@ -483,6 +497,17 @@ class FragmentResourceImpl { } } + // set it as done if has missing data and no pending operations + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + fragmentResult.isMissingData + ) { + this._cache.set(fragmentIdentifier, { + kind: 'done', + result: fragmentResult, + }); + } + this._throwOrLogErrorsInSnapshot(snapshot); // At this point, there's nothing we can do. We don't have all the expected @@ -538,6 +563,8 @@ class FragmentResourceImpl { this._environment, s.missingRequiredFields, s.relayResolverErrors, + s.errorResponseFields, + s.selector.node.metadata?.throwOnFieldError ?? false, ); }); } else { @@ -545,6 +572,8 @@ class FragmentResourceImpl { this._environment, snapshot.missingRequiredFields, snapshot.relayResolverErrors, + snapshot.errorResponseFields, + snapshot.selector.node.metadata?.throwOnFieldError ?? 
false, ); } } @@ -617,24 +646,38 @@ class FragmentResourceImpl { disposables.push( environment.subscribe(currentSnapshot, latestSnapshot => { const storeEpoch = environment.getStore().getEpoch(); - this._cache.set(cacheKey, { - kind: 'done', - result: getFragmentResult(cacheKey, latestSnapshot, storeEpoch), - }); + const result = getFragmentResult( + cacheKey, + latestSnapshot, + storeEpoch, + ); + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + result.isMissingData + ) { + this._cache.set(cacheKey, { + kind: 'missing', + result: result, + snapshot: latestSnapshot, + }); + } else { + this._cache.set(cacheKey, { + kind: 'done', + result: getFragmentResult(cacheKey, latestSnapshot, storeEpoch), + }); + } callback(); }), ); } - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - const clientEdgeQueryResults = - this._clientEdgeQueryResultsCache?.get(cacheKey) ?? undefined; - if (clientEdgeQueryResults?.length) { - const queryResource = getQueryResourceForEnvironment(this._environment); - clientEdgeQueryResults.forEach(queryResult => { - disposables.push(queryResource.retain(queryResult)); - }); - } + const clientEdgeQueryResults = + this._clientEdgeQueryResultsCache?.get(cacheKey) ?? 
undefined; + if (clientEdgeQueryResults?.length) { + const queryResource = getQueryResourceForEnvironment(this._environment); + clientEdgeQueryResults.forEach(queryResult => { + disposables.push(queryResource.retain(queryResult)); + }); } return { @@ -696,10 +739,26 @@ class FragmentResourceImpl { // Only update the cache when the data is changed to avoid // returning different `data` instances if (didMissUpdates) { - this._cache.set(cacheKey, { - kind: 'done', - result: getFragmentResult(cacheKey, currentSnapshots, storeEpoch), - }); + const result = getFragmentResult( + cacheKey, + currentSnapshots, + storeEpoch, + ); + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + result.isMissingData + ) { + this._cache.set(cacheKey, { + kind: 'missing', + result, + snapshot: currentSnapshots, + }); + } else { + this._cache.set(cacheKey, { + kind: 'done', + result, + }); + } } return [didMissUpdates, currentSnapshots]; } @@ -716,12 +775,29 @@ class FragmentResourceImpl { selector: currentSnapshot.selector, missingRequiredFields: currentSnapshot.missingRequiredFields, relayResolverErrors: currentSnapshot.relayResolverErrors, + errorResponseFields: currentSnapshot.errorResponseFields, }; if (updatedData !== renderData) { - this._cache.set(cacheKey, { - kind: 'done', - result: getFragmentResult(cacheKey, updatedCurrentSnapshot, storeEpoch), - }); + const result = getFragmentResult( + cacheKey, + updatedCurrentSnapshot, + storeEpoch, + ); + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + result.isMissingData + ) { + this._cache.set(cacheKey, { + kind: 'missing', + result: result, + snapshot: updatedCurrentSnapshot, + }); + } else { + this._cache.set(cacheKey, { + kind: 'done', + result, + }); + } } return [updatedData !== renderData, updatedCurrentSnapshot]; } @@ -799,10 +875,22 @@ class FragmentResourceImpl { ? 
[...currentSnapshot] : [...baseSnapshots]; nextSnapshots[idx] = latestSnapshot; - this._cache.set(cacheKey, { - kind: 'done', - result: getFragmentResult(cacheKey, nextSnapshots, storeEpoch), - }); + const result = getFragmentResult(cacheKey, nextSnapshots, storeEpoch); + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE && + result.isMissingData + ) { + this._cache.set(cacheKey, { + kind: 'missing', + result, + snapshot: nextSnapshots, + }); + } else { + this._cache.set(cacheKey, { + kind: 'done', + result, + }); + } } } diff --git a/packages/react-relay/relay-hooks/useBlockingPaginationFragment.js b/packages/react-relay/relay-hooks/legacy/useBlockingPaginationFragment.js similarity index 87% rename from packages/react-relay/relay-hooks/useBlockingPaginationFragment.js rename to packages/react-relay/relay-hooks/legacy/useBlockingPaginationFragment.js index 226d1026c02e5..702e4b8b6d76d 100644 --- a/packages/react-relay/relay-hooks/useBlockingPaginationFragment.js +++ b/packages/react-relay/relay-hooks/legacy/useBlockingPaginationFragment.js @@ -11,9 +11,9 @@ 'use strict'; -import type {RefetchableFragment} from '../../relay-runtime/util/RelayRuntimeTypes'; -import type {LoadMoreFn, UseLoadMoreFunctionArgs} from './useLoadMoreFunction'; +import type {LoadMoreFn, UseLoadMoreFunctionArgs} from '../useLoadMoreFunction'; import type {Options} from './useRefetchableFragmentNode'; +import type {RefetchableFragment} from 'relay-runtime'; import type { Disposable, FragmentType, @@ -22,9 +22,9 @@ import type { Variables, } from 'relay-runtime'; -const useLoadMoreFunction = require('./useLoadMoreFunction'); +const useLoadMoreFunction = require('../useLoadMoreFunction'); +const useStaticFragmentNodeWarning = require('../useStaticFragmentNodeWarning'); const useRefetchableFragmentNode = require('./useRefetchableFragmentNode'); -const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning'); const invariant = require('invariant'); const 
{useCallback, useEffect, useRef, useState} = require('react'); const { @@ -34,15 +34,12 @@ } = require('relay-runtime'); type RefetchVariables = - // NOTE: This $Call ensures that the type of the variables is either: + // NOTE: This type ensures that the type of the variables is either: // - nullable if the provided ref type is non-nullable // - non-nullable if the provided ref type is nullable, and the caller need to provide the full set of variables - // prettier-ignore - $Call< - & (( { +$fragmentSpreads: TFragmentType, ... }) => $Shape) - & ((?{ +$fragmentSpreads: TFragmentType, ... }) => TVariables), - TKey, - >; + [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] + ? Partial : TVariables; type RefetchFnBase = ( vars: TVars, @@ -55,15 +52,12 @@ >; type ReturnType = { - // NOTE: This $Call ensures that the type of the returned data is either: + // NOTE: This type ensures that the type of the returned data is either: // - nullable if the provided ref type is nullable // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - data: $Call< - & (( { +$fragmentSpreads: TFragmentType, ... }) => TData) - & ((?{ +$fragmentSpreads: TFragmentType, ... }) => ?TData), - TKey, - >, + data: [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] + ?
TData + : ?TData, loadNext: LoadMoreFn, loadPrevious: LoadMoreFn, hasNext: boolean, @@ -71,7 +65,7 @@ type ReturnType = { refetch: RefetchFn, }; -function useBlockingPaginationFragment< +hook useBlockingPaginationFragment< TFragmentType: FragmentType, TVariables: Variables, TData, @@ -89,7 +83,6 @@ function useBlockingPaginationFragment< const { connectionPathInFragmentData, - identifierField, paginationRequest, paginationMetadata, stream, @@ -132,7 +125,6 @@ function useBlockingPaginationFragment< fragmentIdentifier, fragmentNode, fragmentRef, - identifierField, paginationMetadata, paginationRequest, }); @@ -148,7 +140,6 @@ function useBlockingPaginationFragment< fragmentIdentifier, fragmentNode, fragmentRef, - identifierField, paginationMetadata, paginationRequest, }); @@ -157,6 +148,7 @@ function useBlockingPaginationFragment< (variables: TVariables, options: void | Options) => { disposeFetchNext(); disposeFetchPrevious(); + // $FlowFixMe[incompatible-variance] return refetch(variables, {...options, __environment: undefined}); }, [disposeFetchNext, disposeFetchPrevious, refetch], @@ -164,6 +156,7 @@ function useBlockingPaginationFragment< return { // $FlowFixMe[incompatible-cast] + // $FlowFixMe[incompatible-return] data: (fragmentData: TData), loadNext, loadPrevious, @@ -173,7 +166,7 @@ function useBlockingPaginationFragment< }; } -function useLoadMore(args: { +hook useLoadMore(args: { disableStoreUpdates: () => void, enableStoreUpdates: () => void, ...$Exact< diff --git a/packages/react-relay/relay-hooks/useFragmentNode.js b/packages/react-relay/relay-hooks/legacy/useFragmentNode.js similarity index 92% rename from packages/react-relay/relay-hooks/useFragmentNode.js rename to packages/react-relay/relay-hooks/legacy/useFragmentNode.js index 5ba5c2f5dc1a0..996d4a8c687a8 100644 --- a/packages/react-relay/relay-hooks/useFragmentNode.js +++ b/packages/react-relay/relay-hooks/legacy/useFragmentNode.js @@ -13,11 +13,11 @@ import type {ReaderFragment} from 
'relay-runtime'; +const useRelayEnvironment = require('../useRelayEnvironment'); +const useUnsafeRef_DEPRECATED = require('../useUnsafeRef_DEPRECATED'); const {getFragmentResourceForEnvironment} = require('./FragmentResource'); -const useRelayEnvironment = require('./useRelayEnvironment'); -const useUnsafeRef_DEPRECATED = require('./useUnsafeRef_DEPRECATED'); const {useEffect, useState} = require('react'); -const {getFragmentIdentifier} = require('relay-runtime'); +const {RelayFeatureFlags, getFragmentIdentifier} = require('relay-runtime'); const warning = require('warning'); type ReturnType = { @@ -26,7 +26,7 @@ type ReturnType = { enableStoreUpdates: () => void, }; -function useFragmentNode( +hook useFragmentNode( fragmentNode: ReaderFragment, fragmentRef: mixed, componentDisplayName: string, @@ -99,7 +99,7 @@ function useFragmentNode( // eslint-disable-next-line react-hooks/exhaustive-deps }, [environment, fragmentIdentifier]); - if (__DEV__) { + if (RelayFeatureFlags.LOG_MISSING_RECORDS_IN_PROD || __DEV__) { if ( fragmentRef != null && (fragmentResult.data === undefined || diff --git a/packages/react-relay/relay-hooks/useRefetchableFragmentNode.js b/packages/react-relay/relay-hooks/legacy/useRefetchableFragmentNode.js similarity index 91% rename from packages/react-relay/relay-hooks/useRefetchableFragmentNode.js rename to packages/react-relay/relay-hooks/legacy/useRefetchableFragmentNode.js index b7c6ead508e7c..f3bac9430145f 100644 --- a/packages/react-relay/relay-hooks/useRefetchableFragmentNode.js +++ b/packages/react-relay/relay-hooks/legacy/useRefetchableFragmentNode.js @@ -11,7 +11,8 @@ 'use strict'; -import type {LoaderFn} from './useQueryLoader'; +import type {LoaderFn} from '../useQueryLoader'; +import type {RefetchableIdentifierInfo} from 'relay-runtime'; import type { ConcreteRequest, Disposable, @@ -25,13 +26,13 @@ import type { VariablesOf, } from 'relay-runtime'; +const ProfilerContext = require('../ProfilerContext'); +const 
{getQueryResourceForEnvironment} = require('../QueryResource'); +const useIsMountedRef = require('../useIsMountedRef'); +const useQueryLoader = require('../useQueryLoader'); +const useRelayEnvironment = require('../useRelayEnvironment'); const {getFragmentResourceForEnvironment} = require('./FragmentResource'); -const ProfilerContext = require('./ProfilerContext'); -const {getQueryResourceForEnvironment} = require('./QueryResource'); const useFragmentNode = require('./useFragmentNode'); -const useIsMountedRef = require('./useIsMountedRef'); -const useQueryLoader = require('./useQueryLoader'); -const useRelayEnvironment = require('./useRelayEnvironment'); const invariant = require('invariant'); const {useCallback, useContext, useReducer} = require('react'); const { @@ -54,16 +55,13 @@ export type RefetchFn< // /nullable/. // - Or, expects /a subset/ of the query variables if the provided key type is // /non-null/. -// prettier-ignore export type RefetchFnDynamic< TQuery: OperationType, - TKey: ?{ +$data?: mixed, ... }, + TKey: ?{+$data?: mixed, ...}, TOptions = Options, -> = $Call< - & (( { +$data?: mixed, ... }) => RefetchFnInexact) - & ((?{ +$data?: mixed, ... }) => RefetchFnExact), - TKey ->; +> = [TKey] extends [{+$data?: mixed, ...}] + ? 
RefetchFnInexact + : RefetchFnExact; export type ReturnType< TQuery: OperationType, @@ -100,7 +98,7 @@ type RefetchFnExact = RefetchFnBase< type RefetchFnInexact< TQuery: OperationType, TOptions = Options, -> = RefetchFnBase<$Shape>, TOptions>; +> = RefetchFnBase>, TOptions>; type Action = | { @@ -164,7 +162,7 @@ function reducer(state: RefetchState, action: Action): RefetchState { } } -function useRefetchableFragmentNode< +hook useRefetchableFragmentNode< TQuery: OperationType, TKey: ?{+$data?: mixed, ...}, >( @@ -173,8 +171,10 @@ function useRefetchableFragmentNode< componentDisplayName: string, ): ReturnType { const parentEnvironment = useRelayEnvironment(); - const {refetchableRequest, fragmentRefPathInResponse, identifierField} = - getRefetchMetadata(fragmentNode, componentDisplayName); + const {refetchableRequest, fragmentRefPathInResponse} = getRefetchMetadata( + fragmentNode, + componentDisplayName, + ); const fragmentIdentifier = getFragmentIdentifier( fragmentNode, parentFragmentRef, @@ -214,6 +214,12 @@ function useRefetchableFragmentNode< >((refetchableRequest: $FlowFixMe)); let fragmentRef = parentFragmentRef; + + const {identifierInfo} = getRefetchMetadata( + fragmentNode, + componentDisplayName, + ); + if (shouldReset) { dispatch({ type: 'reset', @@ -239,6 +245,7 @@ function useRefetchableFragmentNode< debugPreviousIDAndTypename = debugFunctions.getInitialIDAndType( refetchQuery.request.variables, fragmentRefPathInResponse, + identifierInfo?.identifierQueryVariableName, environment, ); } @@ -345,7 +352,7 @@ function useRefetchableFragmentNode< fragmentIdentifier, fragmentNode, fragmentRefPathInResponse, - identifierField, + identifierInfo, loadQuery, parentFragmentRef, refetchableRequest, @@ -353,13 +360,14 @@ function useRefetchableFragmentNode< return { fragmentData, fragmentRef, + // $FlowFixMe[incompatible-return] RefetchFn not compatible with RefetchFnDynamic refetch, disableStoreUpdates, enableStoreUpdates, }; } -function useRefetchFunction( 
+hook useRefetchFunction( componentDisplayName: string, dispatch: ( | { @@ -381,17 +389,17 @@ function useRefetchFunction( fragmentIdentifier: string, fragmentNode: ReaderFragment, fragmentRefPathInResponse: $ReadOnlyArray, - identifierField: ?string, + identifierInfo: ?RefetchableIdentifierInfo, loadQuery: LoaderFn, parentFragmentRef: mixed, refetchableRequest: ConcreteRequest, ): RefetchFn { const isMountedRef = useIsMountedRef(); const identifierValue = - identifierField != null && + identifierInfo?.identifierField != null && fragmentData != null && typeof fragmentData === 'object' - ? fragmentData[identifierField] + ? fragmentData[identifierInfo.identifierField] : null; return useCallback( ( @@ -458,8 +466,10 @@ function useRefetchFunction( // If the query needs an identifier value ('id' or similar) and one // was not explicitly provided, read it from the fragment data. if ( - identifierField != null && - !providedRefetchVariables.hasOwnProperty('id') + identifierInfo != null && + !providedRefetchVariables.hasOwnProperty( + identifierInfo.identifierQueryVariableName, + ) ) { // @refetchable fragments are guaranteed to have an `id` selection // if the type is Node, implements Node, or is @fetchable. 
Double-check @@ -469,11 +479,13 @@ function useRefetchFunction( false, 'Relay: Expected result to have a string ' + '`%s` in order to refetch, got `%s`.', - identifierField, + identifierInfo.identifierField, identifierValue, ); } - (refetchVariables: $FlowFixMe).id = identifierValue; + (refetchVariables: $FlowFixMe)[ + identifierInfo.identifierQueryVariableName + ] = identifierValue; } const refetchQuery = createOperationDescriptor( @@ -522,10 +534,11 @@ if (__DEV__) { getInitialIDAndType( memoRefetchVariables: ?Variables, fragmentRefPathInResponse: $ReadOnlyArray, + identifierQueryVariableName: ?string, environment: IEnvironment, ): ?DebugIDandTypename { const {Record} = require('relay-runtime'); - const id = memoRefetchVariables?.id; + const id = memoRefetchVariables?.[identifierQueryVariableName ?? 'id']; if ( fragmentRefPathInResponse.length !== 1 || fragmentRefPathInResponse[0] !== 'node' || @@ -535,7 +548,7 @@ if (__DEV__) { } const recordSource = environment.getStore().getSource(); const record = recordSource.get(id); - const typename = record && Record.getType(record); + const typename = record == null ? 
null : Record.getType(record); if (typename == null) { return null; } diff --git a/packages/react-relay/relay-hooks/loadEntryPoint.js b/packages/react-relay/relay-hooks/loadEntryPoint.js index 3ee355f217fe1..208e01705d05c 100644 --- a/packages/react-relay/relay-hooks/loadEntryPoint.js +++ b/packages/react-relay/relay-hooks/loadEntryPoint.js @@ -46,8 +46,10 @@ function loadEntryPoint< } const preloadProps = entryPoint.getPreloadProps(entryPointParams); const {queries, entryPoints, extraProps} = preloadProps; - const preloadedQueries: $Shape = {}; - const preloadedEntryPoints: $Shape = {}; + // $FlowFixMe[incompatible-type] + const preloadedQueries: Partial = {}; + // $FlowFixMe[incompatible-type] + const preloadedEntryPoints: Partial = {}; if (queries != null) { const queriesPropNames = Object.keys(queries); queriesPropNames.forEach(queryPropName => { diff --git a/packages/react-relay/relay-hooks/loadQuery.js b/packages/react-relay/relay-hooks/loadQuery.js index 9d00aed8a7fa5..66e8177afea44 100644 --- a/packages/react-relay/relay-hooks/loadQuery.js +++ b/packages/react-relay/relay-hooks/loadQuery.js @@ -24,6 +24,7 @@ import type { IEnvironment, OperationDescriptor, OperationType, + Query, RequestIdentifier, RequestParameters, } from 'relay-runtime'; @@ -45,7 +46,7 @@ const warning = require('warning'); let RenderDispatcher = null; let fetchKey = 100001; -function useTrackLoadQueryInRender() { +hook useTrackLoadQueryInRender() { if (RenderDispatcher === null) { // Flow does not know of React internals (rightly so), but we need to // ensure here that this function isn't called inside render. @@ -56,12 +57,32 @@ function useTrackLoadQueryInRender() { } } +type QueryType = + T extends Query + ? 
{ + variables: V, + response: D, + rawResponse?: $NonMaybeType, + } // $FlowFixMe[deprecated-type] + : $Call<(PreloadableConcreteRequest) => T, T>; + +declare function loadQuery< + T, + TEnvironmentProviderOptions = EnvironmentProviderOptions, +>( + environment: IEnvironment, + preloadableRequest: T, + variables: QueryType['variables'], + options?: ?LoadQueryOptions, + environmentProviderOptions?: ?TEnvironmentProviderOptions, +): PreloadedQueryInner, TEnvironmentProviderOptions>; + function loadQuery< TQuery: OperationType, TEnvironmentProviderOptions = EnvironmentProviderOptions, >( environment: IEnvironment, - preloadableRequest: GraphQLTaggedNode | PreloadableConcreteRequest, + preloadableRequest: PreloadableConcreteRequest, variables: TQuery['variables'], options?: ?LoadQueryOptions, environmentProviderOptions?: ?TEnvironmentProviderOptions, diff --git a/packages/react-relay/relay-hooks/prepareEntryPoint_DEPRECATED.js b/packages/react-relay/relay-hooks/prepareEntryPoint_DEPRECATED.js index 1b71f0d03ff77..3a4638d88600d 100644 --- a/packages/react-relay/relay-hooks/prepareEntryPoint_DEPRECATED.js +++ b/packages/react-relay/relay-hooks/prepareEntryPoint_DEPRECATED.js @@ -45,8 +45,10 @@ function prepareEntryPoint< } const preloadProps = entryPoint.getPreloadProps(entryPointParams); const {queries, entryPoints} = preloadProps; - const preloadedQueries: $Shape = {}; - const preloadedEntryPoints: $Shape = {}; + // $FlowFixMe[incompatible-type] + const preloadedQueries: Partial = {}; + // $FlowFixMe[incompatible-type] + const preloadedEntryPoints: Partial = {}; if (queries != null) { const queriesPropNames = Object.keys(queries); queriesPropNames.forEach(queryPropName => { diff --git a/packages/react-relay/relay-hooks/react-cache/RelayReactCache.js b/packages/react-relay/relay-hooks/react-cache/RelayReactCache.js deleted file mode 100644 index ca270ca9075f3..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/RelayReactCache.js +++ /dev/null @@ -1,40 +0,0 
@@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -const invariant = require('invariant'); -// $FlowFixMe[prop-missing] These exist in experimental builds but aren't in React's types yet. -const {unstable_getCacheForType, unstable_getCacheSignal} = require('react'); -const {RelayFeatureFlags} = require('relay-runtime'); - -function getCacheForType(factory: () => T): T { - invariant( - typeof unstable_getCacheForType === 'function' && - RelayFeatureFlags.USE_REACT_CACHE, - 'RelayReactCache.getCacheForType should only be called when the USE_REACT_CACHE feature flag is enabled and when on an experimental React build that supports it.', - ); - return unstable_getCacheForType(factory); -} - -function getCacheSignal(): AbortSignal { - invariant( - typeof unstable_getCacheSignal === 'function' && - RelayFeatureFlags.USE_REACT_CACHE, - 'RelayReactCache.getCacheSignal should only be called when the USE_REACT_CACHE feature flag is enabled and when on an experimental React build that supports it.', - ); - return unstable_getCacheSignal(); -} - -module.exports = { - getCacheForType, - getCacheSignal, -}; diff --git a/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Fragment.graphql.js b/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Fragment.graphql.js deleted file mode 100644 index 57c1aaa08823f..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Fragment.graphql.js +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<1723d9f143bb98bfff3918381c689702>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { FragmentType } from "relay-runtime"; -declare export opaque type useLazyLoadQueryREACTCACHETest1Fragment$fragmentType: FragmentType; -export type useLazyLoadQueryREACTCACHETest1Fragment$data = {| - +name: ?string, - +$fragmentType: useLazyLoadQueryREACTCACHETest1Fragment$fragmentType, -|}; -export type useLazyLoadQueryREACTCACHETest1Fragment$key = { - +$data?: useLazyLoadQueryREACTCACHETest1Fragment$data, - +$fragmentSpreads: useLazyLoadQueryREACTCACHETest1Fragment$fragmentType, - ... -}; -*/ - -var node/*: ReaderFragment*/ = { - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "useLazyLoadQueryREACTCACHETest1Fragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -}; - -if (__DEV__) { - (node/*: any*/).hash = "cbe8a3b8ca4ae6ac275a86584cb5d227"; -} - -module.exports = ((node/*: any*/)/*: Fragment< - useLazyLoadQueryREACTCACHETest1Fragment$fragmentType, - useLazyLoadQueryREACTCACHETest1Fragment$data, ->*/); diff --git a/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Query.graphql.js b/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Query.graphql.js deleted file mode 100644 index 8184fdd29ed3c..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/__tests__/__generated__/useLazyLoadQueryREACTCACHETest1Query.graphql.js +++ /dev/null @@ -1,163 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { useLazyLoadQueryREACTCACHETest1Fragment$fragmentType } from "./useLazyLoadQueryREACTCACHETest1Fragment.graphql"; -export type useLazyLoadQueryREACTCACHETest1Query$variables = {| - id: string, -|}; -export type useLazyLoadQueryREACTCACHETest1Query$data = {| - +node: ?{| - +__typename: string, - +username?: ?string, - +$fragmentSpreads: useLazyLoadQueryREACTCACHETest1Fragment$fragmentType, - |}, -|}; -export type useLazyLoadQueryREACTCACHETest1Query = {| - response: useLazyLoadQueryREACTCACHETest1Query$data, - variables: useLazyLoadQueryREACTCACHETest1Query$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v2 = { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null -}, -v3 = { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "username", - "storageKey": null -}; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "useLazyLoadQueryREACTCACHETest1Query", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/), - { - "kind": "InlineFragment", - "selections": [ - (v3/*: any*/) - ], - "type": "User", - "abstractKey": null - }, - { - "args": null, - "kind": "FragmentSpread", - "name": "useLazyLoadQueryREACTCACHETest1Fragment" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": 
"Operation", - "name": "useLazyLoadQueryREACTCACHETest1Query", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/), - { - "kind": "InlineFragment", - "selections": [ - (v3/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "098de982a39032743cd9ad9bc5849426", - "id": null, - "metadata": {}, - "name": "useLazyLoadQueryREACTCACHETest1Query", - "operationKind": "query", - "text": "query useLazyLoadQueryREACTCACHETest1Query(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n username\n }\n ...useLazyLoadQueryREACTCACHETest1Fragment\n id\n }\n}\n\nfragment useLazyLoadQueryREACTCACHETest1Fragment on User {\n name\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "f65d6d0f38c33ef45c2fd0cb1e138dbc"; -} - -module.exports = ((node/*: any*/)/*: Query< - useLazyLoadQueryREACTCACHETest1Query$variables, - useLazyLoadQueryREACTCACHETest1Query$data, ->*/); diff --git a/packages/react-relay/relay-hooks/react-cache/__tests__/useLazyLoadQuery_REACT_CACHE-test.js b/packages/react-relay/relay-hooks/react-cache/__tests__/useLazyLoadQuery_REACT_CACHE-test.js deleted file mode 100644 index 55f53f8ba6265..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/__tests__/useLazyLoadQuery_REACT_CACHE-test.js +++ /dev/null @@ -1,1202 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow - * @format - * @oncall relay - */ - -'use strict'; -import type { - LogRequestInfoFunction, - UploadableMap, -} from '../../../../relay-runtime/network/RelayNetworkTypes'; -import type {Sink} from '../../../../relay-runtime/network/RelayObservable'; -import type {RequestParameters} from '../../../../relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from '../../../../relay-runtime/util/RelayRuntimeTypes'; -import type {FetchPolicy, GraphQLResponse, RenderPolicy} from 'relay-runtime'; -import type {LogEvent} from 'relay-runtime/store/RelayStoreTypes'; - -const React = require('react'); -const useLazyLoadQuery_REACT_CACHE = require('react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE'); -const RelayEnvironmentProvider = require('react-relay/relay-hooks/RelayEnvironmentProvider'); -const useLazyLoadQuery_LEGACY = require('react-relay/relay-hooks/useLazyLoadQuery'); -const ReactTestRenderer = require('react-test-renderer'); -const { - __internal: {getPromiseForActiveRequest}, - Environment, - RecordSource, - RelayFeatureFlags, - ROOT_ID, - Store, - createOperationDescriptor, - graphql, -} = require('relay-runtime'); -const RelayNetwork = require('relay-runtime/network/RelayNetwork'); -const RelayObservable = require('relay-runtime/network/RelayObservable'); -const {ROOT_TYPE} = require('relay-runtime/store/RelayStoreUtils'); -const RelayReplaySubject = require('relay-runtime/util/RelayReplaySubject'); -const { - disallowConsoleErrors, - disallowConsoleWarnings, - disallowWarnings, - expectConsoleErrorWillFire, - trackRetentionForEnvironment, -} = require('relay-test-utils-internal'); - -// $FlowExpectedError[prop-missing] Cache not yet part of Flow types -const {unstable_Cache, useState} = React; -const Cache = unstable_Cache ?? 
React.Fragment; // Tempporary: for OSS builds still on 17 - -function isPromise(p: any) { - return typeof p.then === 'function'; -} - -/* -Inputs in the QueryResource test suite: -Network policy -Render policy -Query can be fulfilled from store -Query is stale in store -Read multiple times -Does the fetch return synchronously -Does the network request error -Does the component commit -Is it using a fragment -Is it incremental -Are we unsubscribing and then re-subscribing an in-flight query <— not sure what I meant by this??? -Is it a live query -Is it retained multiple times (multiple components committed) -Is it SSR - -Output: -Does it suspend (and until what incremental stage?) -Does it throw an error -Does it send a network request (how many?) -Does it retain and release (not until all components finished) -Does it re-use the same promise -Does it cancel the network request in flight upon release - - -Possibly special tests: -correctly retains query when releasing and re-retaining - */ - -describe('useLazyLoadQuery_REACT_CACHE', () => { - beforeAll(() => { - disallowConsoleErrors(); - disallowConsoleWarnings(); - disallowWarnings(); - }); - - graphql` - fragment useLazyLoadQueryREACTCACHETest1Fragment on User { - name - } - `; - const query = graphql` - query useLazyLoadQueryREACTCACHETest1Query($id: ID!) { - node(id: $id) { - __typename - ... 
on User { - username - } - ...useLazyLoadQueryREACTCACHETest1Fragment - } - } - `; - const variables = {id: '1'}; - const responsePayload = { - node: { - __typename: 'User', - id: variables.id, - name: 'Alice', - username: 'abc', - }, - }; - const operation = createOperationDescriptor(query, variables, { - force: true, - }); - - type TestInputs = { - fetchPolicy: FetchPolicy, - renderPolicy: RenderPolicy, - availability: - | 'available' - | 'stale' - | 'missing' - | 'available-root-fragment-only', - }; - type TestOutputs = { - shouldAwaitFetchResult: boolean, - shouldFetch: boolean, - shouldBeMissingData?: boolean, - }; - type Test = [TestInputs, TestOutputs]; - - describe.each([ - ['React Cache', useLazyLoadQuery_REACT_CACHE, false], - ['React Cache with Legacy Timeouts', useLazyLoadQuery_REACT_CACHE, true], - ['Legacy', useLazyLoadQuery_LEGACY, false], - ])( - 'Hook implementation: %s', - (_hookName, useLazyLoadQuery, shouldEnableLegacyTimeouts) => { - const usingReactCache = useLazyLoadQuery === useLazyLoadQuery_REACT_CACHE; - // Our open-source build is still on React 17, so we need to skip these tests there: - if (usingReactCache) { - // $FlowExpectedError[prop-missing] Cache not yet part of Flow types - if (React.unstable_getCacheForType === undefined) { - return; - } - } - let originalReactCacheFeatureFlag; - beforeEach(() => { - originalReactCacheFeatureFlag = RelayFeatureFlags.USE_REACT_CACHE; - RelayFeatureFlags.USE_REACT_CACHE = usingReactCache; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE = originalReactCacheFeatureFlag; - }); - - let originalReactCacheTimeoutFeatureFlag; - beforeEach(() => { - originalReactCacheTimeoutFeatureFlag = - RelayFeatureFlags.USE_REACT_CACHE_LEGACY_TIMEOUTS; - RelayFeatureFlags.USE_REACT_CACHE_LEGACY_TIMEOUTS = - shouldEnableLegacyTimeouts; - }); - afterEach(() => { - RelayFeatureFlags.USE_REACT_CACHE_LEGACY_TIMEOUTS = - originalReactCacheTimeoutFeatureFlag; - }); - - let environment; - let fetch; - let 
subject: RelayReplaySubject; - let logs; - let release; - let isOperationRetained; - - let errorBoundaryDidCatchFn; - class ErrorBoundary extends React.Component { - state: {error: ?Error} = {error: null}; - componentDidCatch(error: Error) { - errorBoundaryDidCatchFn(error); - this.setState({error}); - } - render(): React.Node { - const {children, fallback} = this.props; - const {error} = this.state; - if (error) { - return React.createElement(fallback, {error}); - } - return children; - } - } - - function Wrappers({ - env, - children, - }: { - children: React.Node, - env: Environment, - }) { - return ( - - - `Error: ${error.message + ': ' + error.stack}` - }> - {children} - - - ); - } - - beforeEach(() => { - jest.clearAllTimers(); - errorBoundaryDidCatchFn = jest.fn<[Error], mixed>(); - logs = ([]: Array); - subject = new RelayReplaySubject(); - fetch = jest.fn( - ( - _query: ?( - | LogRequestInfoFunction - | UploadableMap - | RequestParameters - | Variables - | CacheConfig - ), - _vars: ?( - | LogRequestInfoFunction - | UploadableMap - | RequestParameters - | Variables - | CacheConfig - ), - config: ?( - | LogRequestInfoFunction - | UploadableMap - | RequestParameters - | Variables - | CacheConfig - ), - ) => { - return RelayObservable.create((sink: Sink) => { - subject.subscribe(sink); - }); - }, - ); - environment = new Environment({ - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - network: RelayNetwork.create(fetch), - store: new Store(new RecordSource(), { - gcReleaseBufferSize: 0, - gcScheduler: f => f(), - }), - log: event => { - logs.push(event); - }, - }); - // $FlowExpectedError[method-unbinding] - // $FlowExpectedError[cannot-write] - environment.execute = jest.fn(environment.execute.bind(environment)); - ({release_DEPRECATED: release, isOperationRetained} = - trackRetentionForEnvironment(environment)); - }); - - // NB we can remove a level of nesting here (the inner arrays) if we can - // figure out how to Flow-type 
the interface of describe.each. It accepts - // an array of either arrays of arguments or single arguments (which is what - // would allow us to remove second array level). - describe.each( - ([ - /******** store-or-network ********/ - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'full', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'full', - availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'full', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'full', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'partial', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'partial', - availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'partial', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-or-network', - renderPolicy: 'partial', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: true, - }, - ], - - /******** network-only ********/ - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'full', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'full', - availability: 'available', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - 
fetchPolicy: 'network-only', - renderPolicy: 'full', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'full', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'partial', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'partial', - availability: 'available', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'partial', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'network-only', - renderPolicy: 'partial', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - - /******** store-and-network ********/ - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'full', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'full', - availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'full', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'full', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'partial', - availability: 'missing', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'partial', - 
availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'partial', - availability: 'stale', - }, - { - shouldAwaitFetchResult: true, - shouldFetch: true, - }, - ], - [ - { - fetchPolicy: 'store-and-network', - renderPolicy: 'partial', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: true, - }, - ], - - /******** store-only ********/ - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'full', - availability: 'missing', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - shouldBeMissingData: true, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'full', - availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'full', - availability: 'stale', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'full', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'partial', - availability: 'missing', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - shouldBeMissingData: true, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'partial', - availability: 'available', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'partial', - availability: 'stale', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - [ - { - fetchPolicy: 'store-only', - renderPolicy: 'partial', - availability: 'available-root-fragment-only', - }, - { - shouldAwaitFetchResult: false, - shouldFetch: false, - }, - ], - ]: Array), - )( - 'Suspending and fetching behavior: %o', - ( - {fetchPolicy, renderPolicy, 
availability}, - {shouldAwaitFetchResult, shouldFetch, shouldBeMissingData}, - ) => { - // If we're supposed to be performing a fetch, perform tests for whether - // the fetch returns data synchronously and whether it results in an error. - // If there's no fetch being performed, this is irrelevant so just do one test. - let innerTable; - if (shouldFetch) { - innerTable = [ - [{responseIsSynchronous: false, responseIsRejected: false}], - [{responseIsSynchronous: false, responseIsRejected: true}], - [{responseIsSynchronous: true, responseIsRejected: false}], - [{responseIsSynchronous: true, responseIsRejected: true}], - ]; - } else { - innerTable = [[{}]]; - } - it.each(innerTable)( - 'With the response (or {} if not applicable) being: %o', - ({responseIsSynchronous, responseIsRejected}) => { - // Assertions about the test table itself, because these are things that - // should always be true: - if (fetchPolicy === 'network-only') { - expect(shouldAwaitFetchResult).toBe(true); - expect(shouldFetch).toBe(true); - } - - if (fetchPolicy === 'store-only') { - expect(shouldAwaitFetchResult).toBe(false); - expect(shouldFetch).toBe(false); - } else { - if (renderPolicy === 'full' && availability !== 'available') { - expect(shouldAwaitFetchResult).toBe(true); - expect(shouldFetch).toBe(true); - } - if (availability === 'stale') { - expect(shouldAwaitFetchResult).toBe(true); - expect(shouldFetch).toBe(true); - } - } - - const shouldSuspend = - shouldAwaitFetchResult && !responseIsSynchronous; - const shouldThrowError = - responseIsRejected && - (shouldAwaitFetchResult || responseIsSynchronous); - - const thrownPromises = new Set(); - let numberOfRendersObserved = 0; - function TestComponent({output}: {output: boolean}) { - numberOfRendersObserved++; - try { - const data = useLazyLoadQuery(query, variables, { - fetchPolicy, - UNSTABLE_renderPolicy: renderPolicy, - }); - if (output) { - return data.node?.username ?? 
'Data is missing'; - } else { - return null; - } - } catch (p) { - if (isPromise(p)) { - thrownPromises.add(p); - } - throw p; - } - } - - if (availability === 'available' || availability === 'stale') { - // The data needed to fulfill the query, including sub-fragments: - environment.commitPayload(operation, responsePayload); - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - } else if (availability === 'available-root-fragment-only') { - // Not the data needed to fulfill the entire query, just the data needed - // for the top-level root fragment, to demonstrate partial rendering: - environment.commitUpdate(store => { - let root = store.get(ROOT_ID); - if (!root) { - root = store.create(ROOT_ID, ROOT_TYPE); - } - const record = store.create(variables.id, 'User'); - record.setValue('abc', 'username'); - record.setValue(variables.id, 'id'); - root.setLinkedRecord(record, 'node', {id: variables.id}); - }); - } - - if (availability === 'stale') { - environment.commitUpdate(storeProxy => { - storeProxy.invalidateStore(); - }); - expect(environment.check(operation)).toEqual({status: 'stale'}); - } - - function deliverNetworkResponse() { - if (responseIsRejected) { - subject.error(new Error('Error message')); - } else { - subject.next({data: responsePayload}); - subject.complete(); - } - } - - if (responseIsSynchronous) { - deliverNetworkResponse(); - } - - // Create the React tree and then cause it to be re-rendered some - // number of times, to test idempotence. We perform an initial render first - // without the Cache boundary (or its contents) so that we can later unmount - // that Cache boundary -- if it was all rendered together it would share - // a cache with the root, and currently TestRenderer has no way to destroy - // the root to test unmounting. Also, we have to render the cache boundary before - // the test component so that the boundary is created even if we suspend. 
- const container = ReactTestRenderer.create(
); - container.update(); - - const numberOfRenders = 2; - const numberOfComponents = 2; - - if (shouldThrowError && responseIsSynchronous) { - expectConsoleErrorWillFire( - 'The above error occurred in the component', - {count: numberOfComponents}, - ); - } - for (let i = 0; i < numberOfRenders; i++) { - container.update( - - - {new Array(numberOfComponents) - .fill() - .map((a, k) => ( - - ))} - - , - ); - } - expect(numberOfRendersObserved).toBe( - responseIsSynchronous && responseIsRejected - ? numberOfComponents - : numberOfRenders * numberOfComponents, - ); - expect(thrownPromises.size).toBe(shouldSuspend ? 1 : 0); // Ensure same promise is re-thrown each time - // $FlowExpectedError[method-unbinding] - expect(environment.execute).toBeCalledTimes(shouldFetch ? 1 : 0); - // $FlowExpectedError[method-unbinding] - expect(environment.retain).toBeCalledTimes(1); - expect(release).toBeCalledTimes(0); - - const promise = getPromiseForActiveRequest( - environment, - operation.request, - ); - expect(promise != null).toEqual( - shouldFetch && !responseIsSynchronous, // if synchronous, request will no longer be active - ); - if (promise != null) { - promise.catch(() => {}); // Avoid "Possible Unhandled Promise Rejection" guardrail. 
- } - - if (shouldSuspend) { - expect(container.toJSON()).toBe('Fallback'); - expect(fetch).toBeCalledTimes(1); // No duplicate requests - if (shouldThrowError) { - expectConsoleErrorWillFire( - 'The above error occurred in the component', - {count: numberOfComponents}, - ); - } - ReactTestRenderer.act(() => { - deliverNetworkResponse(); - jest.runAllImmediates(); - }); - } else { - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); - } - - const output = container.toJSON(); - if (shouldThrowError) { - expect(output).toEqual(expect.stringContaining('Error')); - } else if (shouldBeMissingData) { - expect(output).toBe('Data is missing'); - } else { - expect(output).toBe('abc'); - } - - // $FlowExpectedError[method-unbinding] - expect(environment.retain).toBeCalledTimes(1); - expect(release).toBeCalledTimes(0); - container.unmount(); - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); - - // $FlowExpectedError[method-unbinding] - expect(environment.retain).toBeCalledTimes(1); - - // With the new implementation we support two behaviors: - // The new behavior, in which unmounting the Cache component - // above our component causes us to release; - // And the legacy behavior, where in some cases it isn't released - // until a timeout. - if (usingReactCache && !shouldEnableLegacyTimeouts) { - expect(release).toBeCalledTimes(1); - } else { - // We rely on the timeout only in case we throw an error, otherwise we'll have - // switched to a permanent retain and then released when the tree unmounted. - if (!shouldThrowError) { - expect(release).toBeCalledTimes(1); - } else { - expect(release).toBeCalledTimes(0); - ReactTestRenderer.act(() => { - jest.runAllTimers(); - }); - expect(release).toBeCalledTimes(1); - } - } - }, - ); - }, - ); - - it('Distinguishes environments', () => { - // Ensures that the pending/error/resolved state of a query (as opposed - // to just the resulting value) is distinguished from one environment - // to another. 
A regression test. - // Create two enviroments where one has the data available and the other not: - const env1 = new Environment({ - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - network: RelayNetwork.create(fetch), - store: new Store(new RecordSource()), - }); - const env2 = new Environment({ - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - network: RelayNetwork.create(fetch), - store: new Store(new RecordSource()), - }); - env1.commitPayload(operation, responsePayload); - - // Render the same component within each of the enviroments but - // sharing a React Cache component: - function UsesQuery(_props: {}) { - const data = useLazyLoadQuery(query, variables); - return data.node?.username ?? 'Data is missing'; - } - function TestComponent(_props: {}) { - return ( - - - - - - - - - ); - } - - const container = ReactTestRenderer.create( - - - - - , - ); - - // If this behavior were not met, it would be 'Data missing' instead of - // 'Fallback' because the read in the second environment would have seen - // the 'resolved' state of the query from the first environment since they - // share a Cache. 
- expect(container.toJSON()).toEqual(['abc', 'Fallback']); - - container.unmount(); - ReactTestRenderer.act(() => { - jest.runAllImmediates(); - }); - }); - - it('Honors fetchKey', () => { - let setFetchKey; - function TestComponent(_props: {}) { - let fetchKey; - [fetchKey, setFetchKey] = useState(0); - return useLazyLoadQuery(query, variables, { - fetchKey, - fetchPolicy: 'network-only', - })?.node?.username; - } - const container = ReactTestRenderer.create( - - - - - , - ); - expect(container.toJSON()).toBe('Fallback'); - - ReactTestRenderer.act(() => { - subject.next({data: responsePayload}); - subject.complete(); - jest.runAllImmediates(); - }); - expect(container.toJSON()).toBe('abc'); - - // When we set the fetchKey, the component should suspend again as it initiates a - // new network request and awaits its response: - ReactTestRenderer.act(() => { - subject = new RelayReplaySubject(); // prepare new network response instead of replaying last one - setFetchKey(1); - }); - expect(container.toJSON()).toBe('Fallback'); - - ReactTestRenderer.act(() => { - subject.next({data: responsePayload}); - subject.complete(); - jest.runAllImmediates(); - }); - expect(container.toJSON()).toBe('abc'); - - ReactTestRenderer.act(() => { - container.unmount(); - }); - }); - - it('Retains the query when two components use the same query and one of them unmounts while the other is suspended', () => { - function UsesQuery(_props: {}) { - useLazyLoadQuery(query, variables); - return null; - } - let unsuspend; - let promise: void | null | Promise; - function UsesQueryButAlsoSeparatelySuspends(_props: {}) { - const data = useLazyLoadQuery(query, variables); - if (promise === undefined) { - promise = new Promise(r => { - unsuspend = () => { - promise = null; - r(); - }; - }); - } - if (promise != null) { - throw promise; - } - return data.node?.username ?? 
'Data is missing'; - } - - let unmountChild; - function TestComponent(_props: {}) { - const [hasChild, setHasChild] = useState(true); - unmountChild = () => setHasChild(false); - return ( - <> - {hasChild && } - - - - - ); - } - - environment.commitPayload(operation, responsePayload); - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - - const container = ReactTestRenderer.create(
); - container.update(); - container.update( - - - - - , - ); - expect(container.toJSON()).toBe('Inner Fallback'); - - ReactTestRenderer.act(() => unmountChild()); - expect(container.toJSON()).toBe('Inner Fallback'); - // This is what would happen if we were to solve this edge case - // completely, but it seems like we cannot currently do so, so the - // solution is to just re-fetch the query if needed when the second - // component comes out of suspense. - // NB: The QueryResource/SuspensResource implementation does handle - // this -- it looks like it's creating two separate cache entries with - // the same key! - // expect(isOperationRetained(operation)).toBe(true); - ReactTestRenderer.act(() => { - unsuspend(); - jest.runAllImmediates(); - }); - expect(container.toJSON()).toBe('abc'); - expect(isOperationRetained(operation)).toBe(true); - }); - - it('Handles this other weird situation that it initially did not handle', () => { - // This is a regression test for a situation that hit a bug initially where the retain - // count was being updated on an out-of-date cache entry instead of the correct one. - function UsesQuery(_props: {}) { - useLazyLoadQuery(query, variables); - return null; - } - let unsuspend; - let promise: void | null | Promise; - function UsesQueryButAlsoSeparatelySuspends(_props: {}) { - const data = useLazyLoadQuery(query, variables); - if (promise === undefined) { - promise = new Promise(r => { - unsuspend = () => { - promise = null; - r(); - }; - }); - throw promise; - } - return data.node?.username ?? 'Data is missing'; - } - - let unmountChild; - function TestComponent(_props: {}) { - const [hasChild, setHasChild] = useState(true); - unmountChild = () => setHasChild(false); - return ( - <> - {hasChild && } - - - - - ); - } - - environment.commitPayload(operation, responsePayload); - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - - const container = ReactTestRenderer.create(
); - container.update(); - container.update( - - - - - , - ); - expect(container.toJSON()).toBe('Inner Fallback'); - - ReactTestRenderer.act(() => unmountChild()); - ReactTestRenderer.act(() => { - unsuspend(); - jest.runAllImmediates(); - }); - expect(container.toJSON()).toBe('abc'); - expect(isOperationRetained(operation)).toBe(true); - }); - - it('Handles a second component needing a timeout', () => { - // In legacy timeouts mode, make sure that we handle this sequnece of events: - // 1 component accesses an uninitialized entry, initializes it and starts the timer - // 2 component mounts - // 3 a new component accesses this entry on render - // 4 before the new component (3) mounts, the earlier component (1) unmounts and removes the entry. - function Component1(_props: {}) { - useLazyLoadQuery(query, variables); - return null; - } - - let unsuspendComponent2; - let promise: void | null | Promise; - function Component2(_props: {}) { - const data = useLazyLoadQuery(query, variables); - if (promise === undefined) { - promise = new Promise(r => { - unsuspendComponent2 = () => { - promise = null; - r(); - }; - }); - } - if (promise != null) { - throw promise; - } - return data.node?.username ?? 'Data is missing'; - } - - environment.commitPayload(operation, responsePayload); - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - - let hideComponent1; - let showComponent2; - function TestComponent(_props: {}) { - const [component1Visible, setComponent1Visible] = useState(true); - const [component2Visible, setComponent2Visible] = useState(false); - hideComponent1 = () => setComponent1Visible(false); - showComponent2 = () => setComponent2Visible(true); - - return ( - <> - {component1Visible && } - - {component2Visible && } - - - ); - } - - const container = ReactTestRenderer.create(
); - container.update(); - container.update( - - - - - , - ); - - // Bring Component2 into the tree; it will suspend. - // While it's suspended, remove component 1. - ReactTestRenderer.act(() => { - showComponent2(); - jest.runAllImmediates(); - }); - ReactTestRenderer.act(() => { - hideComponent1(); - jest.runAllImmediates(); - jest.runAllTimers(); - }); - - // Now Component2 should have still retained the query and be able to read it: - expect(isOperationRetained(operation)).toBe(true); - ReactTestRenderer.act(() => { - unsuspendComponent2(); - jest.runAllImmediates(); - }); - expect(container.toJSON()).toBe('abc'); - expect(isOperationRetained(operation)).toBe(true); - }); - }, - ); -}); diff --git a/packages/react-relay/relay-hooks/react-cache/getQueryResultOrFetchQuery_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/getQueryResultOrFetchQuery_REACT_CACHE.js deleted file mode 100644 index 4b0a4b0f5013a..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/getQueryResultOrFetchQuery_REACT_CACHE.js +++ /dev/null @@ -1,430 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type { - FetchPolicy, - GraphQLResponse, - IEnvironment, - Observable, - OperationDescriptor, - ReaderFragment, - RenderPolicy, -} from 'relay-runtime'; - -const SuspenseResource = require('../SuspenseResource'); -const {getCacheForType, getCacheSignal} = require('./RelayReactCache'); -const invariant = require('invariant'); -const { - __internal: {fetchQuery: fetchQueryInternal}, - RelayFeatureFlags, -} = require('relay-runtime'); -const warning = require('warning'); - -type QueryCacheCommitable = () => () => void; - -type QueryResult = { - fragmentNode: ReaderFragment, - fragmentRef: mixed, -}; - -// Note that the status of a cache entry will be 'resolved' when partial -// rendering is allowed, even if a fetch is ongoing. The pending status -// is specifically to indicate that we should suspend. -// Note also that the retainCount is different from the retain count of -// an operation, which is maintained by the Environment. This retain -// count is used in Legacy Timeouts mode to count how many components -// are mounted that use the entry, plus one count for the temporary retain -// before any components have mounted. It is unused when Legacy Timeouts -// mode is off. -type QueryCacheEntryStatus = - | { - status: 'resolved', - result: QueryResult, - } - | { - status: 'pending', - promise: Promise, - } - | { - status: 'rejected', - error: Error, - }; - -type QueryCacheEntry = { - ...QueryCacheEntryStatus, - onCommit: QueryCacheCommitable, - suspenseResource: SuspenseResource | null, -}; - -const DEFAULT_FETCH_POLICY = 'store-or-network'; - -const WEAKMAP_SUPPORTED = typeof WeakMap === 'function'; - -interface IMap { - delete(key: K): boolean; - get(key: K): V | void; - set(key: K, value: V): IMap; -} - -type QueryCacheKey = string; - -class QueryCache { - _map: IMap>; - - constructor() { - this._map = WEAKMAP_SUPPORTED ? 
new WeakMap() : new Map(); - } - - get(environment: IEnvironment, key: QueryCacheKey): QueryCacheEntry | void { - let forEnv = this._map.get(environment); - if (!forEnv) { - forEnv = new Map(); - this._map.set(environment, forEnv); - } - return forEnv.get(key); - } - - set( - environment: IEnvironment, - key: QueryCacheKey, - value: QueryCacheEntry, - ): void { - let forEnv = this._map.get(environment); - if (!forEnv) { - forEnv = new Map(); - this._map.set(environment, forEnv); - } - forEnv.set(key, value); - } - - delete(environment: IEnvironment, key: QueryCacheKey): void { - const forEnv = this._map.get(environment); - if (!forEnv) { - return; - } - forEnv.delete(key); - if (forEnv.size === 0) { - this._map.delete(environment); - } - } -} - -function createQueryCache(): QueryCache { - return new QueryCache(); -} - -const noopOnCommit = () => { - return () => undefined; -}; - -const noopPromise = new Promise(() => {}); - -function getQueryCacheKey( - operation: OperationDescriptor, - fetchPolicy: FetchPolicy, - renderPolicy: RenderPolicy, - fetchKey?: ?string | ?number, -): QueryCacheKey { - return `${fetchPolicy}-${renderPolicy}-${operation.request.identifier}-${ - fetchKey ?? 
'' - }`; -} - -function constructQueryResult(operation: OperationDescriptor): QueryResult { - const rootFragmentRef = { - __id: operation.fragment.dataID, - __fragments: { - [operation.fragment.node.name]: operation.request.variables, - }, - __fragmentOwner: operation.request, - }; - return { - fragmentNode: operation.request.node.fragment, - fragmentRef: rootFragmentRef, - }; -} - -function makeInitialCacheEntry() { - return { - status: 'pending', - promise: noopPromise, - onCommit: noopOnCommit, - suspenseResource: null, - }; -} - -function getQueryResultOrFetchQuery_REACT_CACHE( - environment: IEnvironment, - queryOperationDescriptor: OperationDescriptor, - options?: { - fetchPolicy?: FetchPolicy, - renderPolicy?: RenderPolicy, - fetchKey?: ?string | ?number, - fetchObservable?: Observable, - }, -): [QueryResult, QueryCacheCommitable] { - const fetchPolicy = options?.fetchPolicy ?? DEFAULT_FETCH_POLICY; - const renderPolicy = - options?.renderPolicy ?? environment.UNSTABLE_getDefaultRenderPolicy(); - - const cache = getCacheForType(createQueryCache); - - const cacheKey = getQueryCacheKey( - queryOperationDescriptor, - fetchPolicy, - renderPolicy, - options?.fetchKey, - ); - - const initialEntry = cache.get(environment, cacheKey); - - function updateCache( - updater: QueryCacheEntryStatus => QueryCacheEntryStatus, - ) { - let currentEntry = cache.get(environment, cacheKey); - if (!currentEntry) { - currentEntry = makeInitialCacheEntry(); - cache.set(environment, cacheKey, currentEntry); - } - // $FlowExpectedError[prop-missing] Extra properties are passed in -- this is fine - const newStatus: {...} = updater(currentEntry); - // $FlowExpectedError[cannot-spread-inexact] Flow cannot understand that this is valid... - cache.set(environment, cacheKey, {...currentEntry, ...newStatus}); - // ... but we can because QueryCacheEntry spreads QueryCacheEntryStatus, so spreading - // a QueryCacheEntryStatus into a QueryCacheEntry will result in a valid QueryCacheEntry. 
- } - - // Initiate a query to fetch the data if needed: - if (RelayFeatureFlags.USE_REACT_CACHE_LEGACY_TIMEOUTS) { - let entry; - if (initialEntry === undefined) { - onCacheMiss( - environment, - queryOperationDescriptor, - fetchPolicy, - renderPolicy, - updateCache, - options?.fetchObservable, - ); - const createdEntry = cache.get(environment, cacheKey); - invariant( - createdEntry !== undefined, - 'An entry should have been created by onCacheMiss. This is a bug in Relay.', - ); - entry = createdEntry; - } else { - entry = initialEntry; - } - if (!entry.suspenseResource) { - entry.suspenseResource = new SuspenseResource(() => { - const retention = environment.retain(queryOperationDescriptor); - return { - dispose: () => { - retention.dispose(); - cache.delete(environment, cacheKey); - }, - }; - }); - } - if (entry.onCommit === noopOnCommit) { - entry.onCommit = () => { - invariant( - entry.suspenseResource, - 'SuspenseResource should have been initialized. This is a bug in Relay.', - ); - const retention = entry.suspenseResource.permanentRetain(environment); - return () => { - retention.dispose(); - }; - }; - } - entry.suspenseResource.temporaryRetain(environment); - } else { - if (initialEntry === undefined) { - // This is the behavior we eventually want: We retain the query until the - // presiding Cache component unmounts, at which point the AbortSignal - // will be triggered. - onCacheMiss( - environment, - queryOperationDescriptor, - fetchPolicy, - renderPolicy, - updateCache, - options?.fetchObservable, - ); - - // Since this is the first time rendering, retain the query. React will - // trigger the abort signal when this cache entry is no longer needed. 
- const retention = environment.retain(queryOperationDescriptor); - - const dispose = () => { - retention.dispose(); - cache.delete(environment, cacheKey); - }; - const abortSignal = getCacheSignal(); - abortSignal.addEventListener('abort', dispose, {once: true}); - } - } - - const entry = cache.get(environment, cacheKey); // could be a different entry now if synchronously resolved - invariant( - entry !== undefined, - 'An entry should have been created by onCacheMiss. This is a bug in Relay.', - ); - switch (entry.status) { - case 'pending': - throw entry.promise; - case 'rejected': - throw entry.error; - case 'resolved': - return [entry.result, entry.onCommit]; - } - invariant(false, 'switch statement should be exhaustive'); -} - -function onCacheMiss( - environment: IEnvironment, - operation: OperationDescriptor, - fetchPolicy: FetchPolicy, - renderPolicy: RenderPolicy, - updateCache: ((QueryCacheEntryStatus) => QueryCacheEntryStatus) => void, - customFetchObservable?: Observable, -): void { - // NB: Besides checking if the data is available, calling `check` will write missing - // data to the store using any missing data handlers specified in the environment. 
- const queryAvailability = environment.check(operation); - const queryStatus = queryAvailability.status; - const hasFullQuery = queryStatus === 'available'; - const canPartialRender = - hasFullQuery || (renderPolicy === 'partial' && queryStatus !== 'stale'); - - let shouldFetch; - let shouldRenderNow; - switch (fetchPolicy) { - case 'store-only': { - shouldFetch = false; - shouldRenderNow = true; - break; - } - case 'store-or-network': { - shouldFetch = !hasFullQuery; - shouldRenderNow = canPartialRender; - break; - } - case 'store-and-network': { - shouldFetch = true; - shouldRenderNow = canPartialRender; - break; - } - case 'network-only': - default: { - shouldFetch = true; - shouldRenderNow = false; - break; - } - } - - if (shouldFetch) { - executeOperationAndKeepUpToDate( - environment, - operation, - updateCache, - customFetchObservable, - ); - updateCache(existing => { - switch (existing.status) { - case 'resolved': - return existing; - case 'rejected': - return existing; - case 'pending': - return shouldRenderNow - ? { - status: 'resolved', - result: constructQueryResult(operation), - } - : existing; - } - }); - } else { - invariant( - shouldRenderNow, - 'Should either fetch or be willing to render. This is a bug in Relay.', - ); - updateCache(_existing => ({ - status: 'resolved', - result: constructQueryResult(operation), - })); - } -} - -function executeOperationAndKeepUpToDate( - environment: IEnvironment, - operation: OperationDescriptor, - updateCache: ((QueryCacheEntryStatus) => QueryCacheEntryStatus) => void, - customFetchObservable?: Observable, -) { - let resolvePromise; - const promise = new Promise(r => { - resolvePromise = r; - }); - // $FlowExpectedError[prop-missing] Expando to annotate Promises. - promise.displayName = 'Relay(' + operation.request.node.operation.name + ')'; - - let isFirstPayload = true; - - // FIXME We may still need to cancel network requests for live queries. - const fetchObservable = - customFetchObservable ?? 
fetchQueryInternal(environment, operation); - fetchObservable.subscribe({ - start: subscription => {}, - error: error => { - if (isFirstPayload) { - updateCache(_existing => ({ - status: 'rejected', - error, - })); - } else { - // TODO:T92030819 Remove this warning and actually throw the network error - // To complete this task we need to have a way of precisely tracking suspendable points - warning( - false, - 'getQueryResultOrFetchQuery: An incremental payload for query `%` returned an error: `%`:`%`.', - operation.request.node.operation.name, - error.message, - error.stack, - ); - } - resolvePromise(); - isFirstPayload = false; - }, - next: response => { - // Stop suspending on the first payload because of streaming, defer, etc. - updateCache(_existing => ({ - status: 'resolved', - result: constructQueryResult(operation), - })); - resolvePromise(); - isFirstPayload = false; - }, - }); - - // If the above subscription yields a value synchronously, then one of the updates - // above will have already happened and we'll now be in a resolved or rejected state. - // But in the usual case, we save the promise to the entry here: - updateCache(existing => - existing.status === 'pending' ? {status: 'pending', promise} : existing, - ); -} - -module.exports = getQueryResultOrFetchQuery_REACT_CACHE; diff --git a/packages/react-relay/relay-hooks/react-cache/readFragmentInternal_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/readFragmentInternal_REACT_CACHE.js deleted file mode 100644 index 1a1bb944d9d67..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/readFragmentInternal_REACT_CACHE.js +++ /dev/null @@ -1,297 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {QueryResult} from '../QueryResource'; -import type { - CacheConfig, - FetchPolicy, - IEnvironment, - ReaderFragment, - ReaderSelector, - SelectorData, - Snapshot, -} from 'relay-runtime'; -import type {MissingClientEdgeRequestInfo} from 'relay-runtime/store/RelayStoreTypes'; - -const {getQueryResourceForEnvironment} = require('../QueryResource'); -const invariant = require('invariant'); -const { - __internal: {fetchQuery: fetchQueryInternal}, - createOperationDescriptor, - getPendingOperationsForFragment, - getSelector, - getVariablesFromFragment, - handlePotentialSnapshotErrors, -} = require('relay-runtime'); -const warning = require('warning'); - -type FragmentQueryOptions = { - fetchPolicy?: FetchPolicy, - networkCacheConfig?: ?CacheConfig, -}; - -type FragmentState = $ReadOnly< - | {kind: 'bailout'} - | {kind: 'singular', snapshot: Snapshot, epoch: number} - | {kind: 'plural', snapshots: $ReadOnlyArray, epoch: number}, ->; - -function isMissingData(state: FragmentState): boolean { - if (state.kind === 'bailout') { - return false; - } else if (state.kind === 'singular') { - return state.snapshot.isMissingData; - } else { - return state.snapshots.some(s => s.isMissingData); - } -} - -function getMissingClientEdges( - state: FragmentState, -): $ReadOnlyArray | null { - if (state.kind === 'bailout') { - return null; - } else if (state.kind === 'singular') { - return state.snapshot.missingClientEdges ?? null; - } else { - let edges: null | Array = null; - for (const snapshot of state.snapshots) { - if (snapshot.missingClientEdges) { - edges = edges ?? 
[]; - for (const edge of snapshot.missingClientEdges) { - edges.push(edge); - } - } - } - return edges; - } -} - -function handlePotentialSnapshotErrorsForState( - environment: IEnvironment, - state: FragmentState, -): void { - if (state.kind === 'singular') { - handlePotentialSnapshotErrors( - environment, - state.snapshot.missingRequiredFields, - state.snapshot.relayResolverErrors, - ); - } else if (state.kind === 'plural') { - for (const snapshot of state.snapshots) { - handlePotentialSnapshotErrors( - environment, - snapshot.missingRequiredFields, - snapshot.relayResolverErrors, - ); - } - } -} - -function handleMissingClientEdge( - environment: IEnvironment, - parentFragmentNode: ReaderFragment, - parentFragmentRef: mixed, - missingClientEdgeRequestInfo: MissingClientEdgeRequestInfo, - queryOptions?: FragmentQueryOptions, -): QueryResult { - const originalVariables = getVariablesFromFragment( - parentFragmentNode, - parentFragmentRef, - ); - const variables = { - ...originalVariables, - id: missingClientEdgeRequestInfo.clientEdgeDestinationID, // TODO should be a reserved name - }; - const queryOperationDescriptor = createOperationDescriptor( - missingClientEdgeRequestInfo.request, - variables, - queryOptions?.networkCacheConfig, - ); - // This may suspend. We don't need to do anything with the results; all we're - // doing here is started the query if needed and retaining and releasing it - // according to the component mount/suspense cycle; QueryResource - // already handles this by itself. 
- const QueryResource = getQueryResourceForEnvironment(environment); - return QueryResource.prepare( - queryOperationDescriptor, - fetchQueryInternal(environment, queryOperationDescriptor), - queryOptions?.fetchPolicy, - ); -} - -function getFragmentState( - environment: IEnvironment, - fragmentSelector: ?ReaderSelector, -): FragmentState { - if (fragmentSelector == null) { - return {kind: 'bailout'}; - } else if (fragmentSelector.kind === 'PluralReaderSelector') { - if (fragmentSelector.selectors.length === 0) { - return {kind: 'bailout'}; - } else { - return { - kind: 'plural', - snapshots: fragmentSelector.selectors.map(s => environment.lookup(s)), - epoch: environment.getStore().getEpoch(), - }; - } - } else { - return { - kind: 'singular', - snapshot: environment.lookup(fragmentSelector), - epoch: environment.getStore().getEpoch(), - }; - } -} - -// fragmentNode cannot change during the lifetime of the component, though fragmentRef may change. -function readFragmentInternal_REACT_CACHE( - environment: IEnvironment, - fragmentNode: ReaderFragment, - fragmentRef: mixed, - hookDisplayName: string, - queryOptions?: FragmentQueryOptions, - fragmentKey?: string, -): { - +data: ?SelectorData | Array, - +clientEdgeQueries: ?Array, -} { - const fragmentSelector = getSelector(fragmentNode, fragmentRef); - const isPlural = fragmentNode?.metadata?.plural === true; - - if (isPlural) { - invariant( - fragmentRef == null || Array.isArray(fragmentRef), - 'Relay: Expected fragment pointer%s for fragment `%s` to be ' + - 'an array, instead got `%s`. Remove `@relay(plural: true)` ' + - 'from fragment `%s` to allow the prop to be an object.', - fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', - fragmentNode.name, - typeof fragmentRef, - fragmentNode.name, - ); - } else { - invariant( - !Array.isArray(fragmentRef), - 'Relay: Expected fragment pointer%s for fragment `%s` not to be ' + - 'an array, instead got `%s`. 
Add `@relay(plural: true)` ' + - 'to fragment `%s` to allow the prop to be an array.', - fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', - fragmentNode.name, - typeof fragmentRef, - fragmentNode.name, - ); - } - invariant( - fragmentRef == null || - (isPlural && Array.isArray(fragmentRef) && fragmentRef.length === 0) || - fragmentSelector != null, - 'Relay: Expected to receive an object where `...%s` was spread, ' + - 'but the fragment reference was not found`. This is most ' + - 'likely the result of:\n' + - "- Forgetting to spread `%s` in `%s`'s parent's fragment.\n" + - '- Conditionally fetching `%s` but unconditionally passing %s prop ' + - 'to `%s`. If the parent fragment only fetches the fragment conditionally ' + - '- with e.g. `@include`, `@skip`, or inside a `... on SomeType { }` ' + - 'spread - then the fragment reference will not exist. ' + - 'In this case, pass `null` if the conditions for evaluating the ' + - 'fragment are not met (e.g. if the `@include(if)` value is false.)', - fragmentNode.name, - fragmentNode.name, - hookDisplayName, - fragmentNode.name, - fragmentKey == null ? 'a fragment reference' : `the \`${fragmentKey}\``, - hookDisplayName, - ); - - const state = getFragmentState(environment, fragmentSelector); - - // Handle the queries for any missing client edges; this may suspend. - // FIXME handle client edges in parallel. - let clientEdgeQueries = null; - if (fragmentNode.metadata?.hasClientEdges === true) { - const missingClientEdges = getMissingClientEdges(state); - if (missingClientEdges?.length) { - clientEdgeQueries = ([]: Array); - for (const edge of missingClientEdges) { - clientEdgeQueries.push( - handleMissingClientEdge( - environment, - fragmentNode, - fragmentRef, - edge, - queryOptions, - ), - ); - } - } - } - - if (isMissingData(state)) { - // Suspend if an active operation bears on this fragment, either the - // fragment's owner or some other mutation etc. 
that could affect it: - invariant(fragmentSelector != null, 'refinement, see invariants above'); - const fragmentOwner = - fragmentSelector.kind === 'PluralReaderSelector' - ? fragmentSelector.selectors[0].owner - : fragmentSelector.owner; - const pendingOperationsResult = getPendingOperationsForFragment( - environment, - fragmentNode, - fragmentOwner, - ); - if (pendingOperationsResult) { - throw pendingOperationsResult.promise; - } - // Report required fields only if we're not suspending, since that means - // they're missing even though we are out of options for possibly fetching them: - handlePotentialSnapshotErrorsForState(environment, state); - } - - let data: ?SelectorData | Array; - if (state.kind === 'bailout') { - data = isPlural ? [] : null; - } else if (state.kind === 'singular') { - data = state.snapshot.data; - } else { - data = state.snapshots.map(s => s.data); - } - - if (__DEV__) { - if ( - fragmentRef != null && - (data === undefined || - (Array.isArray(data) && - data.length > 0 && - data.every(d => d === undefined))) - ) { - warning( - false, - 'Relay: Expected to have been able to read non-null data for ' + - 'fragment `%s` declared in ' + - '`%s`, since fragment reference was non-null. ' + - "Make sure that that `%s`'s parent isn't " + - 'holding on to and/or passing a fragment reference for data that ' + - 'has been deleted.', - fragmentNode.name, - hookDisplayName, - hookDisplayName, - ); - } - } - - return {data, clientEdgeQueries}; -} - -module.exports = readFragmentInternal_REACT_CACHE; diff --git a/packages/react-relay/relay-hooks/react-cache/useFragmentInternal_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/useFragmentInternal_REACT_CACHE.js deleted file mode 100644 index e6d7a3a8ef250..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/useFragmentInternal_REACT_CACHE.js +++ /dev/null @@ -1,599 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {QueryResult} from '../QueryResource'; -import type { - CacheConfig, - FetchPolicy, - IEnvironment, - ReaderFragment, - ReaderSelector, - SelectorData, - Snapshot, -} from 'relay-runtime'; -import type { - MissingClientEdgeRequestInfo, - MissingLiveResolverField, -} from 'relay-runtime/store/RelayStoreTypes'; - -const {getQueryResourceForEnvironment} = require('../QueryResource'); -const useRelayEnvironment = require('../useRelayEnvironment'); -const invariant = require('invariant'); -const {useDebugValue, useEffect, useMemo, useRef, useState} = require('react'); -const { - __internal: {fetchQuery: fetchQueryInternal}, - areEqualSelectors, - createOperationDescriptor, - getPendingOperationsForFragment, - getSelector, - getVariablesFromFragment, - handlePotentialSnapshotErrors, - recycleNodesInto, -} = require('relay-runtime'); -const warning = require('warning'); - -type FragmentQueryOptions = { - fetchPolicy?: FetchPolicy, - networkCacheConfig?: ?CacheConfig, -}; - -type FragmentState = $ReadOnly< - | {kind: 'bailout'} - | {kind: 'singular', snapshot: Snapshot, epoch: number} - | {kind: 'plural', snapshots: $ReadOnlyArray, epoch: number}, ->; - -type StateUpdaterFunction = ((T) => T) => void; - -function isMissingData(state: FragmentState): boolean { - if (state.kind === 'bailout') { - return false; - } else if (state.kind === 'singular') { - return state.snapshot.isMissingData; - } else { - return state.snapshots.some(s => s.isMissingData); - } -} - -function getMissingClientEdges( - state: FragmentState, -): $ReadOnlyArray | null { - if (state.kind === 'bailout') { - return null; - } else if (state.kind === 'singular') { - return state.snapshot.missingClientEdges ?? 
null; - } else { - let edges: null | Array = null; - for (const snapshot of state.snapshots) { - if (snapshot.missingClientEdges) { - edges = edges ?? []; - for (const edge of snapshot.missingClientEdges) { - edges.push(edge); - } - } - } - return edges; - } -} - -function getSuspendingLiveResolver( - state: FragmentState, -): $ReadOnlyArray | null { - if (state.kind === 'bailout') { - return null; - } else if (state.kind === 'singular') { - return state.snapshot.missingLiveResolverFields ?? null; - } else { - let missingFields: null | Array = null; - for (const snapshot of state.snapshots) { - if (snapshot.missingLiveResolverFields) { - missingFields = missingFields ?? []; - for (const edge of snapshot.missingLiveResolverFields) { - missingFields.push(edge); - } - } - } - return missingFields; - } -} - -function handlePotentialSnapshotErrorsForState( - environment: IEnvironment, - state: FragmentState, -): void { - if (state.kind === 'singular') { - handlePotentialSnapshotErrors( - environment, - state.snapshot.missingRequiredFields, - state.snapshot.relayResolverErrors, - ); - } else if (state.kind === 'plural') { - for (const snapshot of state.snapshots) { - handlePotentialSnapshotErrors( - environment, - snapshot.missingRequiredFields, - snapshot.relayResolverErrors, - ); - } - } -} - -/** - * Check for updates to the store that occurred concurrently with rendering the given `state` value, - * returning a new (updated) state if there were updates or null if there were no changes. - */ -function handleMissedUpdates( - environment: IEnvironment, - state: FragmentState, -): null | [/* has data changed */ boolean, FragmentState] { - if (state.kind === 'bailout') { - return null; - } - // FIXME this is invalid if we've just switched environments. 
- const currentEpoch = environment.getStore().getEpoch(); - if (currentEpoch === state.epoch) { - return null; - } - // The store has updated since we rendered (without us being subscribed yet), - // so check for any updates to the data we're rendering: - if (state.kind === 'singular') { - const currentSnapshot = environment.lookup(state.snapshot.selector); - const updatedData = recycleNodesInto( - state.snapshot.data, - currentSnapshot.data, - ); - const updatedCurrentSnapshot: Snapshot = { - data: updatedData, - isMissingData: currentSnapshot.isMissingData, - missingClientEdges: currentSnapshot.missingClientEdges, - missingLiveResolverFields: currentSnapshot.missingLiveResolverFields, - seenRecords: currentSnapshot.seenRecords, - selector: currentSnapshot.selector, - missingRequiredFields: currentSnapshot.missingRequiredFields, - relayResolverErrors: currentSnapshot.relayResolverErrors, - }; - return [ - updatedData !== state.snapshot.data, - { - kind: 'singular', - snapshot: updatedCurrentSnapshot, - epoch: currentEpoch, - }, - ]; - } else { - let didMissUpdates = false; - const currentSnapshots = []; - for (let index = 0; index < state.snapshots.length; index++) { - const snapshot = state.snapshots[index]; - const currentSnapshot = environment.lookup(snapshot.selector); - const updatedData = recycleNodesInto(snapshot.data, currentSnapshot.data); - const updatedCurrentSnapshot: Snapshot = { - data: updatedData, - isMissingData: currentSnapshot.isMissingData, - missingClientEdges: currentSnapshot.missingClientEdges, - missingLiveResolverFields: currentSnapshot.missingLiveResolverFields, - seenRecords: currentSnapshot.seenRecords, - selector: currentSnapshot.selector, - missingRequiredFields: currentSnapshot.missingRequiredFields, - relayResolverErrors: currentSnapshot.relayResolverErrors, - }; - if (updatedData !== snapshot.data) { - didMissUpdates = true; - } - currentSnapshots.push(updatedCurrentSnapshot); - } - invariant( - currentSnapshots.length === 
state.snapshots.length, - 'Expected same number of snapshots', - ); - return [ - didMissUpdates, - { - kind: 'plural', - snapshots: currentSnapshots, - epoch: currentEpoch, - }, - ]; - } -} - -function handleMissingClientEdge( - environment: IEnvironment, - parentFragmentNode: ReaderFragment, - parentFragmentRef: mixed, - missingClientEdgeRequestInfo: MissingClientEdgeRequestInfo, - queryOptions?: FragmentQueryOptions, -): QueryResult { - const originalVariables = getVariablesFromFragment( - parentFragmentNode, - parentFragmentRef, - ); - const variables = { - ...originalVariables, - id: missingClientEdgeRequestInfo.clientEdgeDestinationID, // TODO should be a reserved name - }; - const queryOperationDescriptor = createOperationDescriptor( - missingClientEdgeRequestInfo.request, - variables, - queryOptions?.networkCacheConfig, - ); - // This may suspend. We don't need to do anything with the results; all we're - // doing here is started the query if needed and retaining and releasing it - // according to the component mount/suspense cycle; QueryResource - // already handles this by itself. - const QueryResource = getQueryResourceForEnvironment(environment); - return QueryResource.prepare( - queryOperationDescriptor, - fetchQueryInternal(environment, queryOperationDescriptor), - queryOptions?.fetchPolicy, - ); -} - -function subscribeToSnapshot( - environment: IEnvironment, - state: FragmentState, - setState: StateUpdaterFunction, -): () => void { - if (state.kind === 'bailout') { - return () => {}; - } else if (state.kind === 'singular') { - const disposable = environment.subscribe(state.snapshot, latestSnapshot => { - setState(prevState => { - // In theory a setState from a subscription could be batched together - // with a setState to change the fragment selector. Guard against this - // by bailing out of the state update if the selector has changed. 
- if ( - prevState.kind !== 'singular' || - prevState.snapshot.selector !== latestSnapshot.selector - ) { - return prevState; - } - return { - kind: 'singular', - snapshot: latestSnapshot, - epoch: environment.getStore().getEpoch(), - }; - }); - }); - return () => { - disposable.dispose(); - }; - } else { - const disposables = state.snapshots.map((snapshot, index) => - environment.subscribe(snapshot, latestSnapshot => { - setState(prevState => { - // In theory a setState from a subscription could be batched together - // with a setState to change the fragment selector. Guard against this - // by bailing out of the state update if the selector has changed. - if ( - prevState.kind !== 'plural' || - prevState.snapshots[index]?.selector !== latestSnapshot.selector - ) { - return prevState; - } - const updated = [...prevState.snapshots]; - updated[index] = latestSnapshot; - return { - kind: 'plural', - snapshots: updated, - epoch: environment.getStore().getEpoch(), - }; - }); - }), - ); - return () => { - for (const d of disposables) { - d.dispose(); - } - }; - } -} - -function getFragmentState( - environment: IEnvironment, - fragmentSelector: ?ReaderSelector, -): FragmentState { - if (fragmentSelector == null) { - return {kind: 'bailout'}; - } else if (fragmentSelector.kind === 'PluralReaderSelector') { - // Note that if fragmentRef is an empty array, fragmentSelector will be null so we'll hit the above case. - // Null is returned by getSelector if fragmentRef has no non-null items. - return { - kind: 'plural', - snapshots: fragmentSelector.selectors.map(s => environment.lookup(s)), - epoch: environment.getStore().getEpoch(), - }; - } else { - return { - kind: 'singular', - snapshot: environment.lookup(fragmentSelector), - epoch: environment.getStore().getEpoch(), - }; - } -} - -// fragmentNode cannot change during the lifetime of the component, though fragmentRef may change. 
-function useFragmentInternal_REACT_CACHE( - fragmentNode: ReaderFragment, - fragmentRef: mixed, - hookDisplayName: string, - queryOptions?: FragmentQueryOptions, - fragmentKey?: string, -): ?SelectorData | Array { - const fragmentSelector = useMemo( - () => getSelector(fragmentNode, fragmentRef), - [fragmentNode, fragmentRef], - ); - - const isPlural = fragmentNode?.metadata?.plural === true; - - if (isPlural) { - invariant( - fragmentRef == null || Array.isArray(fragmentRef), - 'Relay: Expected fragment pointer%s for fragment `%s` to be ' + - 'an array, instead got `%s`. Remove `@relay(plural: true)` ' + - 'from fragment `%s` to allow the prop to be an object.', - fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', - fragmentNode.name, - typeof fragmentRef, - fragmentNode.name, - ); - } else { - invariant( - !Array.isArray(fragmentRef), - 'Relay: Expected fragment pointer%s for fragment `%s` not to be ' + - 'an array, instead got `%s`. Add `@relay(plural: true)` ' + - 'to fragment `%s` to allow the prop to be an array.', - fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', - fragmentNode.name, - typeof fragmentRef, - fragmentNode.name, - ); - } - invariant( - fragmentRef == null || - (isPlural && Array.isArray(fragmentRef) && fragmentRef.length === 0) || - fragmentSelector != null, - 'Relay: Expected to receive an object where `...%s` was spread, ' + - 'but the fragment reference was not found`. This is most ' + - 'likely the result of:\n' + - "- Forgetting to spread `%s` in `%s`'s parent's fragment.\n" + - '- Conditionally fetching `%s` but unconditionally passing %s prop ' + - 'to `%s`. If the parent fragment only fetches the fragment conditionally ' + - '- with e.g. `@include`, `@skip`, or inside a `... on SomeType { }` ' + - 'spread - then the fragment reference will not exist. ' + - 'In this case, pass `null` if the conditions for evaluating the ' + - 'fragment are not met (e.g. 
if the `@include(if)` value is false.)', - fragmentNode.name, - fragmentNode.name, - hookDisplayName, - fragmentNode.name, - fragmentKey == null ? 'a fragment reference' : `the \`${fragmentKey}\``, - hookDisplayName, - ); - - const environment = useRelayEnvironment(); - const [_state, setState] = useState(() => - getFragmentState(environment, fragmentSelector), - ); - let state = _state; - - // This copy of the state we only update when something requires us to - // unsubscribe and re-subscribe, namely a changed environment or - // fragment selector. - const [_subscribedState, setSubscribedState] = useState(state); - // FIXME since this is used as an effect dependency, it needs to be memoized. - let subscribedState = _subscribedState; - - const [previousFragmentSelector, setPreviousFragmentSelector] = - useState(fragmentSelector); - const [previousEnvironment, setPreviousEnvironment] = useState(environment); - if ( - !areEqualSelectors(fragmentSelector, previousFragmentSelector) || - environment !== previousEnvironment - ) { - // Enqueue setState to record the new selector and state - setPreviousFragmentSelector(fragmentSelector); - setPreviousEnvironment(environment); - const newState = getFragmentState(environment, fragmentSelector); - setState(newState); - setSubscribedState(newState); // This causes us to form a new subscription - // But render with the latest state w/o waiting for the setState. Otherwise - // the component would render the wrong information temporarily (including - // possibly incorrectly triggering some warnings below). - state = newState; - subscribedState = newState; - } - - // The purpose of this is to detect whether we have ever committed, because we - // don't suspend on store updates, only when the component either is first trying - // to mount or when the our selector changes. The selector change in particular is - // how we suspend for pagination and refetech. 
Also, fragment selector can be null - // or undefined, so we use false as a special value to distinguish from all fragment - // selectors; false means that the component hasn't mounted yet. - const committedFragmentSelectorRef = useRef(false); - useEffect(() => { - committedFragmentSelectorRef.current = fragmentSelector; - }, [fragmentSelector]); - - // Handle the queries for any missing client edges; this may suspend. - // FIXME handle client edges in parallel. - if (fragmentNode.metadata?.hasClientEdges === true) { - // The fragment is validated to be static (in useFragment) and hasClientEdges is - // a static (constant) property of the fragment. In practice, this effect will - // always or never run for a given invocation of this hook. - // eslint-disable-next-line react-hooks/rules-of-hooks - const clientEdgeQueries = useMemo(() => { - const missingClientEdges = getMissingClientEdges(state); - // eslint-disable-next-line no-shadow - let clientEdgeQueries; - if (missingClientEdges?.length) { - clientEdgeQueries = ([]: Array); - for (const edge of missingClientEdges) { - clientEdgeQueries.push( - handleMissingClientEdge( - environment, - fragmentNode, - fragmentRef, - edge, - queryOptions, - ), - ); - } - } - return clientEdgeQueries; - }, [state, environment, fragmentNode, fragmentRef, queryOptions]); - - // See above note - // eslint-disable-next-line react-hooks/rules-of-hooks - useEffect(() => { - const QueryResource = getQueryResourceForEnvironment(environment); - if (clientEdgeQueries?.length) { - const disposables = []; - for (const query of clientEdgeQueries) { - disposables.push(QueryResource.retain(query)); - } - return () => { - for (const disposable of disposables) { - disposable.dispose(); - } - }; - } - }, [environment, clientEdgeQueries]); - } - - if (isMissingData(state)) { - // Suspend if a Live Resolver within this fragment is in a suspended state: - const suspendingLiveResolvers = getSuspendingLiveResolver(state); - if (suspendingLiveResolvers 
!= null && suspendingLiveResolvers.length > 0) { - throw Promise.all( - suspendingLiveResolvers.map(({liveStateID}) => { - // $FlowFixMe[prop-missing] This is expected to be a LiveResolverStore - return environment.getStore().getLiveResolverPromise(liveStateID); - }), - ); - } - // Suspend if an active operation bears on this fragment, either the - // fragment's owner or some other mutation etc. that could affect it. - // We only suspend when the component is first trying to mount or changing - // selectors, not if data becomes missing later: - if ( - !committedFragmentSelectorRef.current || - !areEqualSelectors(committedFragmentSelectorRef.current, fragmentSelector) - ) { - invariant(fragmentSelector != null, 'refinement, see invariants above'); - const fragmentOwner = - fragmentSelector.kind === 'PluralReaderSelector' - ? fragmentSelector.selectors[0].owner - : fragmentSelector.owner; - const pendingOperationsResult = getPendingOperationsForFragment( - environment, - fragmentNode, - fragmentOwner, - ); - if (pendingOperationsResult) { - throw pendingOperationsResult.promise; - } - } - // Report required fields only if we're not suspending, since that means - // they're missing even though we are out of options for possibly fetching them: - handlePotentialSnapshotErrorsForState(environment, state); - } - - useEffect(() => { - // Check for updates since the state was rendered - let currentState = subscribedState; - const updates = handleMissedUpdates(environment, subscribedState); - if (updates !== null) { - const [didMissUpdates, updatedState] = updates; - // TODO: didMissUpdates only checks for changes to snapshot data, but it's possible - // that other snapshot properties may have changed that should also trigger a re-render, - // such as changed missing resolver fields, missing client edges, etc. - // A potential alternative is for handleMissedUpdates() to recycle the entire state - // value, and return the new (recycled) state only if there was some change. 
In that - // case the code would always setState if something in the snapshot changed, in addition - // to using the latest snapshot to subscribe. - if (didMissUpdates) { - setState(updatedState); - } - currentState = updatedState; - } - return subscribeToSnapshot(environment, currentState, setState); - }, [environment, subscribedState]); - - let data: ?SelectorData | Array; - if (isPlural) { - // Plural fragments require allocating an array of the snasphot data values, - // which has to be memoized to avoid triggering downstream re-renders. - // - // Note that isPlural is a constant property of the fragment and does not change - // for a particular useFragment invocation site - const fragmentRefIsNullish = fragmentRef == null; // for less sensitive memoization - // eslint-disable-next-line react-hooks/rules-of-hooks - data = useMemo(() => { - if (state.kind === 'bailout') { - // Bailout state can happen if the fragmentRef is a plural array that is empty or has no - // non-null entries. In that case, the compatible behavior is to return [] instead of null. - return fragmentRefIsNullish ? 
null : []; - } else { - invariant( - state.kind === 'plural', - 'Expected state to be plural because fragment is plural', - ); - return state.snapshots.map(s => s.data); - } - }, [state, fragmentRefIsNullish]); - } else if (state.kind === 'bailout') { - // This case doesn't allocate a new object so it doesn't have to be memoized - data = null; - } else { - // This case doesn't allocate a new object so it doesn't have to be memoized - invariant( - state.kind === 'singular', - 'Expected state to be singular because fragment is singular', - ); - data = state.snapshot.data; - } - - if (__DEV__) { - if ( - fragmentRef != null && - (data === undefined || - (Array.isArray(data) && - data.length > 0 && - data.every(d => d === undefined))) - ) { - warning( - false, - 'Relay: Expected to have been able to read non-null data for ' + - 'fragment `%s` declared in ' + - '`%s`, since fragment reference was non-null. ' + - "Make sure that that `%s`'s parent isn't " + - 'holding on to and/or passing a fragment reference for data that ' + - 'has been deleted.', - fragmentNode.name, - hookDisplayName, - hookDisplayName, - ); - } - } - - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useDebugValue({fragment: fragmentNode.name, data}); - } - - return data; -} - -module.exports = useFragmentInternal_REACT_CACHE; diff --git a/packages/react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE.js deleted file mode 100644 index 58dc9ed2e1967..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/useFragment_REACT_CACHE.js +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {Fragment, FragmentType, GraphQLTaggedNode} from 'relay-runtime'; - -const {useTrackLoadQueryInRender} = require('../loadQuery'); -const useStaticFragmentNodeWarning = require('../useStaticFragmentNodeWarning'); -const useFragmentInternal = require('./useFragmentInternal_REACT_CACHE'); -const {useDebugValue} = require('react'); -const {getFragment} = require('relay-runtime'); - -type HasSpread = { - +$fragmentSpreads: TFragmentType, - ... -}; - -// if the key is non-nullable, return non-nullable value -declare function useFragment( - fragment: Fragment, - key: HasSpread, -): TData; - -// if the key is nullable, return nullable value -declare function useFragment( - fragment: Fragment, - key: ?HasSpread, -): ?TData; - -// if the key is a non-nullable array of keys, return non-nullable array -declare function useFragment( - fragment: Fragment, - key: $ReadOnlyArray>, -): TData; - -// if the key is a nullable array of keys, return nullable array -declare function useFragment( - fragment: Fragment, - key: ?$ReadOnlyArray>, -): ?TData; - -function useFragment(fragment: GraphQLTaggedNode, key: mixed): mixed { - // We need to use this hook in order to be able to track if - // loadQuery was called during render - useTrackLoadQueryInRender(); - - const fragmentNode = getFragment(fragment); - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useStaticFragmentNodeWarning( - fragmentNode, - 'first argument of useFragment()', - ); - } - const data = useFragmentInternal(fragmentNode, key, 'useFragment()'); - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useDebugValue({fragment: fragmentNode.name, data}); - } - return data; -} - -module.exports = useFragment; diff --git a/packages/react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE.js deleted 
file mode 100644 index bcaaee00fe0a6..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/useLazyLoadQuery_REACT_CACHE.js +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type { - CacheConfig, - FetchPolicy, - Query, - RenderPolicy, - Variables, -} from 'relay-runtime'; - -const {useTrackLoadQueryInRender} = require('../loadQuery'); -const useMemoOperationDescriptor = require('../useMemoOperationDescriptor'); -const useRelayEnvironment = require('../useRelayEnvironment'); -const getQueryResultOrFetchQuery = require('./getQueryResultOrFetchQuery_REACT_CACHE'); -const useFragmentInternal = require('./useFragmentInternal_REACT_CACHE'); -const {useEffect} = require('react'); - -function useLazyLoadQuery_REACT_CACHE( - gqlQuery: Query, - variables: TVariables, - options?: { - fetchKey?: string | number, - fetchPolicy?: FetchPolicy, - networkCacheConfig?: CacheConfig, - UNSTABLE_renderPolicy?: RenderPolicy, - }, -): TData { - useTrackLoadQueryInRender(); - const environment = useRelayEnvironment(); - - const queryOperationDescriptor = useMemoOperationDescriptor( - gqlQuery, - variables, - options?.networkCacheConfig ?? {force: true}, - ); - - // Get the query going if needed -- this may suspend. - const [queryResult, effect] = getQueryResultOrFetchQuery( - environment, - queryOperationDescriptor, - { - fetchPolicy: options?.fetchPolicy, - renderPolicy: options?.UNSTABLE_renderPolicy, - fetchKey: options?.fetchKey, - }, - ); - - useEffect(effect); - - // Read the query's root fragment -- this may suspend. - const {fragmentNode, fragmentRef} = queryResult; - // $FlowExpectedError[incompatible-return] Is this a fixable incompatible-return? 
- return useFragmentInternal(fragmentNode, fragmentRef, 'useLazyLoadQuery()', { - fetchPolicy: options?.fetchPolicy, - networkCacheConfig: options?.networkCacheConfig, - }); -} - -module.exports = useLazyLoadQuery_REACT_CACHE; diff --git a/packages/react-relay/relay-hooks/react-cache/usePaginationFragment_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/usePaginationFragment_REACT_CACHE.js deleted file mode 100644 index b244110937efb..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/usePaginationFragment_REACT_CACHE.js +++ /dev/null @@ -1,190 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {LoadMoreFn, UseLoadMoreFunctionArgs} from '../useLoadMoreFunction'; -import type {Options} from './useRefetchableFragmentInternal_REACT_CACHE'; -import type {RefetchFnDynamic} from './useRefetchableFragmentInternal_REACT_CACHE'; -import type { - FragmentType, - GraphQLResponse, - GraphQLTaggedNode, - Observer, - OperationType, - Variables, -} from 'relay-runtime'; -import type {VariablesOf} from 'relay-runtime/util/RelayRuntimeTypes'; - -const useLoadMoreFunction = require('../useLoadMoreFunction'); -const useRelayEnvironment = require('../useRelayEnvironment'); -const useStaticFragmentNodeWarning = require('../useStaticFragmentNodeWarning'); -const useRefetchableFragmentInternal = require('./useRefetchableFragmentInternal_REACT_CACHE'); -const {useCallback, useDebugValue, useState} = require('react'); -const { - getFragment, - getFragmentIdentifier, - getPaginationMetadata, -} = require('relay-runtime'); - -export type ReturnType = { - data: TFragmentData, - loadNext: LoadMoreFn, - loadPrevious: LoadMoreFn, - hasNext: boolean, - hasPrevious: boolean, - isLoadingNext: boolean, - isLoadingPrevious: boolean, - 
refetch: RefetchFnDynamic, -}; - -function usePaginationFragment< - TQuery: OperationType, - TKey: ?{+$data?: mixed, +$fragmentSpreads: FragmentType, ...}, ->( - fragmentInput: GraphQLTaggedNode, - parentFragmentRef: TKey, -): ReturnType< - TQuery, - TKey, - // NOTE: This $Call ensures that the type of the returned data is either: - // - nullable if the provided ref type is nullable - // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - $Call< - & (( { +$data?: TFragmentData, ... }) => TFragmentData) - & ((?{ +$data?: TFragmentData, ... }) => ?TFragmentData), - TKey, - >, -> { - const fragmentNode = getFragment(fragmentInput); - useStaticFragmentNodeWarning( - fragmentNode, - 'first argument of usePaginationFragment()', - ); - const componentDisplayName = 'usePaginationFragment()'; - - const { - connectionPathInFragmentData, - paginationRequest, - paginationMetadata, - identifierField, - } = getPaginationMetadata(fragmentNode, componentDisplayName); - - const {fragmentData, fragmentRef, refetch} = useRefetchableFragmentInternal< - TQuery, - TKey, - >(fragmentNode, parentFragmentRef, componentDisplayName); - const fragmentIdentifier = getFragmentIdentifier(fragmentNode, fragmentRef); - - // Backward pagination - const [loadPrevious, hasPrevious, isLoadingPrevious, disposeFetchPrevious] = - useLoadMore({ - componentDisplayName, - connectionPathInFragmentData, - direction: 'backward', - fragmentData, - fragmentIdentifier, - fragmentNode, - fragmentRef, - identifierField, - paginationMetadata, - paginationRequest, - }); - - // Forward pagination - const [loadNext, hasNext, isLoadingNext, disposeFetchNext] = useLoadMore< - TQuery['variables'], - >({ - componentDisplayName, - connectionPathInFragmentData, - direction: 'forward', - fragmentData, - fragmentIdentifier, - fragmentNode, - fragmentRef, - identifierField, - paginationMetadata, - paginationRequest, - }); - - const refetchPagination: RefetchFnDynamic = useCallback( - (variables: 
VariablesOf, options: void | Options) => { - disposeFetchNext(); - disposeFetchPrevious(); - return refetch(variables, {...options, __environment: undefined}); - }, - [disposeFetchNext, disposeFetchPrevious, refetch], - ); - - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useDebugValue({ - fragment: fragmentNode.name, - data: fragmentData, - hasNext, - isLoadingNext, - hasPrevious, - isLoadingPrevious, - }); - } - return { - data: fragmentData, - loadNext, - loadPrevious, - hasNext, - hasPrevious, - isLoadingNext, - isLoadingPrevious, - refetch: refetchPagination, - }; -} - -function useLoadMore( - args: $Diff< - UseLoadMoreFunctionArgs, - { - observer: Observer, - onReset: () => void, - ... - }, - >, -): [LoadMoreFn, boolean, boolean, () => void] { - const environment = useRelayEnvironment(); - const [isLoadingMore, reallySetIsLoadingMore] = useState(false); - // Schedule this update since it must be observed by components at the same - // batch as when hasNext changes. hasNext is read from the store and store - // updates are scheduled, so this must be scheduled too. 
- const setIsLoadingMore = (value: boolean) => { - const schedule = environment.getScheduler()?.schedule; - if (schedule) { - schedule(() => { - reallySetIsLoadingMore(value); - }); - } else { - reallySetIsLoadingMore(value); - } - }; - const observer = { - start: () => setIsLoadingMore(true), - complete: () => setIsLoadingMore(false), - error: () => setIsLoadingMore(false), - }; - const handleReset = () => setIsLoadingMore(false); - const [loadMore, hasMore, disposeFetch] = useLoadMoreFunction({ - ...args, - observer, - onReset: handleReset, - }); - return [loadMore, hasMore, isLoadingMore, disposeFetch]; -} - -module.exports = usePaginationFragment; diff --git a/packages/react-relay/relay-hooks/react-cache/usePreloadedQuery_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/usePreloadedQuery_REACT_CACHE.js deleted file mode 100644 index bbba5e5453df1..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/usePreloadedQuery_REACT_CACHE.js +++ /dev/null @@ -1,150 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {PreloadedQuery} from '../EntryPointTypes.flow'; -import type { - GraphQLTaggedNode, - OperationType, - RenderPolicy, -} from 'relay-runtime'; - -const {useTrackLoadQueryInRender} = require('../loadQuery'); -const useMemoOperationDescriptor = require('../useMemoOperationDescriptor'); -const useRelayEnvironment = require('../useRelayEnvironment'); -const getQueryResultOrFetchQuery = require('./getQueryResultOrFetchQuery_REACT_CACHE'); -const useFragmentInternal = require('./useFragmentInternal_REACT_CACHE'); -const invariant = require('invariant'); -const {useDebugValue, useEffect} = require('react'); -const { - __internal: {fetchQueryDeduped, fetchQuery}, -} = require('relay-runtime'); -const warning = require('warning'); - -function usePreloadedQuery_REACT_CACHE( - gqlQuery: GraphQLTaggedNode, - preloadedQuery: PreloadedQuery, - options?: { - UNSTABLE_renderPolicy?: RenderPolicy, - }, -): TQuery['response'] { - const environment = useRelayEnvironment(); - - useTrackLoadQueryInRender(); - - const {fetchKey, fetchPolicy, source, variables, networkCacheConfig} = - preloadedQuery; - const operation = useMemoOperationDescriptor( - gqlQuery, - variables, - networkCacheConfig, - ); - - let fetchObservable; - if (preloadedQuery.kind === 'PreloadedQuery_DEPRECATED') { - invariant( - operation.request.node.params.name === preloadedQuery.name, - 'usePreloadedQuery(): Expected data to be prefetched for query `%s`, ' + - 'got prefetch results for query `%s`.', - operation.request.node.params.name, - preloadedQuery.name, - ); - fetchObservable = fetchQueryDeduped( - environment, - operation.request.identifier, - () => { - if (environment === preloadedQuery.environment && source != null) { - return environment.executeWithSource({operation, source}); - } else { - return environment.execute({operation}); - } - }, - ); - } else { - warning( - preloadedQuery.isDisposed === false, - 
'usePreloadedQuery(): Expected preloadedQuery to not be disposed yet. ' + - 'This is because disposing the query marks it for future garbage ' + - 'collection, and as such query results may no longer be present in the Relay ' + - 'store. In the future, this will become a hard error.', - ); - const fallbackFetchObservable = fetchQuery(environment, operation); - if (source != null && environment === preloadedQuery.environment) { - // If the source observable exists and the environments match, reuse - // the source observable. - // If the source observable happens to be empty, we need to fall back - // and re-execute and de-dupe the query (at render time). - fetchObservable = source.ifEmpty(fallbackFetchObservable); - } else if (environment !== preloadedQuery.environment) { - // If a call to loadQuery is made with a particular environment, and that - // preloaded query is passed to usePreloadedQuery in a different environment - // context, we cannot re-use the existing preloaded query. - // Instead, we need to fall back and re-execute and de-dupe the query with - // the new environment (at render time). - // TODO T68036756 track occurences of this warning and turn it into a hard error - warning( - false, - 'usePreloadedQuery(): usePreloadedQuery was passed a preloaded query ' + - 'that was created with a different environment than the one that is currently ' + - 'in context. In the future, this will become a hard error.', - ); - fetchObservable = fallbackFetchObservable; - } else { - // if (source == null) - // If the source observable does not exist, we need to - // fall back and re-execute and de-dupe the query (at render time). - fetchObservable = fallbackFetchObservable; - } - } - - // Get the query going if needed -- this may suspend. 
- const [queryResult, effect] = getQueryResultOrFetchQuery( - environment, - operation, - { - fetchPolicy, - renderPolicy: options?.UNSTABLE_renderPolicy, - fetchKey, - fetchObservable, - }, - ); - - useEffect(effect); - - // Read the query's root fragment -- this may suspend. - const {fragmentNode, fragmentRef} = queryResult; - const data = useFragmentInternal( - fragmentNode, - fragmentRef, - 'usePreloadedQuery()', - { - fetchPolicy: fetchPolicy, - networkCacheConfig: networkCacheConfig, - }, - ); - - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useDebugValue({ - query: preloadedQuery.name, - variables: preloadedQuery.variables, - data, - fetchKey, - fetchPolicy, - renderPolicy: options?.UNSTABLE_renderPolicy, - }); - } - - return data; -} - -module.exports = usePreloadedQuery_REACT_CACHE; diff --git a/packages/react-relay/relay-hooks/react-cache/useRefetchableFragmentInternal_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/useRefetchableFragmentInternal_REACT_CACHE.js deleted file mode 100644 index 9466f8ed3a9ab..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/useRefetchableFragmentInternal_REACT_CACHE.js +++ /dev/null @@ -1,601 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {LoaderFn} from '../useQueryLoader'; -import type { - ConcreteRequest, - Disposable, - FetchPolicy, - IEnvironment, - OperationDescriptor, - OperationType, - ReaderFragment, - RenderPolicy, - Variables, - VariablesOf, -} from 'relay-runtime'; - -const ProfilerContext = require('../ProfilerContext'); -const {getQueryResourceForEnvironment} = require('../QueryResource'); -const useIsMountedRef = require('../useIsMountedRef'); -const useQueryLoader = require('../useQueryLoader'); -const useRelayEnvironment = require('../useRelayEnvironment'); -const readFragmentInternal = require('./readFragmentInternal_REACT_CACHE'); -const useFragmentInternal = require('./useFragmentInternal_REACT_CACHE'); -const invariant = require('invariant'); -const {useCallback, useContext, useReducer} = require('react'); -const { - __internal: {fetchQuery}, - createOperationDescriptor, - getFragmentIdentifier, - getRefetchMetadata, - getSelector, - getValueAtPath, -} = require('relay-runtime'); -const warning = require('warning'); - -export type RefetchFn< - TQuery: OperationType, - TOptions = Options, -> = RefetchFnExact; - -// NOTE: RefetchFnDynamic returns a refetch function that: -// - Expects the /exact/ set of query variables if the provided key type is -// /nullable/. -// - Or, expects /a subset/ of the query variables if the provided key type is -// /non-null/. -// prettier-ignore -export type RefetchFnDynamic< - TQuery: OperationType, - TKey: ?{ +$data?: mixed, ... }, - TOptions = Options, -> = $Call< - & (( { +$data?: mixed, ... }) => RefetchFnInexact) - & ((?{ +$data?: mixed, ... 
}) => RefetchFnExact), - TKey ->; - -export type ReturnType< - TQuery: OperationType, - TKey: ?{+$data?: mixed, ...}, - TOptions = Options, -> = { - fragmentData: mixed, - fragmentRef: mixed, - refetch: RefetchFnDynamic, -}; - -export type Options = { - fetchPolicy?: FetchPolicy, - onComplete?: (Error | null) => void, - UNSTABLE_renderPolicy?: RenderPolicy, -}; - -type InternalOptions = { - ...Options, - __environment?: IEnvironment, -}; - -type RefetchFnBase = ( - vars: TVars, - options?: TOptions, -) => Disposable; - -type RefetchFnExact = RefetchFnBase< - VariablesOf, - TOptions, ->; -type RefetchFnInexact< - TQuery: OperationType, - TOptions = Options, -> = RefetchFnBase>, TOptions>; - -type Action = - | { - type: 'reset', - environment: IEnvironment, - fragmentIdentifier: string, - } - | { - type: 'refetch', - refetchQuery: OperationDescriptor, - fetchPolicy?: FetchPolicy, - renderPolicy?: RenderPolicy, - onComplete?: (Error | null) => void, - refetchEnvironment: ?IEnvironment, - }; - -type RefetchState = { - fetchPolicy: FetchPolicy | void, - mirroredEnvironment: IEnvironment, - mirroredFragmentIdentifier: string, - onComplete: ((Error | null) => void) | void, - refetchEnvironment?: ?IEnvironment, - refetchQuery: OperationDescriptor | null, - renderPolicy: RenderPolicy | void, -}; - -type DebugIDandTypename = { - id: string, - typename: string, - ... -}; - -function reducer(state: RefetchState, action: Action): RefetchState { - switch (action.type) { - case 'refetch': { - return { - ...state, - fetchPolicy: action.fetchPolicy, - mirroredEnvironment: - action.refetchEnvironment ?? 
state.mirroredEnvironment, - onComplete: action.onComplete, - refetchEnvironment: action.refetchEnvironment, - refetchQuery: action.refetchQuery, - renderPolicy: action.renderPolicy, - }; - } - case 'reset': { - return { - fetchPolicy: undefined, - mirroredEnvironment: action.environment, - mirroredFragmentIdentifier: action.fragmentIdentifier, - onComplete: undefined, - refetchQuery: null, - renderPolicy: undefined, - }; - } - default: { - (action.type: empty); - throw new Error('useRefetchableFragmentNode: Unexpected action type'); - } - } -} - -function useRefetchableFragmentNode< - TQuery: OperationType, - TKey: ?{+$data?: mixed, ...}, ->( - fragmentNode: ReaderFragment, - parentFragmentRef: mixed, - componentDisplayName: string, -): ReturnType { - const parentEnvironment = useRelayEnvironment(); - const {refetchableRequest, fragmentRefPathInResponse, identifierField} = - getRefetchMetadata(fragmentNode, componentDisplayName); - const fragmentIdentifier = getFragmentIdentifier( - fragmentNode, - parentFragmentRef, - ); - - const [refetchState, dispatch] = useReducer(reducer, { - fetchPolicy: undefined, - mirroredEnvironment: parentEnvironment, - mirroredFragmentIdentifier: fragmentIdentifier, - onComplete: undefined, - refetchEnvironment: null, - refetchQuery: null, - renderPolicy: undefined, - }); - const { - fetchPolicy, - mirroredEnvironment, - mirroredFragmentIdentifier, - onComplete, - refetchEnvironment, - refetchQuery, - renderPolicy, - } = refetchState; - const environment = refetchEnvironment ?? 
parentEnvironment; - - const QueryResource = getQueryResourceForEnvironment(environment); - const profilerContext = useContext(ProfilerContext); - - const shouldReset = - environment !== mirroredEnvironment || - fragmentIdentifier !== mirroredFragmentIdentifier; - const [queryRef, loadQuery, disposeQuery] = useQueryLoader< - TQuery['variables'], - TQuery['response'], - TQuery['rawResponse'], - >((refetchableRequest: $FlowFixMe)); - - let fragmentRef = parentFragmentRef; - if (shouldReset) { - dispatch({ - type: 'reset', - environment, - fragmentIdentifier, - }); - disposeQuery(); - } else if (refetchQuery != null && queryRef != null) { - // If refetch was called, we expect to have a refetchQuery and queryRef - // in state, since both state updates to set the refetchQuery and the - // queryRef occur simultaneously. - // In this case, we need to read the refetched query data (potentially - // suspending if it's in flight), and extract the new fragment ref - // from the query in order read the current @refetchable fragment - // with the updated fragment owner as the new refetchQuery. - - // Before observing the refetch, record the current ID and typename - // so that, if we are refetching existing data on - // a field that implements Node, after refetching we - // can validate that the received data is consistent - let debugPreviousIDAndTypename: ?DebugIDandTypename; - if (__DEV__) { - debugPreviousIDAndTypename = debugFunctions.getInitialIDAndType( - refetchQuery.request.variables, - fragmentRefPathInResponse, - environment, - ); - } - - const handleQueryCompleted = (maybeError: void | Error) => { - onComplete && onComplete(maybeError ?? null); - }; - - // The queryRef.source obtained from useQueryLoader will be - // an observable we can consume /if/ a network request was - // started. Otherwise, given that QueryResource.prepare - // always expects an observable we fall back to a new network - // observable. 
Note however that if loadQuery did not make a network - // request, we don't expect to make one here, unless the state of - // the cache has changed between the call to refetch and this - // render. - const fetchObservable = - queryRef.source != null - ? queryRef.source - : fetchQuery(environment, refetchQuery); - - // Now wwe can we read the refetch query here using the - // queryRef provided from useQueryLoader. Note that the - // network request is started during the call to refetch, - // but if the refetch query is still in flight, we will suspend - // at this point: - const queryResult = profilerContext.wrapPrepareQueryResource(() => { - return QueryResource.prepare( - refetchQuery, - fetchObservable, - fetchPolicy, - renderPolicy, - { - error: handleQueryCompleted, - complete: () => { - // Validate that the type of the object we got back matches the type - // of the object already in the store - if (__DEV__) { - debugFunctions.checkSameTypeAfterRefetch( - debugPreviousIDAndTypename, - environment, - fragmentNode, - componentDisplayName, - ); - } - handleQueryCompleted(); - }, - }, - queryRef.fetchKey, - profilerContext, - ); - }); - - const queryData = readFragmentInternal( - environment, - queryResult.fragmentNode, - queryResult.fragmentRef, - componentDisplayName, - ).data; - invariant( - queryData != null, - 'Relay: Expected to be able to read refetch query response. ' + - "If you're seeing this, this is likely a bug in Relay.", - ); - - // After reading/fetching the refetch query, we extract from the - // refetch query response the new fragment ref we need to use to read - // the fragment. The new fragment ref will point to the refetch query - // as its fragment owner. - const refetchedFragmentRef = getValueAtPath( - queryData, - fragmentRefPathInResponse, - ); - fragmentRef = refetchedFragmentRef; - - if (__DEV__) { - // Validate that the id of the object we got back matches the id - // we queried for in the variables. 
- // We do this during render instead of onComplete to make sure we are - // only validating the most recent refetch. - debugFunctions.checkSameIDAfterRefetch( - debugPreviousIDAndTypename, - fragmentRef, - fragmentNode, - componentDisplayName, - ); - } - } - - // We read and subscribe to the fragment using useFragmentNode. - // If refetch was called, we read the fragment using the new computed - // fragment ref from the refetch query response; otherwise, we use the - // fragment ref passed by the caller as normal. - const fragmentData = useFragmentInternal( - fragmentNode, - fragmentRef, - componentDisplayName, - ); - - const refetch = useRefetchFunction( - componentDisplayName, - dispatch, - disposeQuery, - fragmentData, - fragmentIdentifier, - fragmentNode, - fragmentRefPathInResponse, - identifierField, - loadQuery, - parentFragmentRef, - refetchableRequest, - ); - return { - fragmentData, - fragmentRef, - refetch, - }; -} - -function useRefetchFunction( - componentDisplayName: string, - dispatch: ( - | { - environment: IEnvironment, - fragmentIdentifier: string, - type: 'reset', - } - | { - fetchPolicy?: FetchPolicy, - onComplete?: (Error | null) => void, - refetchEnvironment: ?IEnvironment, - refetchQuery: OperationDescriptor, - renderPolicy?: RenderPolicy, - type: 'refetch', - }, - ) => void, - disposeQuery: () => void, - fragmentData: mixed, - fragmentIdentifier: string, - fragmentNode: ReaderFragment, - fragmentRefPathInResponse: $ReadOnlyArray, - identifierField: ?string, - loadQuery: LoaderFn, - parentFragmentRef: mixed, - refetchableRequest: ConcreteRequest, -): RefetchFn { - const isMountedRef = useIsMountedRef(); - const identifierValue = - identifierField != null && - fragmentData != null && - typeof fragmentData === 'object' - ? 
fragmentData[identifierField] - : null; - return useCallback( - ( - providedRefetchVariables: VariablesOf, - options: void | InternalOptions, - ) => { - // Bail out and warn if we're trying to refetch after the component - // has unmounted - if (isMountedRef.current !== true) { - warning( - false, - 'Relay: Unexpected call to `refetch` on unmounted component for fragment ' + - '`%s` in `%s`. It looks like some instances of your component are ' + - 'still trying to fetch data but they already unmounted. ' + - 'Please make sure you clear all timers, intervals, ' + - 'async calls, etc that may trigger a fetch.', - fragmentNode.name, - componentDisplayName, - ); - return {dispose: () => {}}; - } - if (parentFragmentRef == null) { - warning( - false, - 'Relay: Unexpected call to `refetch` while using a null fragment ref ' + - 'for fragment `%s` in `%s`. When calling `refetch`, we expect ' + - "initial fragment data to be non-null. Please make sure you're " + - 'passing a valid fragment ref to `%s` before calling ' + - '`refetch`, or make sure you pass all required variables to `refetch`.', - fragmentNode.name, - componentDisplayName, - componentDisplayName, - ); - } - - const refetchEnvironment = options?.__environment; - const fetchPolicy = options?.fetchPolicy; - const renderPolicy = options?.UNSTABLE_renderPolicy; - const onComplete = options?.onComplete; - const fragmentSelector = getSelector(fragmentNode, parentFragmentRef); - let parentVariables: Variables; - let fragmentVariables: Variables; - if (fragmentSelector == null) { - parentVariables = {}; - fragmentVariables = {}; - } else if (fragmentSelector.kind === 'PluralReaderSelector') { - parentVariables = fragmentSelector.selectors[0]?.owner.variables ?? {}; - fragmentVariables = fragmentSelector.selectors[0]?.variables ?? 
{}; - } else { - parentVariables = fragmentSelector.owner.variables; - fragmentVariables = fragmentSelector.variables; - } - - // A user of `useRefetchableFragment()` may pass a subset of - // all variables required by the fragment when calling `refetch()`. - // We fill in any variables not passed by the call to `refetch()` with the - // variables from the original parent fragment owner. - const refetchVariables: VariablesOf = { - ...(parentVariables: $FlowFixMe), - ...fragmentVariables, - ...providedRefetchVariables, - }; - - // If the query needs an identifier value ('id' or similar) and one - // was not explicitly provided, read it from the fragment data. - if ( - identifierField != null && - !providedRefetchVariables.hasOwnProperty('id') - ) { - // @refetchable fragments are guaranteed to have an `id` selection - // if the type is Node, implements Node, or is @fetchable. Double-check - // that there actually is a value at runtime. - if (typeof identifierValue !== 'string') { - warning( - false, - 'Relay: Expected result to have a string ' + - '`%s` in order to refetch, got `%s`.', - identifierField, - identifierValue, - ); - } - (refetchVariables: $FlowFixMe).id = identifierValue; - } - - const refetchQuery = createOperationDescriptor( - refetchableRequest, - refetchVariables, - {force: true}, - ); - - // We call loadQuery which will start a network request if necessary - // and update the querRef from useQueryLoader. - // Note the following: - // - loadQuery will dispose of any previously refetched queries. - // - We use the variables extracted off the OperationDescriptor - // so that they have been filtered out to include only the - // variables actually declared in the query. 
- loadQuery(refetchQuery.request.variables, { - fetchPolicy, - __environment: refetchEnvironment, - __nameForWarning: 'refetch', - }); - - dispatch({ - type: 'refetch', - fetchPolicy, - onComplete, - refetchEnvironment, - refetchQuery, - renderPolicy, - }); - return {dispose: disposeQuery}; - }, - // NOTE: We disable react-hooks-deps warning because: - // - We know fragmentRefPathInResponse is static, so it can be omitted from - // deps - // - We know fragmentNode is static, so it can be omitted from deps. - // - fragmentNode and parentFragmentRef are also captured by including - // fragmentIdentifier - // eslint-disable-next-line react-hooks/exhaustive-deps - [fragmentIdentifier, dispatch, disposeQuery, identifierValue, loadQuery], - ); -} - -let debugFunctions; -if (__DEV__) { - debugFunctions = { - getInitialIDAndType( - memoRefetchVariables: ?Variables, - fragmentRefPathInResponse: $ReadOnlyArray, - environment: IEnvironment, - ): ?DebugIDandTypename { - const {Record} = require('relay-runtime'); - const id = memoRefetchVariables?.id; - if ( - fragmentRefPathInResponse.length !== 1 || - fragmentRefPathInResponse[0] !== 'node' || - id == null - ) { - return null; - } - const recordSource = environment.getStore().getSource(); - const record = recordSource.get(id); - const typename = record && Record.getType(record); - if (typename == null) { - return null; - } - return { - id, - typename, - }; - }, - - checkSameTypeAfterRefetch( - previousIDAndType: ?DebugIDandTypename, - environment: IEnvironment, - fragmentNode: ReaderFragment, - componentDisplayName: string, - ): void { - const {Record} = require('relay-runtime'); - if (!previousIDAndType) { - return; - } - const recordSource = environment.getStore().getSource(); - const record = recordSource.get(previousIDAndType.id); - const typename = record && Record.getType(record); - if (typename !== previousIDAndType.typename) { - warning( - false, - 'Relay: Call to `refetch` returned data with a different ' + - 
'__typename: was `%s`, now `%s`, on `%s` in `%s`. ' + - 'Please make sure the server correctly implements' + - 'unique id requirement.', - previousIDAndType.typename, - typename, - fragmentNode.name, - componentDisplayName, - ); - } - }, - - checkSameIDAfterRefetch( - previousIDAndTypename: ?DebugIDandTypename, - refetchedFragmentRef: mixed, - fragmentNode: ReaderFragment, - componentDisplayName: string, - ): void { - if (previousIDAndTypename == null) { - return; - } - const {ID_KEY} = require('relay-runtime'); - // $FlowExpectedError[incompatible-use] - const resultID = refetchedFragmentRef[ID_KEY]; - if (resultID != null && resultID !== previousIDAndTypename.id) { - warning( - false, - 'Relay: Call to `refetch` returned a different id, expected ' + - '`%s`, got `%s`, on `%s` in `%s`. ' + - 'Please make sure the server correctly implements ' + - 'unique id requirement.', - resultID, - previousIDAndTypename.id, - fragmentNode.name, - componentDisplayName, - ); - } - }, - }; -} - -module.exports = useRefetchableFragmentNode; diff --git a/packages/react-relay/relay-hooks/react-cache/useRefetchableFragment_REACT_CACHE.js b/packages/react-relay/relay-hooks/react-cache/useRefetchableFragment_REACT_CACHE.js deleted file mode 100644 index 6a82008c2c479..0000000000000 --- a/packages/react-relay/relay-hooks/react-cache/useRefetchableFragment_REACT_CACHE.js +++ /dev/null @@ -1,65 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {RefetchFnDynamic} from './useRefetchableFragmentInternal_REACT_CACHE'; -import type { - FragmentType, - GraphQLTaggedNode, - OperationType, -} from 'relay-runtime'; - -const useStaticFragmentNodeWarning = require('../useStaticFragmentNodeWarning'); -const useRefetchableFragmentInternal = require('./useRefetchableFragmentInternal_REACT_CACHE'); -const {useDebugValue} = require('react'); -const {getFragment} = require('relay-runtime'); - -type ReturnType = [ - // NOTE: This $Call ensures that the type of the returned data is either: - // - nullable if the provided ref type is nullable - // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - $Call< - & (( { +$data?: TFragmentData, ... }) => TFragmentData) - & ((?{ +$data?: TFragmentData, ... }) => ?TFragmentData), - TKey, - >, - RefetchFnDynamic, -]; - -function useRefetchableFragment< - TQuery: OperationType, - TKey: ?{+$data?: mixed, +$fragmentSpreads: FragmentType, ...}, ->( - fragmentInput: GraphQLTaggedNode, - fragmentRef: TKey, -): ReturnType { - const fragmentNode = getFragment(fragmentInput); - useStaticFragmentNodeWarning( - fragmentNode, - 'first argument of useRefetchableFragment()', - ); - const {fragmentData, refetch} = useRefetchableFragmentInternal( - fragmentNode, - fragmentRef, - 'useRefetchableFragment()', - ); - if (__DEV__) { - // eslint-disable-next-line react-hooks/rules-of-hooks - useDebugValue({fragment: fragmentNode.name, data: fragmentData}); - } - /* $FlowExpectedError[prop-missing] : Exposed options is a subset of internal - * options */ - return [fragmentData, (refetch: RefetchFnDynamic)]; -} - -module.exports = useRefetchableFragment; diff --git a/packages/react-relay/relay-hooks/readFragmentInternal.js b/packages/react-relay/relay-hooks/readFragmentInternal.js new file mode 100644 index 0000000000000..25920a144a9cd --- /dev/null +++ 
b/packages/react-relay/relay-hooks/readFragmentInternal.js @@ -0,0 +1,302 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {QueryResult} from './QueryResource'; +import type { + CacheConfig, + FetchPolicy, + IEnvironment, + ReaderFragment, + ReaderSelector, + SelectorData, + Snapshot, +} from 'relay-runtime'; +import type {MissingClientEdgeRequestInfo} from 'relay-runtime/store/RelayStoreTypes'; + +const {getQueryResourceForEnvironment} = require('./QueryResource'); +const invariant = require('invariant'); +const { + __internal: {fetchQuery: fetchQueryInternal}, + RelayFeatureFlags, + createOperationDescriptor, + getPendingOperationsForFragment, + getSelector, + getVariablesFromFragment, + handlePotentialSnapshotErrors, +} = require('relay-runtime'); +const warning = require('warning'); + +type FragmentQueryOptions = { + fetchPolicy?: FetchPolicy, + networkCacheConfig?: ?CacheConfig, +}; + +type FragmentState = $ReadOnly< + | {kind: 'bailout'} + | {kind: 'singular', snapshot: Snapshot, epoch: number} + | {kind: 'plural', snapshots: $ReadOnlyArray, epoch: number}, +>; + +function isMissingData(state: FragmentState): boolean { + if (state.kind === 'bailout') { + return false; + } else if (state.kind === 'singular') { + return state.snapshot.isMissingData; + } else { + return state.snapshots.some(s => s.isMissingData); + } +} + +function getMissingClientEdges( + state: FragmentState, +): $ReadOnlyArray | null { + if (state.kind === 'bailout') { + return null; + } else if (state.kind === 'singular') { + return state.snapshot.missingClientEdges ?? null; + } else { + let edges: null | Array = null; + for (const snapshot of state.snapshots) { + if (snapshot.missingClientEdges) { + edges = edges ?? 
[]; + for (const edge of snapshot.missingClientEdges) { + edges.push(edge); + } + } + } + return edges; + } +} + +function handlePotentialSnapshotErrorsForState( + environment: IEnvironment, + state: FragmentState, +): void { + if (state.kind === 'singular') { + handlePotentialSnapshotErrors( + environment, + state.snapshot.missingRequiredFields, + state.snapshot.relayResolverErrors, + state.snapshot.errorResponseFields, + state.snapshot.selector.node.metadata?.throwOnFieldError ?? false, + ); + } else if (state.kind === 'plural') { + for (const snapshot of state.snapshots) { + handlePotentialSnapshotErrors( + environment, + snapshot.missingRequiredFields, + snapshot.relayResolverErrors, + snapshot.errorResponseFields, + snapshot.selector.node.metadata?.throwOnFieldError ?? false, + ); + } + } +} + +function handleMissingClientEdge( + environment: IEnvironment, + parentFragmentNode: ReaderFragment, + parentFragmentRef: mixed, + missingClientEdgeRequestInfo: MissingClientEdgeRequestInfo, + queryOptions?: FragmentQueryOptions, +): QueryResult { + const originalVariables = getVariablesFromFragment( + parentFragmentNode, + parentFragmentRef, + ); + const variables = { + ...originalVariables, + id: missingClientEdgeRequestInfo.clientEdgeDestinationID, // TODO should be a reserved name + }; + const queryOperationDescriptor = createOperationDescriptor( + missingClientEdgeRequestInfo.request, + variables, + queryOptions?.networkCacheConfig, + ); + // This may suspend. We don't need to do anything with the results; all we're + // doing here is started the query if needed and retaining and releasing it + // according to the component mount/suspense cycle; QueryResource + // already handles this by itself. 
+ const QueryResource = getQueryResourceForEnvironment(environment); + return QueryResource.prepare( + queryOperationDescriptor, + fetchQueryInternal(environment, queryOperationDescriptor), + queryOptions?.fetchPolicy, + ); +} + +function getFragmentState( + environment: IEnvironment, + fragmentSelector: ?ReaderSelector, +): FragmentState { + if (fragmentSelector == null) { + return {kind: 'bailout'}; + } else if (fragmentSelector.kind === 'PluralReaderSelector') { + if (fragmentSelector.selectors.length === 0) { + return {kind: 'bailout'}; + } else { + return { + kind: 'plural', + snapshots: fragmentSelector.selectors.map(s => environment.lookup(s)), + epoch: environment.getStore().getEpoch(), + }; + } + } else { + return { + kind: 'singular', + snapshot: environment.lookup(fragmentSelector), + epoch: environment.getStore().getEpoch(), + }; + } +} + +// fragmentNode cannot change during the lifetime of the component, though fragmentRef may change. +function readFragmentInternal( + environment: IEnvironment, + fragmentNode: ReaderFragment, + fragmentRef: mixed, + hookDisplayName: string, + queryOptions?: FragmentQueryOptions, + fragmentKey?: string, +): { + +data: ?SelectorData | Array, + +clientEdgeQueries: ?Array, +} { + const fragmentSelector = getSelector(fragmentNode, fragmentRef); + const isPlural = fragmentNode?.metadata?.plural === true; + + if (isPlural) { + invariant( + fragmentRef == null || Array.isArray(fragmentRef), + 'Relay: Expected fragment pointer%s for fragment `%s` to be ' + + 'an array, instead got `%s`. Remove `@relay(plural: true)` ' + + 'from fragment `%s` to allow the prop to be an object.', + fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', + fragmentNode.name, + typeof fragmentRef, + fragmentNode.name, + ); + } else { + invariant( + !Array.isArray(fragmentRef), + 'Relay: Expected fragment pointer%s for fragment `%s` not to be ' + + 'an array, instead got `%s`. 
Add `@relay(plural: true)` ' + + 'to fragment `%s` to allow the prop to be an array.', + fragmentKey != null ? ` for key \`${fragmentKey}\`` : '', + fragmentNode.name, + typeof fragmentRef, + fragmentNode.name, + ); + } + invariant( + fragmentRef == null || + (isPlural && Array.isArray(fragmentRef) && fragmentRef.length === 0) || + fragmentSelector != null, + 'Relay: Expected to receive an object where `...%s` was spread, ' + + 'but the fragment reference was not found`. This is most ' + + 'likely the result of:\n' + + "- Forgetting to spread `%s` in `%s`'s parent's fragment.\n" + + '- Conditionally fetching `%s` but unconditionally passing %s prop ' + + 'to `%s`. If the parent fragment only fetches the fragment conditionally ' + + '- with e.g. `@include`, `@skip`, or inside a `... on SomeType { }` ' + + 'spread - then the fragment reference will not exist. ' + + 'In this case, pass `null` if the conditions for evaluating the ' + + 'fragment are not met (e.g. if the `@include(if)` value is false.)', + fragmentNode.name, + fragmentNode.name, + hookDisplayName, + fragmentNode.name, + fragmentKey == null ? 'a fragment reference' : `the \`${fragmentKey}\``, + hookDisplayName, + ); + + const state = getFragmentState(environment, fragmentSelector); + + // Handle the queries for any missing client edges; this may suspend. + // FIXME handle client edges in parallel. + let clientEdgeQueries = null; + if (fragmentNode.metadata?.hasClientEdges === true) { + const missingClientEdges = getMissingClientEdges(state); + if (missingClientEdges?.length) { + clientEdgeQueries = ([]: Array); + for (const edge of missingClientEdges) { + clientEdgeQueries.push( + handleMissingClientEdge( + environment, + fragmentNode, + fragmentRef, + edge, + queryOptions, + ), + ); + } + } + } + + if (isMissingData(state)) { + // Suspend if an active operation bears on this fragment, either the + // fragment's owner or some other mutation etc. 
that could affect it: + invariant(fragmentSelector != null, 'refinement, see invariants above'); + const fragmentOwner = + fragmentSelector.kind === 'PluralReaderSelector' + ? fragmentSelector.selectors[0].owner + : fragmentSelector.owner; + const pendingOperationsResult = getPendingOperationsForFragment( + environment, + fragmentNode, + fragmentOwner, + ); + if (pendingOperationsResult) { + throw pendingOperationsResult.promise; + } + // Report required fields only if we're not suspending, since that means + // they're missing even though we are out of options for possibly fetching them: + handlePotentialSnapshotErrorsForState(environment, state); + } + + let data: ?SelectorData | Array; + if (state.kind === 'bailout') { + data = isPlural ? [] : null; + } else if (state.kind === 'singular') { + data = state.snapshot.data; + } else { + data = state.snapshots.map(s => s.data); + } + + if (RelayFeatureFlags.LOG_MISSING_RECORDS_IN_PROD || __DEV__) { + if ( + fragmentRef != null && + (data === undefined || + (Array.isArray(data) && + data.length > 0 && + data.every(d => d === undefined))) + ) { + warning( + false, + 'Relay: Expected to have been able to read non-null data for ' + + 'fragment `%s` declared in ' + + '`%s`, since fragment reference was non-null. 
' + + "Make sure that that `%s`'s parent isn't " + + 'holding on to and/or passing a fragment reference for data that ' + + 'has been deleted.', + fragmentNode.name, + hookDisplayName, + hookDisplayName, + ); + } + } + + return {data, clientEdgeQueries}; +} + +module.exports = readFragmentInternal; diff --git a/packages/react-relay/relay-hooks/useClientQuery.js b/packages/react-relay/relay-hooks/useClientQuery.js index 983316ba1f5a7..0d8204475680a 100644 --- a/packages/react-relay/relay-hooks/useClientQuery.js +++ b/packages/react-relay/relay-hooks/useClientQuery.js @@ -20,8 +20,8 @@ const useLazyLoadQuery = require('./useLazyLoadQuery'); * These queries are consist of queries for client-only data, * schematized via local schema extensions and/or Relay resolvers. */ -function useClientQuery( - gqlQuery: ClientQuery, +hook useClientQuery( + gqlQuery: ClientQuery, variables: TVariables, options?: { UNSTABLE_renderPolicy?: RenderPolicy, diff --git a/packages/react-relay/relay-hooks/useEntryPointLoader.js b/packages/react-relay/relay-hooks/useEntryPointLoader.js index 7040a87fb67a8..6c59cbf8a011f 100644 --- a/packages/react-relay/relay-hooks/useEntryPointLoader.js +++ b/packages/react-relay/relay-hooks/useEntryPointLoader.js @@ -50,7 +50,7 @@ type NullEntryPointReference = { }; const initialNullEntryPointReferenceState = {kind: 'NullEntryPointReference'}; -function useLoadEntryPoint< +hook useLoadEntryPoint< TEntryPointParams: {...}, TPreloadedQueries: {...}, TPreloadedEntryPoints: {...}, diff --git a/packages/react-relay/relay-hooks/useFetchTrackingRef.js b/packages/react-relay/relay-hooks/useFetchTrackingRef.js index 5e79d27fe3e44..be9f04b822f21 100644 --- a/packages/react-relay/relay-hooks/useFetchTrackingRef.js +++ b/packages/react-relay/relay-hooks/useFetchTrackingRef.js @@ -28,7 +28,7 @@ const {useCallback, useEffect} = require('react'); * The additional functions returned by this Hook can be used to mutate * the ref. 
*/ -function useFetchTrackingRef(): { +hook useFetchTrackingRef(): { isFetchingRef: {current: ?boolean, ...}, startFetch: Subscription => void, disposeFetch: () => void, diff --git a/packages/react-relay/relay-hooks/useFragment.js b/packages/react-relay/relay-hooks/useFragment.js index 724b6bd586323..8a4f7f2735700 100644 --- a/packages/react-relay/relay-hooks/useFragment.js +++ b/packages/react-relay/relay-hooks/useFragment.js @@ -13,9 +13,8 @@ import type {Fragment, FragmentType, GraphQLTaggedNode} from 'relay-runtime'; -const HooksImplementation = require('./HooksImplementation'); const {useTrackLoadQueryInRender} = require('./loadQuery'); -const useFragmentNode = require('./useFragmentNode'); +const useFragmentInternal = require('./useFragmentInternal'); const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning'); const {useDebugValue} = require('react'); const {getFragment} = require('relay-runtime'); @@ -26,48 +25,43 @@ type HasSpread = { }; // if the key is non-nullable, return non-nullable value -declare function useFragment( +declare hook useFragment( fragment: Fragment, key: HasSpread, ): TData; +// if the key is nullable, return nullable value +declare hook useFragment( + fragment: Fragment, + key: ?HasSpread, +): ?TData; + // if the key is a non-nullable array of keys, return non-nullable array -declare function useFragment( +declare hook useFragment( fragment: Fragment, key: $ReadOnlyArray>, ): TData; -// if the key is null/void, return null/void value -declare function useFragment( +// if the key is a nullable array of keys, return nullable array +declare hook useFragment( fragment: Fragment, - key: null | void, -): null | void; + key: ?$ReadOnlyArray>, +): ?TData; -function useFragment_LEGACY(fragment: GraphQLTaggedNode, key: mixed): mixed { +hook useFragment(fragment: GraphQLTaggedNode, key: mixed): mixed { // We need to use this hook in order to be able to track if // loadQuery was called during render useTrackLoadQueryInRender(); 
const fragmentNode = getFragment(fragment); useStaticFragmentNodeWarning(fragmentNode, 'first argument of useFragment()'); - const {data} = useFragmentNode(fragmentNode, key, 'useFragment()'); + const data = useFragmentInternal(fragmentNode, key, 'useFragment()'); if (__DEV__) { // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] useDebugValue({fragment: fragmentNode.name, data}); } return data; } -function useFragment(fragment: GraphQLTaggedNode, key: mixed): mixed { - const impl = HooksImplementation.get(); - if (impl) { - // $FlowFixMe This is safe because impl.useFragment has the type of useFragment... - return impl.useFragment(fragment, key); - // (i.e. type declared above, but not the supertype used in this function definition) - } else { - // eslint-disable-next-line react-hooks/rules-of-hooks - return useFragment_LEGACY(fragment, key); - } -} - module.exports = useFragment; diff --git a/packages/react-relay/relay-hooks/useFragmentInternal.js b/packages/react-relay/relay-hooks/useFragmentInternal.js new file mode 100644 index 0000000000000..e843769dcf6e4 --- /dev/null +++ b/packages/react-relay/relay-hooks/useFragmentInternal.js @@ -0,0 +1,669 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {QueryResult} from './QueryResource'; +import type { + CacheConfig, + FetchPolicy, + IEnvironment, + ReaderFragment, + ReaderSelector, + SelectorData, + Snapshot, +} from 'relay-runtime'; +import type { + MissingClientEdgeRequestInfo, + MissingLiveResolverField, +} from 'relay-runtime/store/RelayStoreTypes'; + +const {getQueryResourceForEnvironment} = require('./QueryResource'); +const useRelayEnvironment = require('./useRelayEnvironment'); +const invariant = require('invariant'); +const {useDebugValue, useEffect, useMemo, useRef, useState} = require('react'); +const { + __internal: {fetchQuery: fetchQueryInternal, getPromiseForActiveRequest}, + RelayFeatureFlags, + areEqualSelectors, + createOperationDescriptor, + getPendingOperationsForFragment, + getSelector, + getVariablesFromFragment, + handlePotentialSnapshotErrors, + recycleNodesInto, +} = require('relay-runtime'); +const warning = require('warning'); + +type FragmentQueryOptions = { + fetchPolicy?: FetchPolicy, + networkCacheConfig?: ?CacheConfig, +}; + +type FragmentState = $ReadOnly< + | {kind: 'bailout'} + | {kind: 'singular', snapshot: Snapshot, epoch: number} + | {kind: 'plural', snapshots: $ReadOnlyArray, epoch: number}, +>; + +type StateUpdaterFunction = ((T) => T) => void; + +function isMissingData(state: FragmentState): boolean { + if (state.kind === 'bailout') { + return false; + } else if (state.kind === 'singular') { + return state.snapshot.isMissingData; + } else { + return state.snapshots.some(s => s.isMissingData); + } +} + +function getMissingClientEdges( + state: FragmentState, +): $ReadOnlyArray | null { + if (state.kind === 'bailout') { + return null; + } else if (state.kind === 'singular') { + return state.snapshot.missingClientEdges ?? null; + } else { + let edges: null | Array = null; + for (const snapshot of state.snapshots) { + if (snapshot.missingClientEdges) { + edges = edges ?? 
[]; + for (const edge of snapshot.missingClientEdges) { + edges.push(edge); + } + } + } + return edges; + } +} + +function getSuspendingLiveResolver( + state: FragmentState, +): $ReadOnlyArray | null { + if (state.kind === 'bailout') { + return null; + } else if (state.kind === 'singular') { + return state.snapshot.missingLiveResolverFields ?? null; + } else { + let missingFields: null | Array = null; + for (const snapshot of state.snapshots) { + if (snapshot.missingLiveResolverFields) { + missingFields = missingFields ?? []; + for (const edge of snapshot.missingLiveResolverFields) { + missingFields.push(edge); + } + } + } + return missingFields; + } +} + +function handlePotentialSnapshotErrorsForState( + environment: IEnvironment, + state: FragmentState, +): void { + if (state.kind === 'singular') { + handlePotentialSnapshotErrors( + environment, + state.snapshot.missingRequiredFields, + state.snapshot.relayResolverErrors, + state.snapshot.errorResponseFields, + state.snapshot.selector.node.metadata?.throwOnFieldError ?? false, + ); + } else if (state.kind === 'plural') { + for (const snapshot of state.snapshots) { + handlePotentialSnapshotErrors( + environment, + snapshot.missingRequiredFields, + snapshot.relayResolverErrors, + snapshot.errorResponseFields, + snapshot.selector.node.metadata?.throwOnFieldError ?? false, + ); + } + } +} + +/** + * Check for updates to the store that occurred concurrently with rendering the given `state` value, + * returning a new (updated) state if there were updates or null if there were no changes. + */ +function handleMissedUpdates( + environment: IEnvironment, + state: FragmentState, +): null | [/* has data changed */ boolean, FragmentState] { + if (state.kind === 'bailout') { + return null; + } + // FIXME this is invalid if we've just switched environments. 
+ const currentEpoch = environment.getStore().getEpoch(); + if (currentEpoch === state.epoch) { + return null; + } + // The store has updated since we rendered (without us being subscribed yet), + // so check for any updates to the data we're rendering: + if (state.kind === 'singular') { + const currentSnapshot = environment.lookup(state.snapshot.selector); + const updatedData = recycleNodesInto( + state.snapshot.data, + currentSnapshot.data, + ); + const updatedCurrentSnapshot: Snapshot = { + data: updatedData, + isMissingData: currentSnapshot.isMissingData, + missingClientEdges: currentSnapshot.missingClientEdges, + missingLiveResolverFields: currentSnapshot.missingLiveResolverFields, + seenRecords: currentSnapshot.seenRecords, + selector: currentSnapshot.selector, + missingRequiredFields: currentSnapshot.missingRequiredFields, + relayResolverErrors: currentSnapshot.relayResolverErrors, + errorResponseFields: currentSnapshot.errorResponseFields, + }; + return [ + updatedData !== state.snapshot.data, + { + kind: 'singular', + snapshot: updatedCurrentSnapshot, + epoch: currentEpoch, + }, + ]; + } else { + let didMissUpdates = false; + const currentSnapshots = []; + for (let index = 0; index < state.snapshots.length; index++) { + const snapshot = state.snapshots[index]; + const currentSnapshot = environment.lookup(snapshot.selector); + const updatedData = recycleNodesInto(snapshot.data, currentSnapshot.data); + const updatedCurrentSnapshot: Snapshot = { + data: updatedData, + isMissingData: currentSnapshot.isMissingData, + missingClientEdges: currentSnapshot.missingClientEdges, + missingLiveResolverFields: currentSnapshot.missingLiveResolverFields, + seenRecords: currentSnapshot.seenRecords, + selector: currentSnapshot.selector, + missingRequiredFields: currentSnapshot.missingRequiredFields, + relayResolverErrors: currentSnapshot.relayResolverErrors, + errorResponseFields: currentSnapshot.errorResponseFields, + }; + if (updatedData !== snapshot.data) { + 
didMissUpdates = true; + } + currentSnapshots.push(updatedCurrentSnapshot); + } + invariant( + currentSnapshots.length === state.snapshots.length, + 'Expected same number of snapshots', + ); + return [ + didMissUpdates, + { + kind: 'plural', + snapshots: currentSnapshots, + epoch: currentEpoch, + }, + ]; + } +} + +function handleMissingClientEdge( + environment: IEnvironment, + parentFragmentNode: ReaderFragment, + parentFragmentRef: mixed, + missingClientEdgeRequestInfo: MissingClientEdgeRequestInfo, + queryOptions?: FragmentQueryOptions, +): [QueryResult, ?Promise] { + const originalVariables = getVariablesFromFragment( + parentFragmentNode, + parentFragmentRef, + ); + const variables = { + ...originalVariables, + id: missingClientEdgeRequestInfo.clientEdgeDestinationID, // TODO should be a reserved name + }; + const queryOperationDescriptor = createOperationDescriptor( + missingClientEdgeRequestInfo.request, + variables, + queryOptions?.networkCacheConfig, + ); + // This may suspend. We don't need to do anything with the results; all we're + // doing here is starting the query if needed and retaining and releasing it + // according to the component mount/suspense cycle; QueryResource + // already handles this by itself.
+ const QueryResource = getQueryResourceForEnvironment(environment); + const queryResult = QueryResource.prepare( + queryOperationDescriptor, + fetchQueryInternal(environment, queryOperationDescriptor), + queryOptions?.fetchPolicy, + ); + + return [ + queryResult, + getPromiseForActiveRequest(environment, queryOperationDescriptor.request), + ]; +} + +function subscribeToSnapshot( + environment: IEnvironment, + state: FragmentState, + setState: StateUpdaterFunction, + hasPendingStateChanges: {current: boolean}, +): () => void { + if (state.kind === 'bailout') { + return () => {}; + } else if (state.kind === 'singular') { + const disposable = environment.subscribe(state.snapshot, latestSnapshot => { + setState(prevState => { + // In theory a setState from a subscription could be batched together + // with a setState to change the fragment selector. Guard against this + // by bailing out of the state update if the selector has changed. + if ( + prevState.kind !== 'singular' || + prevState.snapshot.selector !== latestSnapshot.selector + ) { + const updates = handleMissedUpdates(environment, prevState); + if (updates != null) { + const [dataChanged, nextState] = updates; + environment.__log({ + name: 'useFragment.subscription.missedUpdates', + hasDataChanges: dataChanged, + }); + hasPendingStateChanges.current = dataChanged; + return dataChanged ? nextState : prevState; + } else { + return prevState; + } + } + + hasPendingStateChanges.current = true; + return { + kind: 'singular', + snapshot: latestSnapshot, + epoch: environment.getStore().getEpoch(), + }; + }); + }); + return () => { + disposable.dispose(); + }; + } else { + const disposables = state.snapshots.map((snapshot, index) => + environment.subscribe(snapshot, latestSnapshot => { + setState(prevState => { + // In theory a setState from a subscription could be batched together + // with a setState to change the fragment selector. 
Guard against this + // by bailing out of the state update if the selector has changed. + if ( + prevState.kind !== 'plural' || + prevState.snapshots[index]?.selector !== latestSnapshot.selector + ) { + const updates = handleMissedUpdates(environment, prevState); + if (updates != null) { + const [dataChanged, nextState] = updates; + environment.__log({ + name: 'useFragment.subscription.missedUpdates', + hasDataChanges: dataChanged, + }); + hasPendingStateChanges.current = + hasPendingStateChanges.current || dataChanged; + return dataChanged ? nextState : prevState; + } else { + return prevState; + } + } + const updated = [...prevState.snapshots]; + updated[index] = latestSnapshot; + hasPendingStateChanges.current = true; + return { + kind: 'plural', + snapshots: updated, + epoch: environment.getStore().getEpoch(), + }; + }); + }), + ); + return () => { + for (const d of disposables) { + d.dispose(); + } + }; + } +} + +function getFragmentState( + environment: IEnvironment, + fragmentSelector: ?ReaderSelector, +): FragmentState { + if (fragmentSelector == null) { + return {kind: 'bailout'}; + } else if (fragmentSelector.kind === 'PluralReaderSelector') { + // Note that if fragmentRef is an empty array, fragmentSelector will be null so we'll hit the above case. + // Null is returned by getSelector if fragmentRef has no non-null items. + return { + kind: 'plural', + snapshots: fragmentSelector.selectors.map(s => environment.lookup(s)), + epoch: environment.getStore().getEpoch(), + }; + } else { + return { + kind: 'singular', + snapshot: environment.lookup(fragmentSelector), + epoch: environment.getStore().getEpoch(), + }; + } +} + +// fragmentNode cannot change during the lifetime of the component, though fragmentRef may change. 
+hook useFragmentInternal( + fragmentNode: ReaderFragment, + fragmentRef: mixed, + hookDisplayName: string, + queryOptions?: FragmentQueryOptions, +): ?SelectorData | Array { + const fragmentSelector = useMemo( + () => getSelector(fragmentNode, fragmentRef), + [fragmentNode, fragmentRef], + ); + + const isPlural = fragmentNode?.metadata?.plural === true; + + if (isPlural) { + invariant( + fragmentRef == null || Array.isArray(fragmentRef), + 'Relay: Expected fragment pointer%s for fragment `%s` to be ' + + 'an array, instead got `%s`. Remove `@relay(plural: true)` ' + + 'from fragment `%s` to allow the prop to be an object.', + fragmentNode.name, + typeof fragmentRef, + fragmentNode.name, + ); + } else { + invariant( + !Array.isArray(fragmentRef), + 'Relay: Expected fragment pointer%s for fragment `%s` not to be ' + + 'an array, instead got `%s`. Add `@relay(plural: true)` ' + + 'to fragment `%s` to allow the prop to be an array.', + fragmentNode.name, + typeof fragmentRef, + fragmentNode.name, + ); + } + invariant( + fragmentRef == null || + (isPlural && Array.isArray(fragmentRef) && fragmentRef.length === 0) || + fragmentSelector != null, + 'Relay: Expected to receive an object where `...%s` was spread, ' + + 'but the fragment reference was not found`. This is most ' + + 'likely the result of:\n' + + "- Forgetting to spread `%s` in `%s`'s parent's fragment.\n" + + '- Conditionally fetching `%s` but unconditionally passing %s prop ' + + 'to `%s`. If the parent fragment only fetches the fragment conditionally ' + + '- with e.g. `@include`, `@skip`, or inside a `... on SomeType { }` ' + + 'spread - then the fragment reference will not exist. ' + + 'In this case, pass `null` if the conditions for evaluating the ' + + 'fragment are not met (e.g. 
if the `@include(if)` value is false.)', + fragmentNode.name, + fragmentNode.name, + hookDisplayName, + fragmentNode.name, + hookDisplayName, + ); + + const environment = useRelayEnvironment(); + const [_state, setState] = useState(() => + getFragmentState(environment, fragmentSelector), + ); + let state = _state; + + // This copy of the state we only update when something requires us to + // unsubscribe and re-subscribe, namely a changed environment or + // fragment selector. + const [_subscribedState, setSubscribedState] = useState(state); + // FIXME since this is used as an effect dependency, it needs to be memoized. + let subscribedState = _subscribedState; + + const [previousFragmentSelector, setPreviousFragmentSelector] = + useState(fragmentSelector); + const [previousEnvironment, setPreviousEnvironment] = useState(environment); + if ( + !areEqualSelectors(fragmentSelector, previousFragmentSelector) || + environment !== previousEnvironment + ) { + // Enqueue setState to record the new selector and state + setPreviousFragmentSelector(fragmentSelector); + setPreviousEnvironment(environment); + const newState = getFragmentState(environment, fragmentSelector); + setState(newState); + setSubscribedState(newState); // This causes us to form a new subscription + // But render with the latest state w/o waiting for the setState. Otherwise + // the component would render the wrong information temporarily (including + // possibly incorrectly triggering some warnings below). + state = newState; + subscribedState = newState; + } + + // The purpose of this is to detect whether we have ever committed, because we + // don't suspend on store updates, only when the component either is first trying + // to mount or when our selector changes. The selector change in particular is + // how we suspend for pagination and refetch.
Also, fragment selector can be null + // or undefined, so we use false as a special value to distinguish from all fragment + // selectors; false means that the component hasn't mounted yet. + const committedFragmentSelectorRef = useRef(false); + useEffect(() => { + committedFragmentSelectorRef.current = fragmentSelector; + }, [fragmentSelector]); + + // Handle the queries for any missing client edges; this may suspend. + // FIXME handle client edges in parallel. + if (fragmentNode.metadata?.hasClientEdges === true) { + // The fragment is validated to be static (in useFragment) and hasClientEdges is + // a static (constant) property of the fragment. In practice, this effect will + // always or never run for a given invocation of this hook. + // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] + const [clientEdgeQueries, activeRequestPromises] = useMemo(() => { + const missingClientEdges = getMissingClientEdges(state); + // eslint-disable-next-line no-shadow + let clientEdgeQueries; + const activeRequestPromises = []; + if (missingClientEdges?.length) { + clientEdgeQueries = ([]: Array); + for (const edge of missingClientEdges) { + const [queryResult, requestPromise] = handleMissingClientEdge( + environment, + fragmentNode, + fragmentRef, + edge, + queryOptions, + ); + clientEdgeQueries.push(queryResult); + if (requestPromise != null) { + activeRequestPromises.push(requestPromise); + } + } + } + return [clientEdgeQueries, activeRequestPromises]; + }, [state, environment, fragmentNode, fragmentRef, queryOptions]); + + if (activeRequestPromises.length) { + throw Promise.all(activeRequestPromises); + } + + // See above note + // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] + useEffect(() => { + const QueryResource = getQueryResourceForEnvironment(environment); + if (clientEdgeQueries?.length) { + const disposables = []; + for (const query of clientEdgeQueries) { + 
disposables.push(QueryResource.retain(query)); + } + return () => { + for (const disposable of disposables) { + disposable.dispose(); + } + }; + } + }, [environment, clientEdgeQueries]); + } + + if (isMissingData(state)) { + // Suspend if a Live Resolver within this fragment is in a suspended state: + const suspendingLiveResolvers = getSuspendingLiveResolver(state); + if (suspendingLiveResolvers != null && suspendingLiveResolvers.length > 0) { + throw Promise.all( + suspendingLiveResolvers.map(({liveStateID}) => { + // $FlowFixMe[prop-missing] This is expected to be a LiveResolverStore + return environment.getStore().getLiveResolverPromise(liveStateID); + }), + ); + } + // Suspend if an active operation bears on this fragment, either the + // fragment's owner or some other mutation etc. that could affect it. + // We only suspend when the component is first trying to mount or changing + // selectors, not if data becomes missing later: + if ( + RelayFeatureFlags.ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE || + environment !== previousEnvironment || + !committedFragmentSelectorRef.current || + // $FlowFixMe[react-rule-unsafe-ref] + !areEqualSelectors(committedFragmentSelectorRef.current, fragmentSelector) + ) { + invariant(fragmentSelector != null, 'refinement, see invariants above'); + const fragmentOwner = + fragmentSelector.kind === 'PluralReaderSelector' + ? 
fragmentSelector.selectors[0].owner + : fragmentSelector.owner; + const pendingOperationsResult = getPendingOperationsForFragment( + environment, + fragmentNode, + fragmentOwner, + ); + if (pendingOperationsResult) { + throw pendingOperationsResult.promise; + } + } + } + + // Report required fields only if we're not suspending, since that means + // they're missing even though we are out of options for possibly fetching them: + handlePotentialSnapshotErrorsForState(environment, state); + + const hasPendingStateChanges = useRef(false); + + useEffect(() => { + // Check for updates since the state was rendered + let currentState = subscribedState; + const updates = handleMissedUpdates(environment, subscribedState); + if (updates !== null) { + const [didMissUpdates, updatedState] = updates; + // TODO: didMissUpdates only checks for changes to snapshot data, but it's possible + // that other snapshot properties may have changed that should also trigger a re-render, + // such as changed missing resolver fields, missing client edges, etc. + // A potential alternative is for handleMissedUpdates() to recycle the entire state + // value, and return the new (recycled) state only if there was some change. In that + // case the code would always setState if something in the snapshot changed, in addition + // to using the latest snapshot to subscribe. 
+ if (didMissUpdates) { + setState(updatedState); + } + currentState = updatedState; + } + return subscribeToSnapshot( + environment, + currentState, + setState, + hasPendingStateChanges, + ); + }, [environment, subscribedState]); + + if (hasPendingStateChanges.current) { + const updates = handleMissedUpdates(environment, state); + if (updates != null) { + const [hasStateUpdates, updatedState] = updates; + if (hasStateUpdates) { + setState(updatedState); + state = updatedState; + } + } + // $FlowFixMe[react-rule-unsafe-ref] + hasPendingStateChanges.current = false; + } + + let data: ?SelectorData | Array; + if (isPlural) { + // Plural fragments require allocating an array of the snapshot data values, + // which has to be memoized to avoid triggering downstream re-renders. + // + // Note that isPlural is a constant property of the fragment and does not change + // for a particular useFragment invocation site + const fragmentRefIsNullish = fragmentRef == null; // for less sensitive memoization + // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] + data = useMemo(() => { + if (state.kind === 'bailout') { + // Bailout state can happen if the fragmentRef is a plural array that is empty or has no + // non-null entries. In that case, the compatible behavior is to return [] instead of null. + return fragmentRefIsNullish ? 
null : []; + } else { + invariant( + state.kind === 'plural', + 'Expected state to be plural because fragment is plural', + ); + return state.snapshots.map(s => s.data); + } + }, [state, fragmentRefIsNullish]); + } else if (state.kind === 'bailout') { + // This case doesn't allocate a new object so it doesn't have to be memoized + data = null; + } else { + // This case doesn't allocate a new object so it doesn't have to be memoized + invariant( + state.kind === 'singular', + 'Expected state to be singular because fragment is singular', + ); + data = state.snapshot.data; + } + + if (RelayFeatureFlags.LOG_MISSING_RECORDS_IN_PROD || __DEV__) { + if ( + fragmentRef != null && + (data === undefined || + (Array.isArray(data) && + data.length > 0 && + data.every(d => d === undefined))) + ) { + warning( + false, + 'Relay: Expected to have been able to read non-null data for ' + + 'fragment `%s` declared in ' + + '`%s`, since fragment reference was non-null. ' + + "Make sure that that `%s`'s parent isn't " + + 'holding on to and/or passing a fragment reference for data that ' + + 'has been deleted.', + fragmentNode.name, + hookDisplayName, + hookDisplayName, + ); + } + } + + if (__DEV__) { + // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] + useDebugValue({fragment: fragmentNode.name, data}); + } + + return data; +} + +module.exports = useFragmentInternal; diff --git a/packages/react-relay/relay-hooks/useIsMountedRef.js b/packages/react-relay/relay-hooks/useIsMountedRef.js index 8ce8443eb6898..e00a5ee650f09 100644 --- a/packages/react-relay/relay-hooks/useIsMountedRef.js +++ b/packages/react-relay/relay-hooks/useIsMountedRef.js @@ -13,7 +13,7 @@ const {useEffect, useRef} = require('react'); -function useIsMountedRef(): {current: boolean} { +hook useIsMountedRef(): {current: boolean} { const isMountedRef = useRef(true); useEffect(() => { diff --git a/packages/react-relay/relay-hooks/useIsOperationNodeActive.js 
b/packages/react-relay/relay-hooks/useIsOperationNodeActive.js index 66aff34feec99..0f204996ba797 100644 --- a/packages/react-relay/relay-hooks/useIsOperationNodeActive.js +++ b/packages/react-relay/relay-hooks/useIsOperationNodeActive.js @@ -23,7 +23,7 @@ const { const {useEffect, useState, useMemo} = React; -function useIsOperationNodeActive( +hook useIsOperationNodeActive( fragmentNode: ReaderFragment, fragmentRef: mixed, ): boolean { diff --git a/packages/react-relay/relay-hooks/useIsParentQueryActive.js b/packages/react-relay/relay-hooks/useIsParentQueryActive.js index fb97fe3a693f5..080598b7998db 100644 --- a/packages/react-relay/relay-hooks/useIsParentQueryActive.js +++ b/packages/react-relay/relay-hooks/useIsParentQueryActive.js @@ -17,9 +17,12 @@ const useIsOperationNodeActive = require('./useIsOperationNodeActive'); const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning'); const {getFragment} = require('relay-runtime'); -function useIsParentQueryActive< +hook useIsParentQueryActive< TKey: ?{+$data?: mixed, +$fragmentSpreads: FragmentType, ...}, ->(fragmentInput: GraphQLTaggedNode, fragmentRef: TKey): boolean { +>( + fragmentInput: GraphQLTaggedNode, + fragmentRef: TKey, +): boolean { const fragmentNode = getFragment(fragmentInput); useStaticFragmentNodeWarning( fragmentNode, diff --git a/packages/react-relay/relay-hooks/useLazyLoadQuery.js b/packages/react-relay/relay-hooks/useLazyLoadQuery.js index a699c65bf1440..8f754ae528bf0 100644 --- a/packages/react-relay/relay-hooks/useLazyLoadQuery.js +++ b/packages/react-relay/relay-hooks/useLazyLoadQuery.js @@ -30,7 +30,7 @@ const { // This separate type export is only needed as long as we are injecting // a separate hooks implementation in ./HooksImplementation -- it can // be removed after we stop doing that. 
-export type UseLazyLoadQueryHookType = ( +export type UseLazyLoadQueryHookType = hook ( gqlQuery: Query, variables: TVariables, options?: { @@ -41,7 +41,7 @@ export type UseLazyLoadQueryHookType = ( }, ) => TData; -function useLazyLoadQuery( +hook useLazyLoadQuery( gqlQuery: Query, variables: TVariables, options?: { @@ -75,4 +75,5 @@ function useLazyLoadQuery( return data; } +// $FlowFixMe[react-rule-hook-incompatible] module.exports = (useLazyLoadQuery: UseLazyLoadQueryHookType); diff --git a/packages/react-relay/relay-hooks/useLazyLoadQueryNode.js b/packages/react-relay/relay-hooks/useLazyLoadQueryNode.js index 71f15bdb178b2..761437071e424 100644 --- a/packages/react-relay/relay-hooks/useLazyLoadQueryNode.js +++ b/packages/react-relay/relay-hooks/useLazyLoadQueryNode.js @@ -26,13 +26,13 @@ const { getQueryResourceForEnvironment, } = require('./QueryResource'); const useFetchTrackingRef = require('./useFetchTrackingRef'); -const useFragmentNode = require('./useFragmentNode'); +const useFragmentInternal = require('./useFragmentInternal'); const useRelayEnvironment = require('./useRelayEnvironment'); const React = require('react'); const {useContext, useEffect, useState, useRef} = React; -function useLazyLoadQueryNode({ +hook useLazyLoadQueryNode({ query, componentDisplayName, fetchObservable, @@ -125,7 +125,7 @@ function useLazyLoadQueryNode({ }); const {fragmentNode, fragmentRef} = preparedQueryResult; - const {data} = useFragmentNode<$FlowFixMe>( + const data = useFragmentInternal( fragmentNode, fragmentRef, componentDisplayName, diff --git a/packages/react-relay/relay-hooks/useLoadMoreFunction.js b/packages/react-relay/relay-hooks/useLoadMoreFunction.js index 416aca20fba98..8c87e52222ed6 100644 --- a/packages/react-relay/relay-hooks/useLoadMoreFunction.js +++ b/packages/react-relay/relay-hooks/useLoadMoreFunction.js @@ -33,6 +33,7 @@ const { ConnectionInterface, createOperationDescriptor, getPaginationVariables, + getRefetchMetadata, getSelector, 
getValueAtPath, } = require('relay-runtime'); @@ -42,7 +43,7 @@ export type LoadMoreFn = ( count: number, options?: { onComplete?: (Error | null) => void, - UNSTABLE_extraVariables?: $Shape, + UNSTABLE_extraVariables?: Partial, }, ) => Disposable; @@ -53,7 +54,6 @@ export type UseLoadMoreFunctionArgs = { fragmentIdentifier: string, fragmentData: mixed, connectionPathInFragmentData: $ReadOnlyArray, - identifierField: ?string, paginationRequest: ConcreteRequest, paginationMetadata: ReaderPaginationMetadata, componentDisplayName: string, @@ -61,7 +61,7 @@ export type UseLoadMoreFunctionArgs = { onReset: () => void, }; -function useLoadMoreFunction( +hook useLoadMoreFunction( args: UseLoadMoreFunctionArgs, ): [LoadMoreFn, boolean, () => void] { const { @@ -76,17 +76,22 @@ function useLoadMoreFunction( componentDisplayName, observer, onReset, - identifierField, } = args; const environment = useRelayEnvironment(); const {isFetchingRef, startFetch, disposeFetch, completeFetch} = useFetchTrackingRef(); + + const {identifierInfo} = getRefetchMetadata( + fragmentNode, + componentDisplayName, + ); const identifierValue = - identifierField != null && + identifierInfo?.identifierField != null && fragmentData != null && typeof fragmentData === 'object' - ? fragmentData[identifierField] + ? fragmentData[identifierInfo.identifierField] : null; + const isMountedRef = useIsMountedRef(); const [mirroredEnvironment, setMirroredEnvironment] = useState(environment); const [mirroredFragmentIdentifier, setMirroredFragmentIdentifier] = @@ -125,7 +130,7 @@ function useLoadMoreFunction( ( count: number, options: void | { - UNSTABLE_extraVariables?: $Shape, + UNSTABLE_extraVariables?: Partial, onComplete?: (Error | null) => void, }, ) => { @@ -201,7 +206,7 @@ function useLoadMoreFunction( // If the query needs an identifier value ('id' or similar) and one // was not explicitly provided, read it from the fragment data. 
- if (identifierField != null) { + if (identifierInfo != null) { // @refetchable fragments are guaranteed to have an `id` selection // if the type is Node, implements Node, or is @fetchable. Double-check // that there actually is a value at runtime. @@ -210,11 +215,12 @@ function useLoadMoreFunction( false, 'Relay: Expected result to have a string ' + '`%s` in order to refetch, got `%s`.', - identifierField, + identifierInfo.identifierField, identifierValue, ); } - paginationVariables.id = identifierValue; + paginationVariables[identifierInfo.identifierQueryVariableName] = + identifierValue; } const paginationQuery = createOperationDescriptor( diff --git a/packages/react-relay/relay-hooks/useMemoOperationDescriptor.js b/packages/react-relay/relay-hooks/useMemoOperationDescriptor.js index ff9a98e0d8d84..1ffe171d67a95 100644 --- a/packages/react-relay/relay-hooks/useMemoOperationDescriptor.js +++ b/packages/react-relay/relay-hooks/useMemoOperationDescriptor.js @@ -24,7 +24,7 @@ const {createOperationDescriptor, getRequest} = require('relay-runtime'); const {useMemo} = React; -function useMemoOperationDescriptor( +hook useMemoOperationDescriptor( gqlQuery: GraphQLTaggedNode, variables: Variables, cacheConfig?: ?CacheConfig, diff --git a/packages/react-relay/relay-hooks/useMemoVariables.js b/packages/react-relay/relay-hooks/useMemoVariables.js index db4f80522580c..85fc54a7b6650 100644 --- a/packages/react-relay/relay-hooks/useMemoVariables.js +++ b/packages/react-relay/relay-hooks/useMemoVariables.js @@ -21,7 +21,7 @@ const {useState} = require('react'); * This is useful when a `variables` object is used as a value in a depencency * array as it might often be constructed during render. 
*/ -function useMemoVariables( +hook useMemoVariables( variables: TVariables, ): TVariables { const [mirroredVariables, setMirroredVariables] = useState(variables); diff --git a/packages/react-relay/relay-hooks/useMutation.js b/packages/react-relay/relay-hooks/useMutation.js index dd5ccc8e1397b..c4031929abf4b 100644 --- a/packages/react-relay/relay-hooks/useMutation.js +++ b/packages/react-relay/relay-hooks/useMutation.js @@ -12,15 +12,16 @@ 'use strict'; import type { + CommitMutationConfig, DeclarativeMutationConfig, Disposable, - GraphQLTaggedNode, IEnvironment, - MutationConfig, + Mutation, MutationParameters, PayloadError, SelectorStoreUpdater, UploadableMap, + Variables, } from 'relay-runtime'; const useIsMountedRef = require('./useIsMountedRef'); @@ -50,13 +51,29 @@ export type UseMutationConfig = { variables: TMutation['variables'], }; -function useMutation( - mutation: GraphQLTaggedNode, +type UseMutationConfigInternal = { + configs?: Array, + onError?: ?(error: Error) => void, + onCompleted?: ?(response: TData, errors: ?Array) => void, + onNext?: ?() => void, + onUnsubscribe?: ?() => void, + optimisticResponse?: TRawResponse, + optimisticUpdater?: ?SelectorStoreUpdater, + updater?: ?SelectorStoreUpdater, + uploadables?: UploadableMap, + variables: TVariables, +}; + +hook useMutation( + mutation: Mutation, commitMutationFn?: ( environment: IEnvironment, - config: MutationConfig, + config: CommitMutationConfig, ) => Disposable = defaultCommitMutation, -): [(UseMutationConfig) => Disposable, boolean] { +): [ + (UseMutationConfigInternal) => Disposable, + boolean, +] { const environment = useRelayEnvironment(); const isMountedRef = useIsMountedRef(); const environmentRef = useRef(environment); @@ -94,18 +111,18 @@ function useMutation( }, [environment, isMountedRef, mutation]); const commit = useCallback( - (config: UseMutationConfig) => { + (config: UseMutationConfigInternal) => { if (isMountedRef.current) { setMutationInFlight(true); } const disposable: 
Disposable = commitMutationFn(environment, { ...config, mutation, - onCompleted: (response, errors) => { + onCompleted: (response: TData, errors: ?Array) => { cleanup(disposable); config.onCompleted?.(response, errors); }, - onError: error => { + onError: (error: Error) => { cleanup(disposable); config.onError?.(error); }, diff --git a/packages/react-relay/relay-hooks/usePaginationFragment.js b/packages/react-relay/relay-hooks/usePaginationFragment.js index 748dfb55fc892..6e3fd2fb13ff7 100644 --- a/packages/react-relay/relay-hooks/usePaginationFragment.js +++ b/packages/react-relay/relay-hooks/usePaginationFragment.js @@ -12,9 +12,9 @@ 'use strict'; import type {LoadMoreFn, UseLoadMoreFunctionArgs} from './useLoadMoreFunction'; -import type {RefetchFn} from './useRefetchableFragment'; -import type {Options} from './useRefetchableFragmentNode'; +import type {Options} from './useRefetchableFragmentInternal'; import type { + Disposable, FragmentType, GraphQLResponse, Observer, @@ -22,9 +22,9 @@ import type { Variables, } from 'relay-runtime'; -const HooksImplementation = require('./HooksImplementation'); const useLoadMoreFunction = require('./useLoadMoreFunction'); -const useRefetchableFragmentNode = require('./useRefetchableFragmentNode'); +const useRefetchableFragmentInternal = require('./useRefetchableFragmentInternal'); +const useRelayEnvironment = require('./useRelayEnvironment'); const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning'); const {useCallback, useDebugValue, useState} = require('react'); const { @@ -33,6 +33,40 @@ const { getPaginationMetadata, } = require('relay-runtime'); +type RefetchVariables = + // NOTE: This type ensures that the type of the returned variables is either: + // - nullable if the provided ref type is nullable + // - non-nullable if the provided ref type is non-nullable + [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] + ? 
Partial + : TVariables; + +type RefetchFnBase = ( + vars: TVars, + options?: TOptions, +) => Disposable; + +type RefetchFn = RefetchFnBase< + RefetchVariables, + TOptions, +>; + +export type ReturnType = { + // NOTE: This type ensures that the type of the returned data is either: + // - nullable if the provided ref type is nullable + // - non-nullable if the provided ref type is non-nullable + data: [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] + ? TData + : ?TData, + loadNext: LoadMoreFn, + loadPrevious: LoadMoreFn, + hasNext: boolean, + hasPrevious: boolean, + isLoadingNext: boolean, + isLoadingPrevious: boolean, + refetch: RefetchFn, +}; + // This separate type export is only needed as long as we are injecting // a separate hooks implementation in ./HooksImplementation -- it can // be removed after we stop doing that. @@ -46,7 +80,7 @@ export type UsePaginationFragmentType = < parentFragmentRef: TKey, ) => ReturnType; -function usePaginationFragment_LEGACY< +hook usePaginationFragment< TFragmentType: FragmentType, TVariables: Variables, TData, @@ -62,16 +96,12 @@ function usePaginationFragment_LEGACY< ); const componentDisplayName = 'usePaginationFragment()'; - const { - connectionPathInFragmentData, - paginationRequest, - paginationMetadata, - identifierField, - } = getPaginationMetadata(fragmentNode, componentDisplayName); + const {connectionPathInFragmentData, paginationRequest, paginationMetadata} = + getPaginationMetadata(fragmentNode, componentDisplayName); - const {fragmentData, fragmentRef, refetch} = useRefetchableFragmentNode< - $FlowFixMe, - $FlowFixMe, + const {fragmentData, fragmentRef, refetch} = useRefetchableFragmentInternal< + {variables: TVariables, response: TData}, + {data?: TData}, >(fragmentNode, parentFragmentRef, componentDisplayName); const fragmentIdentifier = getFragmentIdentifier(fragmentNode, fragmentRef); @@ -85,7 +115,6 @@ function usePaginationFragment_LEGACY< fragmentIdentifier, fragmentNode, fragmentRef, - 
identifierField, paginationMetadata, paginationRequest, }); @@ -100,12 +129,11 @@ function usePaginationFragment_LEGACY< fragmentIdentifier, fragmentNode, fragmentRef, - identifierField, paginationMetadata, paginationRequest, }); - const refetchPagination: RefetchFn = useCallback( + const refetchPagination = useCallback( (variables: TVariables, options: void | Options) => { disposeFetchNext(); disposeFetchPrevious(); @@ -116,6 +144,7 @@ function usePaginationFragment_LEGACY< if (__DEV__) { // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] useDebugValue({ fragment: fragmentNode.name, data: fragmentData, @@ -126,7 +155,8 @@ function usePaginationFragment_LEGACY< }); } return { - data: (fragmentData: $FlowFixMe), + // $FlowFixMe[incompatible-return] + data: fragmentData, loadNext, loadPrevious, hasNext, @@ -137,7 +167,7 @@ function usePaginationFragment_LEGACY< }; } -function useLoadMore( +hook useLoadMore( args: $Diff< UseLoadMoreFunctionArgs, { @@ -147,7 +177,21 @@ function useLoadMore( }, >, ): [LoadMoreFn, boolean, boolean, () => void] { - const [isLoadingMore, setIsLoadingMore] = useState(false); + const environment = useRelayEnvironment(); + const [isLoadingMore, reallySetIsLoadingMore] = useState(false); + // Schedule this update since it must be observed by components at the same + // batch as when hasNext changes. hasNext is read from the store and store + // updates are scheduled, so this must be scheduled too. 
+ const setIsLoadingMore = (value: boolean) => { + const schedule = environment.getScheduler()?.schedule; + if (schedule) { + schedule(() => { + reallySetIsLoadingMore(value); + }); + } else { + reallySetIsLoadingMore(value); + } + }; const observer = { start: () => setIsLoadingMore(true), complete: () => setIsLoadingMore(false), @@ -162,44 +206,4 @@ function useLoadMore( return [loadMore, hasMore, isLoadingMore, disposeFetch]; } -export type ReturnType = { - // NOTE: This $Call ensures that the type of the returned data is either: - // - nullable if the provided ref type is nullable - // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - data: $Call< - & (( { +$fragmentSpreads: TFragmentType, ... }) => TData) - & ((?{ +$fragmentSpreads: TFragmentType, ... }) => ?TData), - TKey, - >, - loadNext: LoadMoreFn, - loadPrevious: LoadMoreFn, - hasNext: boolean, - hasPrevious: boolean, - isLoadingNext: boolean, - isLoadingPrevious: boolean, - refetch: RefetchFn, -}; - -function usePaginationFragment< - TFragmentType: FragmentType, - TVariables: Variables, - TData, - TKey: ?{+$fragmentSpreads: TFragmentType, ...}, ->( - fragmentInput: RefetchableFragment, - parentFragmentRef: TKey, -): ReturnType { - const impl = HooksImplementation.get(); - if (impl) { - return impl.usePaginationFragment( - fragmentInput, - parentFragmentRef, - ); - } else { - // eslint-disable-next-line react-hooks/rules-of-hooks - return usePaginationFragment_LEGACY(fragmentInput, parentFragmentRef); - } -} - module.exports = usePaginationFragment; diff --git a/packages/react-relay/relay-hooks/usePreloadedQuery.js b/packages/react-relay/relay-hooks/usePreloadedQuery.js index ace1414195c49..346079604171b 100644 --- a/packages/react-relay/relay-hooks/usePreloadedQuery.js +++ b/packages/react-relay/relay-hooks/usePreloadedQuery.js @@ -52,7 +52,7 @@ type PreloadedQuery< TEnvironmentProviderOptions, >; -function usePreloadedQuery< +hook usePreloadedQuery< TVariables: Variables, 
TData, TRawResponse: ?{...} = void, @@ -164,6 +164,7 @@ function usePreloadedQuery< if (__DEV__) { // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] useDebugValue({ query: preloadedQuery.name, variables: preloadedQuery.variables, diff --git a/packages/react-relay/relay-hooks/useQueryLoader.js b/packages/react-relay/relay-hooks/useQueryLoader.js index 9d49dc2d9bacd..8c595e1a68c80 100644 --- a/packages/react-relay/relay-hooks/useQueryLoader.js +++ b/packages/react-relay/relay-hooks/useQueryLoader.js @@ -82,9 +82,6 @@ type UseQueryLoaderHookReturnType< () => void, ]; -type ExtractVariablesType = ({+variables: T, ...}) => T; -type ExtractResponseType = ({+response: T, ...}) => T; - declare function useQueryLoader< TVariables: Variables, TData, @@ -109,16 +106,9 @@ declare function useQueryLoader< declare function useQueryLoader( preloadableRequest: PreloadableConcreteRequest, initialQueryReference?: ?PreloadedQuery, -): UseQueryLoaderHookReturnType< - $Call, - $Call, ->; +): UseQueryLoaderHookReturnType; -function useQueryLoader< - TVariables: Variables, - TData, - TRawResponse: ?{...} = void, ->( +hook useQueryLoader( preloadableRequest: Query, initialQueryReference?: ?PreloadedQuery<{ response: TData, @@ -178,6 +168,7 @@ function useQueryLoader< // necessary here // TODO(T78446637): Handle disposal of managed query references in // components that were never mounted after rendering + // $FlowFixMe[react-rule-unsafe-ref] undisposedQueryReferencesRef.current.add(initialQueryReferenceInternal); setPreviousInitialQueryReference(initialQueryReferenceInternal); setQueryReference(initialQueryReferenceInternal); @@ -201,7 +192,7 @@ function useQueryLoader< } : options; if (isMountedRef.current) { - const updatedQueryReference = loadQuery( + const updatedQueryReference = loadQuery( options?.__environment ?? 
environment, preloadableRequest, variables, diff --git a/packages/react-relay/relay-hooks/useRefetchableFragment.js b/packages/react-relay/relay-hooks/useRefetchableFragment.js index 731d30389d702..69c99c39d78da 100644 --- a/packages/react-relay/relay-hooks/useRefetchableFragment.js +++ b/packages/react-relay/relay-hooks/useRefetchableFragment.js @@ -11,7 +11,7 @@ 'use strict'; -import type {Options} from './useRefetchableFragmentNode'; +import type {Options} from './useRefetchableFragmentInternal'; import type { Disposable, FragmentType, @@ -19,22 +19,18 @@ import type { Variables, } from 'relay-runtime'; -const HooksImplementation = require('./HooksImplementation'); -const useRefetchableFragmentNode = require('./useRefetchableFragmentNode'); +const useRefetchableFragmentInternal = require('./useRefetchableFragmentInternal'); const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning'); const {useDebugValue} = require('react'); const {getFragment} = require('relay-runtime'); -type RefetchVariables = - // NOTE: This $Call ensures that the type of the returned variables is either: +type RefetchVariables = + // NOTE: This type ensures that the type of the returned variables is either: // - nullable if the provided ref type is nullable // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - $Call< - & (( { +$fragmentSpreads: TFragmentType, ... }) => $Shape) - & ((?{ +$fragmentSpreads: TFragmentType, ... }) => TVariables), - TKey, - >; + [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] + ? 
Partial + : TVariables; type RefetchFnBase = ( vars: TVars, @@ -46,16 +42,15 @@ export type RefetchFn = RefetchFnBase< TOptions, >; -type ReturnType = [ - // NOTE: This $Call ensures that the type of the returned data is either: +export type ReturnType< + TVariables, + TData, + TKey: ?{+$fragmentSpreads: mixed, ...}, +> = [ + // NOTE: This type ensures that the type of the returned data is either: // - nullable if the provided ref type is nullable // - non-nullable if the provided ref type is non-nullable - // prettier-ignore - $Call< - & (( { +$fragmentSpreads: TFragmentType, ... }) => TData) - & ((?{ +$fragmentSpreads: TFragmentType, ... }) => ?TData), - TKey, - >, + [+key: TKey] extends [+key: {+$fragmentSpreads: mixed, ...}] ? TData : ?TData, RefetchFn, ]; @@ -69,7 +64,7 @@ export type UseRefetchableFragmentType = < key: TKey, ) => ReturnType; -function useRefetchableFragment_LEGACY< +hook useRefetchableFragment< TFragmentType: FragmentType, TVariables: Variables, TData, @@ -83,45 +78,18 @@ function useRefetchableFragment_LEGACY< fragmentNode, 'first argument of useRefetchableFragment()', ); - const {fragmentData, refetch} = useRefetchableFragmentNode< - { - response: TData, - variables: TVariables, - }, - { - +$data: mixed, - ... 
- }, + const {fragmentData, refetch} = useRefetchableFragmentInternal< + {variables: TVariables, response: TData}, + {data?: TData}, >(fragmentNode, fragmentRef, 'useRefetchableFragment()'); if (__DEV__) { // eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] useDebugValue({fragment: fragmentNode.name, data: fragmentData}); } - // $FlowFixMe[incompatible-return] // $FlowFixMe[prop-missing] return [fragmentData, refetch]; } -function useRefetchableFragment< - TFragmentType: FragmentType, - TVariables: Variables, - TData, - TKey: ?{+$fragmentSpreads: TFragmentType, ...}, ->( - fragmentInput: RefetchableFragment, - parentFragmentRef: TKey, -): ReturnType { - const impl = HooksImplementation.get(); - if (impl) { - return impl.useRefetchableFragment( - fragmentInput, - parentFragmentRef, - ); - } else { - // eslint-disable-next-line react-hooks/rules-of-hooks - return useRefetchableFragment_LEGACY(fragmentInput, parentFragmentRef); - } -} - module.exports = useRefetchableFragment; diff --git a/packages/react-relay/relay-hooks/useRefetchableFragmentInternal.js b/packages/react-relay/relay-hooks/useRefetchableFragmentInternal.js new file mode 100644 index 0000000000000..f82dcc25d1b0f --- /dev/null +++ b/packages/react-relay/relay-hooks/useRefetchableFragmentInternal.js @@ -0,0 +1,608 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {LoaderFn} from './useQueryLoader'; +import type { + ConcreteRequest, + Disposable, + FetchPolicy, + IEnvironment, + OperationDescriptor, + OperationType, + ReaderFragment, + RefetchableIdentifierInfo, + RenderPolicy, + Variables, + VariablesOf, +} from 'relay-runtime'; + +const ProfilerContext = require('./ProfilerContext'); +const {getQueryResourceForEnvironment} = require('./QueryResource'); +const readFragmentInternal = require('./readFragmentInternal'); +const useFragmentInternal = require('./useFragmentInternal'); +const useIsMountedRef = require('./useIsMountedRef'); +const useQueryLoader = require('./useQueryLoader'); +const useRelayEnvironment = require('./useRelayEnvironment'); +const invariant = require('invariant'); +const {useCallback, useContext, useReducer} = require('react'); +const { + __internal: {fetchQuery}, + createOperationDescriptor, + getFragmentIdentifier, + getRefetchMetadata, + getSelector, + getValueAtPath, +} = require('relay-runtime'); +const warning = require('warning'); + +export type RefetchFn< + TQuery: OperationType, + TOptions = Options, +> = RefetchFnExact; + +// NOTE: RefetchFnDynamic returns a refetch function that: +// - Expects the /exact/ set of query variables if the provided key type is +// /nullable/. +// - Or, expects /a subset/ of the query variables if the provided key type is +// /non-null/. +export type RefetchFnDynamic< + TQuery: OperationType, + TKey: ?{+$data?: mixed, ...}, + TOptions = Options, +> = [TKey] extends [{+$data?: mixed, ...}] + ? 
RefetchFnInexact + : RefetchFnExact; + +export type ReturnType< + TQuery: OperationType, + TKey: ?{+$data?: mixed, ...}, + TOptions = Options, +> = { + fragmentData: mixed, + fragmentRef: mixed, + refetch: RefetchFnDynamic, +}; + +export type Options = { + fetchPolicy?: FetchPolicy, + onComplete?: (Error | null) => void, + UNSTABLE_renderPolicy?: RenderPolicy, +}; + +type InternalOptions = { + ...Options, + __environment?: IEnvironment, +}; + +type RefetchFnBase = ( + vars: TVars, + options?: TOptions, +) => Disposable; + +type RefetchFnExact = RefetchFnBase< + VariablesOf, + TOptions, +>; +type RefetchFnInexact< + TQuery: OperationType, + TOptions = Options, +> = RefetchFnBase<$ReadOnly>>, TOptions>; + +type Action = + | { + type: 'reset', + environment: IEnvironment, + fragmentIdentifier: string, + } + | { + type: 'refetch', + refetchQuery: OperationDescriptor, + fetchPolicy?: FetchPolicy, + renderPolicy?: RenderPolicy, + onComplete?: (Error | null) => void, + refetchEnvironment: ?IEnvironment, + }; + +type RefetchState = { + fetchPolicy: FetchPolicy | void, + mirroredEnvironment: IEnvironment, + mirroredFragmentIdentifier: string, + onComplete: ((Error | null) => void) | void, + refetchEnvironment?: ?IEnvironment, + refetchQuery: OperationDescriptor | null, + renderPolicy: RenderPolicy | void, +}; + +type DebugIDandTypename = { + id: string, + typename: string, + ... +}; + +function reducer(state: RefetchState, action: Action): RefetchState { + switch (action.type) { + case 'refetch': { + return { + ...state, + fetchPolicy: action.fetchPolicy, + mirroredEnvironment: + action.refetchEnvironment ?? 
state.mirroredEnvironment, + onComplete: action.onComplete, + refetchEnvironment: action.refetchEnvironment, + refetchQuery: action.refetchQuery, + renderPolicy: action.renderPolicy, + }; + } + case 'reset': { + return { + fetchPolicy: undefined, + mirroredEnvironment: action.environment, + mirroredFragmentIdentifier: action.fragmentIdentifier, + onComplete: undefined, + refetchQuery: null, + renderPolicy: undefined, + }; + } + default: { + (action.type: empty); + throw new Error('useRefetchableFragmentNode: Unexpected action type'); + } + } +} + +hook useRefetchableFragmentNode< + TQuery: OperationType, + TKey: ?{+$data?: mixed, ...}, +>( + fragmentNode: ReaderFragment, + parentFragmentRef: mixed, + componentDisplayName: string, +): ReturnType { + const parentEnvironment = useRelayEnvironment(); + const {refetchableRequest, fragmentRefPathInResponse, identifierInfo} = + getRefetchMetadata(fragmentNode, componentDisplayName); + const fragmentIdentifier = getFragmentIdentifier( + fragmentNode, + parentFragmentRef, + ); + + const [refetchState, dispatch] = useReducer(reducer, { + fetchPolicy: undefined, + mirroredEnvironment: parentEnvironment, + mirroredFragmentIdentifier: fragmentIdentifier, + onComplete: undefined, + refetchEnvironment: null, + refetchQuery: null, + renderPolicy: undefined, + }); + const { + fetchPolicy, + mirroredEnvironment, + mirroredFragmentIdentifier, + onComplete, + refetchEnvironment, + refetchQuery, + renderPolicy, + } = refetchState; + const environment = refetchEnvironment ?? 
parentEnvironment; + + const QueryResource = getQueryResourceForEnvironment(environment); + const profilerContext = useContext(ProfilerContext); + + const shouldReset = + environment !== mirroredEnvironment || + fragmentIdentifier !== mirroredFragmentIdentifier; + const [queryRef, loadQuery, disposeQuery] = useQueryLoader< + TQuery['variables'], + TQuery['response'], + TQuery['rawResponse'], + >((refetchableRequest: $FlowFixMe)); + + let fragmentRef = parentFragmentRef; + if (shouldReset) { + dispatch({ + type: 'reset', + environment, + fragmentIdentifier, + }); + disposeQuery(); + } else if (refetchQuery != null && queryRef != null) { + // If refetch was called, we expect to have a refetchQuery and queryRef + // in state, since both state updates to set the refetchQuery and the + // queryRef occur simultaneously. + // In this case, we need to read the refetched query data (potentially + // suspending if it's in flight), and extract the new fragment ref + // from the query in order read the current @refetchable fragment + // with the updated fragment owner as the new refetchQuery. + + // Before observing the refetch, record the current ID and typename + // so that, if we are refetching existing data on + // a field that implements Node, after refetching we + // can validate that the received data is consistent + let debugPreviousIDAndTypename: ?DebugIDandTypename; + if (__DEV__) { + debugPreviousIDAndTypename = debugFunctions.getInitialIDAndType( + refetchQuery.request.variables, + fragmentRefPathInResponse, + identifierInfo?.identifierQueryVariableName, + environment, + ); + } + + const handleQueryCompleted = (maybeError: void | Error) => { + onComplete && onComplete(maybeError ?? null); + }; + + // The queryRef.source obtained from useQueryLoader will be + // an observable we can consume /if/ a network request was + // started. Otherwise, given that QueryResource.prepare + // always expects an observable we fall back to a new network + // observable. 
Note however that if loadQuery did not make a network + // request, we don't expect to make one here, unless the state of + // the cache has changed between the call to refetch and this + // render. + const fetchObservable = + queryRef.source != null + ? queryRef.source + : fetchQuery(environment, refetchQuery); + + // Now we can read the refetch query here using the + // queryRef provided from useQueryLoader. Note that the + // network request is started during the call to refetch, + // but if the refetch query is still in flight, we will suspend + // at this point: + const queryResult = profilerContext.wrapPrepareQueryResource(() => { + return QueryResource.prepare( + refetchQuery, + fetchObservable, + fetchPolicy, + renderPolicy, + { + error: handleQueryCompleted, + complete: () => { + // Validate that the type of the object we got back matches the type + // of the object already in the store + if (__DEV__) { + debugFunctions.checkSameTypeAfterRefetch( + debugPreviousIDAndTypename, + environment, + fragmentNode, + componentDisplayName, + ); + } + handleQueryCompleted(); + }, + }, + queryRef.fetchKey, + profilerContext, + ); + }); + + const queryData = readFragmentInternal( + environment, + queryResult.fragmentNode, + queryResult.fragmentRef, + componentDisplayName, + ).data; + invariant( + queryData != null, + 'Relay: Expected to be able to read refetch query response. ' + + "If you're seeing this, this is likely a bug in Relay.", + ); + + // After reading/fetching the refetch query, we extract from the + // refetch query response the new fragment ref we need to use to read + // the fragment. The new fragment ref will point to the refetch query + // as its fragment owner. + const refetchedFragmentRef = getValueAtPath( + queryData, + fragmentRefPathInResponse, + ); + fragmentRef = refetchedFragmentRef; + + if (__DEV__) { + // Validate that the id of the object we got back matches the id + // we queried for in the variables.
+ // We do this during render instead of onComplete to make sure we are + // only validating the most recent refetch. + debugFunctions.checkSameIDAfterRefetch( + debugPreviousIDAndTypename, + fragmentRef, + fragmentNode, + componentDisplayName, + ); + } + } + + // We read and subscribe to the fragment using useFragmentNode. + // If refetch was called, we read the fragment using the new computed + // fragment ref from the refetch query response; otherwise, we use the + // fragment ref passed by the caller as normal. + const fragmentData = useFragmentInternal( + fragmentNode, + fragmentRef, + componentDisplayName, + ); + + const refetch = useRefetchFunction( + componentDisplayName, + dispatch, + disposeQuery, + fragmentData, + fragmentIdentifier, + fragmentNode, + fragmentRefPathInResponse, + identifierInfo, + loadQuery, + parentFragmentRef, + refetchableRequest, + ); + return { + fragmentData, + fragmentRef, + // $FlowFixMe[incompatible-return] RefetchFn not compatible with RefetchFnDynamic + refetch, + }; +} + +hook useRefetchFunction( + componentDisplayName: string, + dispatch: ( + | { + environment: IEnvironment, + fragmentIdentifier: string, + type: 'reset', + } + | { + fetchPolicy?: FetchPolicy, + onComplete?: (Error | null) => void, + refetchEnvironment: ?IEnvironment, + refetchQuery: OperationDescriptor, + renderPolicy?: RenderPolicy, + type: 'refetch', + }, + ) => void, + disposeQuery: () => void, + fragmentData: mixed, + fragmentIdentifier: string, + fragmentNode: ReaderFragment, + fragmentRefPathInResponse: $ReadOnlyArray, + identifierInfo: ?RefetchableIdentifierInfo, + loadQuery: LoaderFn, + parentFragmentRef: mixed, + refetchableRequest: ConcreteRequest, +): RefetchFn { + const isMountedRef = useIsMountedRef(); + const identifierValue = + identifierInfo?.identifierField != null && + fragmentData != null && + typeof fragmentData === 'object' + ? 
fragmentData[identifierInfo.identifierField] + : null; + return useCallback( + ( + providedRefetchVariables: VariablesOf, + options: void | InternalOptions, + ) => { + // Bail out and warn if we're trying to refetch after the component + // has unmounted + if (isMountedRef.current !== true) { + warning( + false, + 'Relay: Unexpected call to `refetch` on unmounted component for fragment ' + + '`%s` in `%s`. It looks like some instances of your component are ' + + 'still trying to fetch data but they already unmounted. ' + + 'Please make sure you clear all timers, intervals, ' + + 'async calls, etc that may trigger a fetch.', + fragmentNode.name, + componentDisplayName, + ); + return {dispose: () => {}}; + } + if (parentFragmentRef == null) { + warning( + false, + 'Relay: Unexpected call to `refetch` while using a null fragment ref ' + + 'for fragment `%s` in `%s`. When calling `refetch`, we expect ' + + "initial fragment data to be non-null. Please make sure you're " + + 'passing a valid fragment ref to `%s` before calling ' + + '`refetch`, or make sure you pass all required variables to `refetch`.', + fragmentNode.name, + componentDisplayName, + componentDisplayName, + ); + } + + const refetchEnvironment = options?.__environment; + const fetchPolicy = options?.fetchPolicy; + const renderPolicy = options?.UNSTABLE_renderPolicy; + const onComplete = options?.onComplete; + const fragmentSelector = getSelector(fragmentNode, parentFragmentRef); + let parentVariables: Variables; + let fragmentVariables: Variables; + if (fragmentSelector == null) { + parentVariables = {}; + fragmentVariables = {}; + } else if (fragmentSelector.kind === 'PluralReaderSelector') { + parentVariables = fragmentSelector.selectors[0]?.owner.variables ?? {}; + fragmentVariables = fragmentSelector.selectors[0]?.variables ?? 
{}; + } else { + parentVariables = fragmentSelector.owner.variables; + fragmentVariables = fragmentSelector.variables; + } + + // A user of `useRefetchableFragment()` may pass a subset of + // all variables required by the fragment when calling `refetch()`. + // We fill in any variables not passed by the call to `refetch()` with the + // variables from the original parent fragment owner. + const refetchVariables: VariablesOf = { + ...(parentVariables: $FlowFixMe), + ...fragmentVariables, + ...providedRefetchVariables, + }; + + // If the query needs an identifier value ('id' or similar) and one + // was not explicitly provided, read it from the fragment data. + if ( + identifierInfo != null && + !providedRefetchVariables.hasOwnProperty( + identifierInfo.identifierQueryVariableName, + ) + ) { + // @refetchable fragments are guaranteed to have an `id` selection + // if the type is Node, implements Node, or is @fetchable. Double-check + // that there actually is a value at runtime. + if (typeof identifierValue !== 'string') { + warning( + false, + 'Relay: Expected result to have a string ' + + '`%s` in order to refetch, got `%s`.', + identifierInfo.identifierField, + identifierValue, + ); + } + (refetchVariables: $FlowFixMe)[ + identifierInfo.identifierQueryVariableName + ] = identifierValue; + } + + const refetchQuery = createOperationDescriptor( + refetchableRequest, + refetchVariables, + { + force: true, + }, + ); + + // We call loadQuery which will start a network request if necessary + // and update the queryRef from useQueryLoader. + // Note the following: + // - loadQuery will dispose of any previously refetched queries. + // - We use the variables extracted off the OperationDescriptor + // so that they have been filtered out to include only the + // variables actually declared in the query. 
+ loadQuery(refetchQuery.request.variables, { + fetchPolicy, + __environment: refetchEnvironment, + __nameForWarning: 'refetch', + }); + + dispatch({ + type: 'refetch', + fetchPolicy, + onComplete, + refetchEnvironment, + refetchQuery, + renderPolicy, + }); + return {dispose: disposeQuery}; + }, + // NOTE: We disable react-hooks-deps warning because: + // - We know fragmentRefPathInResponse is static, so it can be omitted from + // deps + // - We know fragmentNode is static, so it can be omitted from deps. + // - fragmentNode and parentFragmentRef are also captured by including + // fragmentIdentifier + // eslint-disable-next-line react-hooks/exhaustive-deps + [fragmentIdentifier, dispatch, disposeQuery, identifierValue, loadQuery], + ); +} + +let debugFunctions; +if (__DEV__) { + debugFunctions = { + getInitialIDAndType( + memoRefetchVariables: ?Variables, + fragmentRefPathInResponse: $ReadOnlyArray, + identifierQueryVariableName: ?string, + environment: IEnvironment, + ): ?DebugIDandTypename { + const {Record} = require('relay-runtime'); + const id = memoRefetchVariables?.[identifierQueryVariableName ?? 'id']; + if ( + fragmentRefPathInResponse.length !== 1 || + fragmentRefPathInResponse[0] !== 'node' || + id == null + ) { + return null; + } + const recordSource = environment.getStore().getSource(); + const record = recordSource.get(id); + const typename = record == null ? 
null : Record.getType(record); + if (typename == null) { + return null; + } + return { + id, + typename, + }; + }, + + checkSameTypeAfterRefetch( + previousIDAndType: ?DebugIDandTypename, + environment: IEnvironment, + fragmentNode: ReaderFragment, + componentDisplayName: string, + ): void { + const {Record} = require('relay-runtime'); + if (!previousIDAndType) { + return; + } + const recordSource = environment.getStore().getSource(); + const record = recordSource.get(previousIDAndType.id); + const typename = record && Record.getType(record); + if (typename !== previousIDAndType.typename) { + warning( + false, + 'Relay: Call to `refetch` returned data with a different ' + + '__typename: was `%s`, now `%s`, on `%s` in `%s`. ' + + 'Please make sure the server correctly implements' + + 'unique id requirement.', + previousIDAndType.typename, + typename, + fragmentNode.name, + componentDisplayName, + ); + } + }, + + checkSameIDAfterRefetch( + previousIDAndTypename: ?DebugIDandTypename, + refetchedFragmentRef: mixed, + fragmentNode: ReaderFragment, + componentDisplayName: string, + ): void { + if (previousIDAndTypename == null) { + return; + } + const {ID_KEY} = require('relay-runtime'); + // $FlowExpectedError[incompatible-use] + const resultID = refetchedFragmentRef[ID_KEY]; + if (resultID != null && resultID !== previousIDAndTypename.id) { + warning( + false, + 'Relay: Call to `refetch` returned a different id, expected ' + + '`%s`, got `%s`, on `%s` in `%s`. 
' + + 'Please make sure the server correctly implements ' + + 'unique id requirement.', + resultID, + previousIDAndTypename.id, + fragmentNode.name, + componentDisplayName, + ); + } + }, + }; +} + +module.exports = useRefetchableFragmentNode; diff --git a/packages/react-relay/relay-hooks/useRelayEnvironment.js b/packages/react-relay/relay-hooks/useRelayEnvironment.js index 4ec75b0539537..ddb2a8e41b96d 100644 --- a/packages/react-relay/relay-hooks/useRelayEnvironment.js +++ b/packages/react-relay/relay-hooks/useRelayEnvironment.js @@ -17,7 +17,7 @@ const ReactRelayContext = require('./../ReactRelayContext'); const invariant = require('invariant'); const {useContext} = require('react'); -function useRelayEnvironment(): IEnvironment { +hook useRelayEnvironment(): IEnvironment { const context = useContext(ReactRelayContext); invariant( context != null, diff --git a/packages/react-relay/relay-hooks/useStaticFragmentNodeWarning.js b/packages/react-relay/relay-hooks/useStaticFragmentNodeWarning.js index 3b6975071a22b..cbed8efb8cce6 100644 --- a/packages/react-relay/relay-hooks/useStaticFragmentNodeWarning.js +++ b/packages/react-relay/relay-hooks/useStaticFragmentNodeWarning.js @@ -16,7 +16,7 @@ import type {ReaderFragment} from 'relay-runtime'; const useUnsafeRef_DEPRECATED = require('./useUnsafeRef_DEPRECATED'); const warning = require('warning'); -function useStaticFragmentNodeWarning( +hook useStaticFragmentNodeWarning( fragmentNode: ReaderFragment, warningContext: string, ): void { @@ -25,8 +25,10 @@ function useStaticFragmentNodeWarning( // __DEV__ setting which shouldn't change. This allows us to only pay the // cost of `useRef` in development mode to produce the warning. 
// eslint-disable-next-line react-hooks/rules-of-hooks + // $FlowFixMe[react-rule-hook] const initialPropRef = useUnsafeRef_DEPRECATED(fragmentNode.name); warning( + // $FlowFixMe[react-rule-unsafe-ref] initialPropRef.current === fragmentNode.name, 'Relay: The %s has to remain the same over the lifetime of a component. ' + 'Changing it is not supported and will result in unexpected behavior.', diff --git a/packages/react-relay/relay-hooks/useSubscribeToInvalidationState.js b/packages/react-relay/relay-hooks/useSubscribeToInvalidationState.js index 12ac10d5330ad..8f24c8ad7d3d5 100644 --- a/packages/react-relay/relay-hooks/useSubscribeToInvalidationState.js +++ b/packages/react-relay/relay-hooks/useSubscribeToInvalidationState.js @@ -25,7 +25,7 @@ const {useEffect, useRef} = require('react'); * re-established and the previous one will be disposed. * The subscription will automatically be disposed on unmount */ -function useSubscribeToInvalidationState( +hook useSubscribeToInvalidationState( dataIDs: $ReadOnlyArray, callback: () => void, ): Disposable { diff --git a/packages/react-relay/relay-hooks/useSubscription.js b/packages/react-relay/relay-hooks/useSubscription.js index d9aa82d558fd0..0256c669d342a 100644 --- a/packages/react-relay/relay-hooks/useSubscription.js +++ b/packages/react-relay/relay-hooks/useSubscription.js @@ -26,7 +26,7 @@ type RequestSubscriptionFn = ( config: GraphQLSubscriptionConfig, ) => Disposable; -function useSubscription( +hook useSubscription( config: GraphQLSubscriptionConfig, requestSubscriptionFn?: RequestSubscriptionFn< TVariables, diff --git a/packages/react-relay/relay-hooks/useUnsafeRef_DEPRECATED.js b/packages/react-relay/relay-hooks/useUnsafeRef_DEPRECATED.js index 72842dbc8d9bc..fb7e7412f9fc9 100644 --- a/packages/react-relay/relay-hooks/useUnsafeRef_DEPRECATED.js +++ b/packages/react-relay/relay-hooks/useUnsafeRef_DEPRECATED.js @@ -18,7 +18,7 @@ const {useMemo} = require('react'); * pattern to read or write from a ref during 
render as it does not trigger * a rerender and might result in bugs. */ -function useUnsafeRef_DEPRECATED(init: T): {current: T} { +hook useUnsafeRef_DEPRECATED(init: T): {current: T} { return useMemo<{current: T}>(() => ({current: init}), []); } diff --git a/packages/relay-compiler/README.md b/packages/relay-compiler/README.md index adfe85208b530..3213c568b1baf 100644 --- a/packages/relay-compiler/README.md +++ b/packages/relay-compiler/README.md @@ -68,11 +68,12 @@ file sources, and "listen" to the file changes in the "watch" mode. If enabling this the babel plugin needs `artifactDirectory` to be set as well. [string] - `excludes` Directories to ignore under `src`. [array] [default: - ["**/node_modules/**", "**/__mocks__/**", "**/__generated__/**"]] + ["\*\*/node_modules/\*\*", "\*\*/__mocks__/\*\*", "\*\*/__generated__/\*\*"]] - `schemaExtensions` List of directories with schema extensions. [array] - `schemaConfig` - `nodeInterfaceIdField` Configure the name of the globally unique ID field on the Node interface. Useful if you can't use the default `id` field name. + - `nodeInterfaceIdVariableName` Specifies the name of the variable expected by the `node` query to pass the Node id. [string][default: "id"] - `nonNodeIdFields` Restricts the type of all fields named `id` to `ID`. - `allowedIdTypes` Mappings from types in your schema to allowed types @@ -82,8 +83,8 @@ file sources, and "listen" to the file changes in the "watch" mode. If the future. Enabling this means you will have to update your application whenever the GraphQL server schema adds new enum values to prevent it from breaking. [boolean][default: false] -- `customScalars` Mappings from custom scalars in your schema to built-in - GraphQL types, for type emission purposes. [object] +- `customScalarTypes` Mappings from custom scalars in your schema to built-in + GraphQL types, for type emission purposes (eg. {"GqlScalar": "TStype"}). 
[object] - `eagerEsModules` This option enables emitting ES modules artifacts. [boolean][default: false] - `persistConfig` Relay supports two versions of the config: @@ -95,11 +96,14 @@ file sources, and "listen" to the file changes in the "watch" mode. If contain additional parameters to send. [object] - `concurrency` The maximum number concurrent requests that will be made to `url`. Use a value greater than 0. [number] - + - `include_query_text` Boolean, whether to include the query text in the + generated files. [boolean] [default: false] - - **Local Persisting:** - `file` Path for the JSON file that will contain operations map. Compiler will write queries in the format: { "md5(queryText) => "queryText", ...}. [string] + - `include_query_text` Boolean, whether to include the query text in the + generated files. [boolean] [default: false] - `codegenCommand` Command name that for relay compiler. [string] diff --git a/packages/relay-compiler/package.json b/packages/relay-compiler/package.json index e12b3320c9cdc..1ac53b45796e7 100644 --- a/packages/relay-compiler/package.json +++ b/packages/relay-compiler/package.json @@ -1,7 +1,7 @@ { "name": "relay-compiler", "description": "A compiler tool for building GraphQL-driven applications.", - "version": "15.0.0-microsoft.5", + "version": "17.0.0-microsoft.0", "keywords": [ "graphql", "relay" diff --git a/packages/relay-runtime/experimental.js b/packages/relay-runtime/experimental.js index 733a1a7c8bf2b..4008ed9e59e1b 100644 --- a/packages/relay-runtime/experimental.js +++ b/packages/relay-runtime/experimental.js @@ -11,14 +11,18 @@ 'use strict'; +import type {DataID} from './util/RelayRuntimeTypes'; + const resolverDataInjector = require('./store/experimental-live-resolvers/resolverDataInjector'); -const { - weakObjectWrapper, - weakObjectWrapperLive, -} = require('./store/experimental-live-resolvers/weakObjectWrapper'); + +// Annotates a strong object return type, where `A` is the GraphQL typename +// 
eslint-disable-next-line no-unused-vars +export type IdOf = DataID; + +// Annotates a `RelayResolverValue` GraphQL return type +// eslint-disable-next-line no-unused-vars +export type RelayResolverValue = A; module.exports = { resolverDataInjector, - weakObjectWrapper, - weakObjectWrapperLive, }; diff --git a/packages/relay-runtime/handlers/connection/ConnectionHandler.js b/packages/relay-runtime/handlers/connection/ConnectionHandler.js index 64ad418f80e1d..e47d6ac9b0d0e 100644 --- a/packages/relay-runtime/handlers/connection/ConnectionHandler.js +++ b/packages/relay-runtime/handlers/connection/ConnectionHandler.js @@ -144,11 +144,23 @@ function update(store: RecordSourceProxy, payload: HandleFieldPayload): void { const args = payload.args; if (prevEdges && serverEdges) { if (args.after != null) { + const clientEndCursor = clientPageInfo?.getValue(END_CURSOR); + const serverEndCursor = serverPageInfo?.getValue(END_CURSOR); + + const isAddingEdgesAfterCurrentPage = + clientPageInfo && args.after === clientEndCursor; + const isFillingOutCurrentPage = + clientPageInfo && clientEndCursor === serverEndCursor; + // Forward pagination from the end of the connection: append edges - if ( - clientPageInfo && - args.after === clientPageInfo.getValue(END_CURSOR) - ) { + // Case 1: We're fetching edges for the first time and pageInfo for + // the upcoming page is missing, but our after cursor matches + // the last ending cursor. (adding after current page) + // Case 2: We've fetched these edges before and we know the end cursor + // from the first edge updating the END_CURSOR field. If the + // end cursor from the server matches the end cursor from the + // client then we're just filling out the rest of this page. 
+ if (isAddingEdgesAfterCurrentPage || isFillingOutCurrentPage) { const nodeIDs = new Set(); mergeEdges(prevEdges, nextEdges, nodeIDs); mergeEdges(serverEdges, nextEdges, nodeIDs); diff --git a/packages/relay-runtime/handlers/connection/MutationHandlers.js b/packages/relay-runtime/handlers/connection/MutationHandlers.js index 01beb75283036..5cf3567ecd1e5 100644 --- a/packages/relay-runtime/handlers/connection/MutationHandlers.js +++ b/packages/relay-runtime/handlers/connection/MutationHandlers.js @@ -63,7 +63,8 @@ const DeleteEdgeHandler = { if (connection == null) { warning( false, - `[Relay][Mutation] The connection with id '${connectionID}' doesn't exist.`, + "[Relay] The connection with id `%s` doesn't exist.", + connectionID, ); continue; } @@ -135,7 +136,8 @@ function edgeUpdater( if (connection == null) { warning( false, - `[Relay][Mutation] The connection with id '${connectionID}' doesn't exist.`, + "[Relay] The connection with id `%s` doesn't exist.", + connectionID, ); continue; } @@ -205,7 +207,8 @@ function nodeUpdater( if (connection == null) { warning( false, - `[Relay][Mutation] The connection with id '${connectionID}' doesn't exist.`, + "[Relay] The connection with id `%s` doesn't exist.", + connectionID, ); continue; } diff --git a/packages/relay-runtime/handlers/connection/__tests__/ConnectionHandler-test.js b/packages/relay-runtime/handlers/connection/__tests__/ConnectionHandler-test.js index 084094e6f4ca5..2ea05b1aeb2db 100644 --- a/packages/relay-runtime/handlers/connection/__tests__/ConnectionHandler-test.js +++ b/packages/relay-runtime/handlers/connection/__tests__/ConnectionHandler-test.js @@ -848,6 +848,190 @@ describe('ConnectionHandler', () => { }); }); + it('appends two streamed edges, which have been streamed before and know their end cursors', () => { + // First edge + normalize( + { + node: { + id: '4', + __typename: 'User', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + id: '2', + }, + }, + ], + [PAGE_INFO]: { + // 
EACH EDGE ALREADY WILL KNOW ITS END CURSOR FOR THAT PAGE + [END_CURSOR]: 'cursor:3', + [HAS_NEXT_PAGE]: false, + [HAS_PREV_PAGE]: false, + [START_CURSOR]: 'cursor:2', + }, + }, + }, + }, + { + after: 'cursor:1', + before: null, + count: 10, + orderby: ['first name'], + id: '4', + }, + ); + const args = {after: 'cursor:1', first: 10, orderby: ['first name']}; + const handleKey = + getRelayHandleKey( + 'connection', + 'ConnectionQuery_friends', + 'friends', + ) + '(orderby:["first name"])'; + const payload = { + args, + dataID: '4', + fieldKey: getStableStorageKey('friends', args), + handleKey, + }; + ConnectionHandler.update(proxy, payload); + expect(sinkSource.toJSON()).toEqual({ + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"])': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"])', + [TYPENAME_KEY]: 'FriendsConnection', + edges: { + [REFS_KEY]: [ + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:0', + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1', + ], + }, + pageInfo: { + [REF_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo', + }, + __connection_next_edge_index: 2, + }, + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1', + [TYPENAME_KEY]: 'FriendsEdge', + cursor: 'cursor:2', + node: {[REF_KEY]: '2'}, + }, + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo', + [TYPENAME_KEY]: 'PageInfo', + [END_CURSOR]: 'cursor:3', + [HAS_NEXT_PAGE]: false, + }, + }); + + // Second Edge + normalize( + { + node: { + id: '4', + __typename: 'User', + friends: { + edges: [ + { + cursor: 'cursor:3', + node: { + id: '3', + }, + }, + ], + [PAGE_INFO]: { + // EACH EDGE 
ALREADY WILL KNOW ITS END CURSOR FOR THAT PAGE + // (THIS IS FINAL EDGE, BUT STILL...) + [END_CURSOR]: 'cursor:3', + [HAS_NEXT_PAGE]: false, + [HAS_PREV_PAGE]: false, + [START_CURSOR]: 'cursor:2', + }, + }, + }, + }, + { + after: 'cursor:1', + before: null, + count: 10, + orderby: ['first name'], + id: '4', + }, + ); + const secondArgs = { + after: 'cursor:1', + first: 10, + orderby: ['first name'], + }; + const secondHandleKey = + getRelayHandleKey( + 'connection', + 'ConnectionQuery_friends', + 'friends', + ) + '(orderby:["first name"])'; + const secondPayload = { + args, + dataID: '4', + fieldKey: getStableStorageKey('friends', secondArgs), + handleKey: secondHandleKey, + }; + ConnectionHandler.update(proxy, secondPayload); + + const result = { + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"])': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"])', + [TYPENAME_KEY]: 'FriendsConnection', + edges: { + [REFS_KEY]: [ + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:0', + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1', + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:2', + ], + }, + pageInfo: { + [REF_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo', + }, + __connection_next_edge_index: 3, + }, + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:1', + [TYPENAME_KEY]: 'FriendsEdge', + cursor: 'cursor:2', + node: {[REF_KEY]: '2'}, + }, + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:2': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):edges:2', + [TYPENAME_KEY]: 'FriendsEdge', + cursor: 'cursor:3', + node: {[REF_KEY]: '3'}, + }, + 
'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo': + { + [ID_KEY]: + 'client:4:__ConnectionQuery_friends_connection(orderby:["first name"]):pageInfo', + [TYPENAME_KEY]: 'PageInfo', + [END_CURSOR]: 'cursor:3', + [HAS_NEXT_PAGE]: false, + }, + }; + expect(sinkSource.toJSON()).toEqual(result); + }); + it('prepends new edges', () => { normalize( { diff --git a/packages/relay-runtime/index.js b/packages/relay-runtime/index.js index 9addfa0f62cf3..45e3af0ea72ea 100644 --- a/packages/relay-runtime/index.js +++ b/packages/relay-runtime/index.js @@ -34,7 +34,12 @@ const { } = require('./store/ClientID'); const createFragmentSpecResolver = require('./store/createFragmentSpecResolver'); const createRelayContext = require('./store/createRelayContext'); +const { + isSuspenseSentinel, + suspenseSentinel, +} = require('./store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); const isRelayModernEnvironment = require('./store/isRelayModernEnvironment'); +const normalizeResponse = require('./store/normalizeResponse'); const readInlineData = require('./store/readInlineData'); const RelayConcreteVariables = require('./store/RelayConcreteVariables'); const RelayModernEnvironment = require('./store/RelayModernEnvironment'); @@ -83,7 +88,10 @@ export type { RangeOperation, } from './mutations/RelayDeclarativeMutationConfig'; export type {OptimisticMutationConfig} from './mutations/applyOptimisticMutation'; -export type {MutationConfig} from './mutations/commitMutation'; +export type { + MutationConfig, + CommitMutationConfig, +} from './mutations/commitMutation'; export type { ExecuteFunction, FetchFunction, @@ -95,10 +103,6 @@ export type { LogRequestInfoFunction, PayloadData, PayloadError, - ReactFlightPayloadData, - ReactFlightPayloadQuery, - ReactFlightServerTree, - ReactFlightServerError, SubscribeFunction, Uploadable, UploadableMap, @@ -106,6 +110,8 @@ export type { export type { ObservableFromValue, Observer, + Sink, + Source, 
Subscribable, Subscription, } from './network/RelayObservable'; @@ -132,6 +138,7 @@ export type { MutableRecordSource, MutationParameters, NormalizationSelector, + NormalizeResponseFunction, OperationAvailability, OperationDescriptor, OperationLoader, @@ -141,10 +148,8 @@ export type { OptimisticUpdateFunction, PluralReaderSelector, Props, + RecordSourceJSON, PublishQueue, - ReactFlightClientResponse, - ReactFlightPayloadDeserializer, - ReactFlightServerErrorHandler, ReaderSelector, ReadOnlyRecordProxy, ReadOnlyRecordSourceProxy, @@ -153,7 +158,7 @@ export type { RecordSourceSelectorProxy, RelayContext, RequestDescriptor, - RequiredFieldLogger, + RelayFieldLogger, SelectorData, SelectorStoreUpdater, SingularReaderSelector, @@ -161,6 +166,7 @@ export type { StoreUpdater, UpdatableData, TaskScheduler, + LiveState, } from './store/RelayStoreTypes'; export type { GraphQLSubscriptionConfig, @@ -171,7 +177,6 @@ export type { NormalizationArgument, NormalizationDefer, NormalizationField, - NormalizationFlightField, NormalizationLinkedField, NormalizationLinkedHandle, NormalizationLocalArgumentDefinition, @@ -188,7 +193,6 @@ export type { ReaderArgument, ReaderArgumentDefinition, ReaderField, - ReaderFlightField, ReaderFragment, ReaderInlineDataFragment, ReaderInlineDataFragmentSpread, @@ -200,6 +204,7 @@ export type { ReaderRequiredField, ReaderScalarField, ReaderSelection, + RefetchableIdentifierInfo, RequiredFieldAction, } from './util/ReaderNode'; export type { @@ -233,6 +238,8 @@ export type { export type {Local3DPayload} from './util/createPayloadFor3DField'; export type {Direction} from './util/getPaginationVariables'; export type {RequestIdentifier} from './util/getRequestIdentifier'; +export type {ResolverFunction} from './util/ReaderNode'; +export type {IdOf, RelayResolverValue} from './experimental'; // As early as possible, check for the existence of the JavaScript globals which // Relay Runtime relies upon, and produce a clear message if they do not exist. 
@@ -300,6 +307,8 @@ module.exports = { graphql: GraphQLTag.graphql, isFragment: GraphQLTag.isFragment, isInlineDataFragment: GraphQLTag.isInlineDataFragment, + isSuspenseSentinel, + suspenseSentinel, isRequest: GraphQLTag.isRequest, readInlineData, @@ -365,11 +374,13 @@ module.exports = { OperationTracker: RelayOperationTracker, createRelayContext: createRelayContext, getOperationVariables: RelayConcreteVariables.getOperationVariables, + getLocalVariables: RelayConcreteVariables.getLocalVariables, fetchQuery: fetchQueryInternal.fetchQuery, fetchQueryDeduped: fetchQueryInternal.fetchQueryDeduped, getPromiseForActiveRequest: fetchQueryInternal.getPromiseForActiveRequest, getObservableForActiveRequest: fetchQueryInternal.getObservableForActiveRequest, + normalizeResponse: normalizeResponse, withProvidedVariables: withProvidedVariables, }, }; diff --git a/packages/relay-runtime/multi-actor-environment/ActorSpecificEnvironment.js b/packages/relay-runtime/multi-actor-environment/ActorSpecificEnvironment.js index 9cc08011c7444..422102c3884cf 100644 --- a/packages/relay-runtime/multi-actor-environment/ActorSpecificEnvironment.js +++ b/packages/relay-runtime/multi-actor-environment/ActorSpecificEnvironment.js @@ -25,7 +25,7 @@ import type { OperationTracker, OptimisticResponseConfig, OptimisticUpdateFunction, - RequiredFieldLogger, + RelayFieldLogger, SelectorStoreUpdater, SingularReaderSelector, Snapshot, @@ -54,7 +54,7 @@ export type ActorSpecificEnvironmentConfig = $ReadOnly<{ logFn: LogFunction, multiActorEnvironment: IMultiActorEnvironment, network: INetwork, - requiredFieldLogger: RequiredFieldLogger, + relayFieldLogger: RelayFieldLogger, store: Store, missingFieldHandlers: $ReadOnlyArray, }>; @@ -70,7 +70,7 @@ class ActorSpecificEnvironment implements IActorEnvironment { +configName: ?string; +multiActorEnvironment: IMultiActorEnvironment; +options: mixed; - requiredFieldLogger: RequiredFieldLogger; + relayFieldLogger: RelayFieldLogger; constructor(config: 
ActorSpecificEnvironmentConfig) { this.configName = config.configName; @@ -78,7 +78,7 @@ class ActorSpecificEnvironment implements IActorEnvironment { this.multiActorEnvironment = config.multiActorEnvironment; this.__log = config.logFn; - this.requiredFieldLogger = config.requiredFieldLogger; + this.relayFieldLogger = config.relayFieldLogger; this._operationTracker = new RelayOperationTracker(); this._store = config.store; this._network = wrapNetworkWithLogObserver(this, config.network); diff --git a/packages/relay-runtime/multi-actor-environment/MultiActorEnvironment.js b/packages/relay-runtime/multi-actor-environment/MultiActorEnvironment.js index 60f48a24c7983..18f55a366d30a 100644 --- a/packages/relay-runtime/multi-actor-environment/MultiActorEnvironment.js +++ b/packages/relay-runtime/multi-actor-environment/MultiActorEnvironment.js @@ -22,14 +22,13 @@ import type { MissingFieldHandler, MutableRecordSource, MutationParameters, + NormalizeResponseFunction, OperationAvailability, OperationDescriptor, OperationLoader, OptimisticResponseConfig, OptimisticUpdateFunction, - ReactFlightPayloadDeserializer, - ReactFlightServerErrorHandler, - RequiredFieldLogger, + RelayFieldLogger, SelectorStoreUpdater, SingularReaderSelector, Snapshot, @@ -49,7 +48,8 @@ import type { const RelayDefaultHandlerProvider = require('../handlers/RelayDefaultHandlerProvider'); const RelayObservable = require('../network/RelayObservable'); const defaultGetDataID = require('../store/defaultGetDataID'); -const defaultRequiredFieldLogger = require('../store/defaultRequiredFieldLogger'); +const defaultRelayFieldLogger = require('../store/defaultRelayFieldLogger'); +const normalizeResponse = require('../store/normalizeResponse'); const OperationExecutor = require('../store/OperationExecutor'); const RelayModernStore = require('../store/RelayModernStore'); const RelayRecordSource = require('../store/RelayRecordSource'); @@ -65,10 +65,9 @@ export type MultiActorEnvironmentConfig = $ReadOnly<{ 
isServer?: ?boolean, logFn?: ?LogFunction, missingFieldHandlers?: ?$ReadOnlyArray, + normalizeResponse?: NormalizeResponseFunction, operationLoader?: ?OperationLoader, - reactFlightPayloadDeserializer?: ?ReactFlightPayloadDeserializer, - reactFlightServerErrorHandler?: ?ReactFlightServerErrorHandler, - requiredFieldLogger?: ?RequiredFieldLogger, + relayFieldLogger?: ?RelayFieldLogger, scheduler?: ?TaskScheduler, shouldProcessClientComponents?: ?boolean, treatMissingFieldsAsNull?: boolean, @@ -85,11 +84,10 @@ class MultiActorEnvironment implements IMultiActorEnvironment { +_isServer: boolean; +_logFn: LogFunction; +_missingFieldHandlers: $ReadOnlyArray; + +_normalizeResponse: NormalizeResponseFunction; +_operationExecutions: Map; +_operationLoader: ?OperationLoader; - +_reactFlightPayloadDeserializer: ?ReactFlightPayloadDeserializer; - +_reactFlightServerErrorHandler: ?ReactFlightServerErrorHandler; - +_requiredFieldLogger: RequiredFieldLogger; + +_relayFieldLogger: RelayFieldLogger; +_scheduler: ?TaskScheduler; +_shouldProcessClientComponents: ?boolean; +_treatMissingFieldsAsNull: boolean; @@ -105,18 +103,15 @@ class MultiActorEnvironment implements IMultiActorEnvironment { : RelayDefaultHandlerProvider; this._logFn = config.logFn ?? emptyFunction; this._operationExecutions = new Map(); - this._requiredFieldLogger = - config.requiredFieldLogger ?? defaultRequiredFieldLogger; + this._relayFieldLogger = config.relayFieldLogger ?? defaultRelayFieldLogger; this._shouldProcessClientComponents = config.shouldProcessClientComponents; this._treatMissingFieldsAsNull = config.treatMissingFieldsAsNull ?? false; this._isServer = config.isServer ?? false; this._missingFieldHandlers = config.missingFieldHandlers ?? 
[]; this._createStoreForActor = config.createStoreForActor; - this._reactFlightPayloadDeserializer = - config.reactFlightPayloadDeserializer; - this._reactFlightServerErrorHandler = config.reactFlightServerErrorHandler; this._createConfigNameForActor = config.createConfigNameForActor; this._defaultRenderPolicy = config.defaultRenderPolicy ?? 'partial'; + this._normalizeResponse = config.normalizeResponse ?? normalizeResponse; } /** @@ -134,7 +129,7 @@ class MultiActorEnvironment implements IMultiActorEnvironment { actorIdentifier, multiActorEnvironment: this, logFn: this._logFn, - requiredFieldLogger: this._requiredFieldLogger, + relayFieldLogger: this._relayFieldLogger, store: this._createStoreForActor != null ? this._createStoreForActor(actorIdentifier) @@ -466,8 +461,6 @@ class MultiActorEnvironment implements IMultiActorEnvironment { getPublishQueue: (actorIdentifier: ActorIdentifier) => { return this.forActor(actorIdentifier).getPublishQueue(); }, - reactFlightPayloadDeserializer: this._reactFlightPayloadDeserializer, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, scheduler: this._scheduler, shouldProcessClientComponents: this._shouldProcessClientComponents, sink, @@ -480,6 +473,7 @@ class MultiActorEnvironment implements IMultiActorEnvironment { treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, updater, log: this._logFn, + normalizeResponse: this._normalizeResponse, }); return () => executor.cancel(); }); diff --git a/packages/relay-runtime/multi-actor-environment/__tests__/MultiActorEnvironment-commitMultiActorUpdate-test.js b/packages/relay-runtime/multi-actor-environment/__tests__/MultiActorEnvironment-commitMultiActorUpdate-test.js index 97c29a8f9476f..1c93c271a9398 100644 --- a/packages/relay-runtime/multi-actor-environment/__tests__/MultiActorEnvironment-commitMultiActorUpdate-test.js +++ b/packages/relay-runtime/multi-actor-environment/__tests__/MultiActorEnvironment-commitMultiActorUpdate-test.js @@ -13,6 +13,7 @@ import 
type {MultiActorStoreUpdater} from '../MultiActorEnvironmentTypes'; +const RelayModernRecord = require('../../store/RelayModernRecord'); const {getActorIdentifier} = require('../ActorIdentifier'); const MultiActorEnvironment = require('../MultiActorEnvironment'); @@ -47,7 +48,7 @@ describe('commitMultiActorUpdate', () => { throw new Error('Test record is null.'); } expect(testRecord).toHaveProperty('test'); - expect(testRecord.test).toBe(42); + expect(RelayModernRecord.getValue(testRecord, 'test')).toBe(42); }); expect(actorsCalled.includes('actor1')).toBe(true); diff --git a/packages/relay-runtime/multi-actor-environment/__tests__/actorEnvironment_execute-test.js b/packages/relay-runtime/multi-actor-environment/__tests__/actorEnvironment_execute-test.js index 4bf1a81fe379a..dc5dfeef50444 100644 --- a/packages/relay-runtime/multi-actor-environment/__tests__/actorEnvironment_execute-test.js +++ b/packages/relay-runtime/multi-actor-environment/__tests__/actorEnvironment_execute-test.js @@ -32,7 +32,7 @@ test('send a network request with actor specific params', () => { // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode createNetworkForActor: () => create(fetchFn), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); const actorEnvironment = multiActorEnvironment.forActor(actorIdentifier); diff --git a/packages/relay-runtime/multi-actor-environment/__tests__/forActor-test.js b/packages/relay-runtime/multi-actor-environment/__tests__/forActor-test.js index 9cddd98055977..c92e6593ab9b3 100644 --- a/packages/relay-runtime/multi-actor-environment/__tests__/forActor-test.js +++ b/packages/relay-runtime/multi-actor-environment/__tests__/forActor-test.js @@ -38,7 +38,7 @@ test('forActor: creates an environment', () => { const multiActorEnvironment = new MultiActorEnvironment({ createNetworkForActor: () => create(fetchFn), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); const 
actorEnvironment = multiActorEnvironment.forActor(actorIdentifer); @@ -61,7 +61,7 @@ test('forActor: memoize an environment', () => { const multiActorEnvironment = new MultiActorEnvironment({ createNetworkForActor: () => create(fetchFn), logFn: jest.fn(), - requiredFieldLogger: jest.fn(), + relayFieldLogger: jest.fn(), }); const actorEnvironment = multiActorEnvironment.forActor(actorIdentifer); diff --git a/packages/relay-runtime/mutations/RelayRecordProxy.js b/packages/relay-runtime/mutations/RelayRecordProxy.js index a3b06dee61786..10afb8f873c22 100644 --- a/packages/relay-runtime/mutations/RelayRecordProxy.js +++ b/packages/relay-runtime/mutations/RelayRecordProxy.js @@ -139,7 +139,7 @@ class RelayRecordProxy implements RecordProxy { } setLinkedRecords( - records: Array, + records: $ReadOnlyArray, name: string, args?: ?Arguments, ): RecordProxy { diff --git a/packages/relay-runtime/mutations/__tests__/RelayRecordSourceProxy-test.js b/packages/relay-runtime/mutations/__tests__/RelayRecordSourceProxy-test.js index aa7aadd0b3617..77bc183bb5d41 100644 --- a/packages/relay-runtime/mutations/__tests__/RelayRecordSourceProxy-test.js +++ b/packages/relay-runtime/mutations/__tests__/RelayRecordSourceProxy-test.js @@ -12,6 +12,7 @@ 'use strict'; const defaultGetDataID = require('../../store/defaultGetDataID'); +const RelayModernRecord = require('../../store/RelayModernRecord'); const RelayRecordSource = require('../../store/RelayRecordSource'); const RelayStoreUtils = require('../../store/RelayStoreUtils'); const RelayRecordProxy = require('../RelayRecordProxy'); @@ -132,8 +133,7 @@ describe('RelayRecordSourceProxy', () => { const root = baseSource.get(ROOT_ID); expect(root).not.toBeUndefined(); if (root != null) { - // Flow - root.__typename = 'User'; + RelayModernRecord.setValue(root, TYPENAME_KEY, 'User'); } expect(() => { store.getRoot(); diff --git a/packages/relay-runtime/mutations/__tests__/__generated__/commitMutationTestRequiredRootFieldMutation.graphql.js 
b/packages/relay-runtime/mutations/__tests__/__generated__/commitMutationTestRequiredRootFieldMutation.graphql.js index 9eff776e6fedf..92130bbb739cd 100644 --- a/packages/relay-runtime/mutations/__tests__/__generated__/commitMutationTestRequiredRootFieldMutation.graphql.js +++ b/packages/relay-runtime/mutations/__tests__/__generated__/commitMutationTestRequiredRootFieldMutation.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<017ca18e8f83a3e70285277746fbb0ba>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,7 +25,7 @@ export type commitMutationTestRequiredRootFieldMutation$variables = {| input?: ?CommentDeleteInput, |}; export type commitMutationTestRequiredRootFieldMutation$data = {| - +commentDelete: {| + +commentDelete: ?{| +deletedCommentId: ?string, |}, |}; @@ -43,44 +43,39 @@ var v0 = [ "name": "input" } ], -v1 = { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "input", - "variableName": "input" - } - ], - "concreteType": "CommentDeleteResponsePayload", - "kind": "LinkedField", - "name": "commentDelete", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "deletedCommentId", - "storageKey": null - } - ], - "storageKey": null -}; +v1 = [ + { + "alias": null, + "args": [ + { + "kind": "Variable", + "name": "input", + "variableName": "input" + } + ], + "concreteType": "CommentDeleteResponsePayload", + "kind": "LinkedField", + "name": "commentDelete", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "deletedCommentId", + "storageKey": null + } + ], + "storageKey": null + } +]; return { "fragment": { "argumentDefinitions": (v0/*: any*/), "kind": "Fragment", "metadata": null, "name": "commitMutationTestRequiredRootFieldMutation", - "selections": [ - { - "kind": "RequiredField", - "field": (v1/*: any*/), - "action": "THROW", - "path": "commentDelete" - } - ], + "selections": 
(v1/*: any*/), "type": "Mutation", "abstractKey": null }, @@ -89,9 +84,7 @@ return { "argumentDefinitions": (v0/*: any*/), "kind": "Operation", "name": "commitMutationTestRequiredRootFieldMutation", - "selections": [ - (v1/*: any*/) - ] + "selections": (v1/*: any*/) }, "params": { "cacheID": "19f1e1c50328f89205857394403b5d9b", @@ -105,7 +98,7 @@ return { })(); if (__DEV__) { - (node/*: any*/).hash = "c0ee6bcae636236c0564c8da132daeac"; + (node/*: any*/).hash = "b75215ee7b976cd4f043bc5a88b05931"; } module.exports = ((node/*: any*/)/*: Mutation< diff --git a/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_node.graphql.js b/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_node.graphql.js index 047d714c8fd75..f5ad5e797f6bd 100644 --- a/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_node.graphql.js +++ b/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_node.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<53b3b22d71f7629f39c4f4603c4586ff>> + * @generated SignedSource<<3d2291db4ba78ef6b4b4c6638c9e5bef>> * @flow * @lightSyntaxTransform * @nogrep @@ -21,6 +21,14 @@ import type { FragmentType } from "relay-runtime"; declare export opaque type readUpdatableQueryTest_node$fragmentType: FragmentType; */ +var node/*: any*/ = {}; + +if (__DEV__) { + (node/*: any*/).hash = "1e10d6074f00480fc34548623674b3da"; +} + +module.exports = node; + module.exports.validate = function validate(value/*: { +__id: string, +__isreadUpdatableQueryTest_node?: string, diff --git a/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_user.graphql.js b/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_user.graphql.js index 9dcac1e2e1fac..dc0d5f6ed8e23 100644 --- a/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_user.graphql.js +++ 
b/packages/relay-runtime/mutations/__tests__/__generated__/readUpdatableQueryTest_user.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -21,6 +21,14 @@ import type { FragmentType } from "relay-runtime"; declare export opaque type readUpdatableQueryTest_user$fragmentType: FragmentType; */ +var node/*: any*/ = {}; + +if (__DEV__) { + (node/*: any*/).hash = "b4c3265697d01e4f38a505ed5bb58bf7"; +} + +module.exports = node; + module.exports.validate = function validate(value/*: { +__typename: string, +__id: string, diff --git a/packages/relay-runtime/mutations/__tests__/__generated__/validateMutationTestFlightMutation.graphql.js b/packages/relay-runtime/mutations/__tests__/__generated__/validateMutationTestFlightMutation.graphql.js deleted file mode 100644 index 820f27663ae30..0000000000000 --- a/packages/relay-runtime/mutations/__tests__/__generated__/validateMutationTestFlightMutation.graphql.js +++ /dev/null @@ -1,187 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Mutation } from 'relay-runtime'; -export type StoryUpdateInput = {| - body?: ?InputText, -|}; -export type InputText = {| - ranges?: ?$ReadOnlyArray, - text?: ?string, -|}; -export type validateMutationTestFlightMutation$variables = {| - count: number, - input: StoryUpdateInput, -|}; -export type validateMutationTestFlightMutation$data = {| - +storyUpdate: ?{| - +story: ?{| - +body: ?{| - +text: ?string, - |}, - +flightComponentValidateMutation: ?any, - +id: string, - |}, - |}, -|}; -export type validateMutationTestFlightMutation = {| - response: validateMutationTestFlightMutation$data, - variables: validateMutationTestFlightMutation$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "input" -}, -v2 = [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "input", - "variableName": "input" - } - ], - "concreteType": "StoryUpdateResponsePayload", - "kind": "LinkedField", - "name": "storyUpdate", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "concreteType": "Story", - "kind": "LinkedField", - "name": "story", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - }, - { - "alias": "flightComponentValidateMutation", - "args": [ - { - "kind": "Literal", - 
"name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "storageKey": null - } - ], - "storageKey": null - } -]; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "validateMutationTestFlightMutation", - "selections": (v2/*: any*/), - "type": "Mutation", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "validateMutationTestFlightMutation", - "selections": (v2/*: any*/) - }, - "params": { - "cacheID": "118f5bcd280fc4045732b0d197c1e4b6", - "id": null, - "metadata": {}, - "name": "validateMutationTestFlightMutation", - "operationKind": "mutation", - "text": "mutation validateMutationTestFlightMutation(\n $input: StoryUpdateInput!\n $count: Int!\n) {\n storyUpdate(input: $input) {\n story {\n id\n body {\n text\n }\n flightComponentValidateMutation: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count})\n }\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "c2572b38ee685a5a6aabe18d3b11f56b"; -} - -module.exports = ((node/*: any*/)/*: Mutation< - validateMutationTestFlightMutation$variables, - validateMutationTestFlightMutation$data, ->*/); diff --git a/packages/relay-runtime/mutations/__tests__/commitMutation-test.js b/packages/relay-runtime/mutations/__tests__/commitMutation-test.js index baac50bf24153..4cecfe2ea0cce 100644 --- a/packages/relay-runtime/mutations/__tests__/commitMutation-test.js +++ b/packages/relay-runtime/mutations/__tests__/commitMutation-test.js @@ -144,6 +144,8 @@ describe('Configs: NODE_DELETE', () 
=> { store.subscribe(snapshot, callback); commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ mutation, optimisticResponse, optimisticUpdater, @@ -595,6 +597,10 @@ describe('Configs: RANGE_ADD', () => { store.subscribe(snapshot, callback); commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, optimisticResponse, optimisticUpdater, @@ -651,6 +657,10 @@ describe('Configs: RANGE_ADD', () => { // send mutation commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, variables, }); @@ -736,6 +746,10 @@ describe('Configs: RANGE_ADD', () => { // send the same mutation again commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, variables, }); @@ -816,6 +830,10 @@ describe('Configs: RANGE_ADD', () => { store.subscribe(snapshot, callback); commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, optimisticResponse, optimisticUpdater, @@ -881,6 +899,10 @@ describe('Configs: RANGE_ADD', () => { store.subscribe(snapshot, callback); commitMutation(environment, { configs, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of 
commitMutation */ mutation, optimisticResponse, optimisticUpdater, @@ -936,6 +958,10 @@ describe('Configs: RANGE_ADD', () => { // send mutation commitMutation(environment, { updater, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, variables, }); @@ -1027,6 +1053,10 @@ describe('Configs: RANGE_ADD', () => { // send the same mutation again commitMutation(environment, { updater, + /* $FlowFixMe[prop-missing] error exposed when improving flow typing of + * commitMutation */ + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ mutation, variables, }); @@ -1139,7 +1169,7 @@ describe('Required mutation roots', () => { mutation commitMutationTestRequiredRootFieldMutation( $input: CommentDeleteInput ) { - commentDelete(input: $input) @required(action: THROW) { + commentDelete(input: $input) { deletedCommentId } } diff --git a/packages/relay-runtime/mutations/__tests__/readUpdatableFragment-test.js b/packages/relay-runtime/mutations/__tests__/readUpdatableFragment-test.js index ce812d6c7431e..45a9fff50c7c2 100644 --- a/packages/relay-runtime/mutations/__tests__/readUpdatableFragment-test.js +++ b/packages/relay-runtime/mutations/__tests__/readUpdatableFragment-test.js @@ -96,8 +96,9 @@ describe('readUpdatableFragment', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableFragmentTestRegularQuery['response']); + const readOnlyData: readUpdatableFragmentTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! 
+ (RelayReader.read(source, selector).data: any); const me = readOnlyData.me; if (me == null) { @@ -122,8 +123,9 @@ describe('readUpdatableFragment', () => { expect(updatableData.firstName2).toEqual('Repulsa'); }); - const readOnlyData2 = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableFragmentTestRegularQuery['response']); + const readOnlyData2: readUpdatableFragmentTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData2?.me?.firstName).toBe('Rita'); expect(readOnlyData2?.me?.firstName2).toBe('Repulsa'); }); @@ -141,8 +143,9 @@ describe('readUpdatableFragment', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableFragmentTestRegularQuery['response']); + const readOnlyData: readUpdatableFragmentTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); const me = readOnlyData.me; if (me == null) { @@ -167,8 +170,9 @@ describe('readUpdatableFragment', () => { expect(updatableData.firstName3).toEqual('Zedd'); }); - const readOnlyData2 = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableFragmentTestRegularQuery['response']); + const readOnlyData2: readUpdatableFragmentTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! 
+ (RelayReader.read(source, selector).data: any); expect(readOnlyData2?.me?.firstName2).toBe('Lord'); expect(readOnlyData2?.me?.firstName3).toBe('Zedd'); }); diff --git a/packages/relay-runtime/mutations/__tests__/readUpdatableQuery-test.js b/packages/relay-runtime/mutations/__tests__/readUpdatableQuery-test.js index ed444e419f5b8..f135da58c1f21 100644 --- a/packages/relay-runtime/mutations/__tests__/readUpdatableQuery-test.js +++ b/packages/relay-runtime/mutations/__tests__/readUpdatableQuery-test.js @@ -234,8 +234,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData?.me?.name).toEqual('MetaZuck'); }); @@ -384,8 +385,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.id).toBe('42'); @@ -416,8 +418,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! 
- .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.id).toBe('4'); }); @@ -452,8 +455,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); const validUser = (() => { if (readOnlyData.node != null) { @@ -492,8 +496,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); if (readOnlyData.node2?.parents != null) { expect(readOnlyData.node2?.parents[0]?.name).toBe( 'Gaius Julius Caesar', @@ -534,8 +539,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! 
+ (RelayReader.read(source, selector).data: any); const author = (() => { if (updatableData.me?.author != null) { @@ -561,8 +567,9 @@ describe('readUpdatableQuery', () => { }); const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.author?.client_best_friend?.name).toBe('Mark'); }); @@ -579,8 +586,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); if (readOnlyData.node != null) { expect(validateUser(readOnlyData.node)).toBe(false); } else { @@ -601,8 +609,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! 
+ (RelayReader.read(source, selector).data: any); if (readOnlyData.node != null) { expect(validateUser(readOnlyData.node)).toEqual(readOnlyData.node); } else { @@ -633,8 +642,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); const validNode = (() => { if (readOnlyData.me != null) { @@ -668,8 +678,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.__isreadUpdatableQueryTest_node).toBe('User'); if (readOnlyData.me != null) { expect(validateNode(readOnlyData.me)).toBe(readOnlyData.me); @@ -720,8 +731,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! 
+ (RelayReader.read(source, selector).data: any); expect(readOnlyData.me).toBe(null); }); @@ -779,8 +791,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.node2?.parents).toEqual([]); }); @@ -815,8 +828,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); if (updatableData.node2 != null) { if (updatableData.node2.__typename === 'User') { @@ -1004,8 +1018,9 @@ describe('readUpdatableQuery', () => { }); const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.author?.client_nickname).toBe('Mr. 
Right'); }); @@ -1046,8 +1061,9 @@ describe('readUpdatableQuery', () => { const source = environment.getStore().getSource(); const selector = operation.fragment; - const readOnlyData = ((RelayReader.read(source, selector) // $FlowFixMe[unclear-type] Just to cast it to a better type! - .data: any): readUpdatableQueryTestRegularQuery['response']); + const readOnlyData: readUpdatableQueryTestRegularQuery['response'] = + // $FlowFixMe[unclear-type] Just to cast it to a better type! + (RelayReader.read(source, selector).data: any); expect(readOnlyData.me?.author?.client_best_friend?.name).toBe('Mr. Right'); }); diff --git a/packages/relay-runtime/mutations/__tests__/validateMutation-test.js b/packages/relay-runtime/mutations/__tests__/validateMutation-test.js index 24d5ca03508f3..fe54f29d60252 100644 --- a/packages/relay-runtime/mutations/__tests__/validateMutation-test.js +++ b/packages/relay-runtime/mutations/__tests__/validateMutation-test.js @@ -12,7 +12,7 @@ 'use strict'; const validateMutation = require('../validateMutation'); -const {RelayFeatureFlags, graphql} = require('relay-runtime'); +const {graphql} = require('relay-runtime'); jest.mock('warning', () => { return (dontWarn, message, ...args) => { @@ -843,104 +843,4 @@ describe('validateOptimisticResponse', () => { } }); }); - - describe('feature ENABLE_REACT_FLIGHT_COMPONENT_FIELD', () => { - let FlightMutation; - beforeEach(() => { - jest.clearAllMocks(); - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - FlightMutation = graphql` - mutation validateMutationTestFlightMutation( - $input: StoryUpdateInput! - $count: Int! 
- ) { - storyUpdate(input: $input) { - story { - id - body { - text - } - flightComponentValidateMutation(condition: true, count: $count) - } - } - } - `; - }); - - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('Throws an error when optimistic responses contain Flight fields', () => { - const optimisticResponse: $FlowFixMe = { - storyUpdate: { - story: { - id: 1, - body: { - text: 'Hello world', - }, - flightComponentValidateMutation: { - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [], - errors: [], - fragments: [], - }, - }, - }, - }; - const variables = null; - - expect(() => { - validateMutation(optimisticResponse, FlightMutation, variables); - }).toThrowError(/validateMutation: Flight fields are not compatible/); - }); - - it('Does not error when optimistic responses contain null or undefined Flight fields', () => { - const optimisticResponseWithUndefinedFlightField = { - storyUpdate: { - story: { - id: 1, - body: { - text: 'Hello world', - }, - flightComponentValidateMutation: undefined, - }, - }, - }; - const optimisticResponseWithNullFlightField = { - storyUpdate: { - story: { - id: 1, - body: { - text: 'Hello world', - }, - flightComponentValidateMutation: null, - }, - }, - }; - const variables = null; - - expect(() => { - validateMutation( - optimisticResponseWithUndefinedFlightField, - FlightMutation, - variables, - ); - }).not.toThrow(); - expect(() => { - validateMutation( - optimisticResponseWithNullFlightField, - FlightMutation, - variables, - ); - }).not.toThrow(); - }); - }); }); diff --git a/packages/relay-runtime/mutations/commitMutation.js b/packages/relay-runtime/mutations/commitMutation.js index c4eed0b6b8558..00754e506b829 100644 --- a/packages/relay-runtime/mutations/commitMutation.js +++ b/packages/relay-runtime/mutations/commitMutation.js @@ -18,7 +18,12 @@ import type { MutationParameters, SelectorStoreUpdater, } from 
'../store/RelayStoreTypes'; -import type {CacheConfig, Disposable} from '../util/RelayRuntimeTypes'; +import type { + CacheConfig, + Disposable, + Mutation, + Variables, +} from '../util/RelayRuntimeTypes'; import type {DeclarativeMutationConfig} from './RelayDeclarativeMutationConfig'; const {getRequest} = require('../query/GraphQLTag'); @@ -54,13 +59,28 @@ export type MutationConfig = { variables: TMutation['variables'], }; +export type CommitMutationConfig = { + cacheConfig?: CacheConfig, + configs?: Array, + mutation: Mutation, + onCompleted?: ?(response: TData, errors: ?Array) => void, + onError?: ?(error: Error) => void, + onNext?: ?() => void, + onUnsubscribe?: ?() => void, + optimisticResponse?: TRawResponse, + optimisticUpdater?: ?SelectorStoreUpdater, + updater?: ?SelectorStoreUpdater, + uploadables?: UploadableMap, + variables: TVariables, +}; + /** * Higher-level helper function to execute a mutation against a specific * environment. */ -function commitMutation( +function commitMutation( environment: IEnvironment, - config: MutationConfig, + config: CommitMutationConfig, ): Disposable { invariant( isRelayModernEnvironment(environment), @@ -85,6 +105,8 @@ function commitMutation( ); // TODO: remove this check after we fix flow. 
if (typeof optimisticResponse === 'function') { + /* $FlowFixMe[incompatible-use] error exposed when improving flow typing of + * commitMutation */ optimisticResponse = optimisticResponse(); warning( false, @@ -98,16 +120,21 @@ function commitMutation( } } if (configs) { - ({optimisticUpdater, updater} = RelayDeclarativeMutationConfig.convert( - configs, - mutation, - optimisticUpdater, - updater, - )); + ({optimisticUpdater, updater} = RelayDeclarativeMutationConfig.convert<{ + variables: TVariables, + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ + response: TData, + }>(configs, mutation, optimisticUpdater, updater)); } const errors: Array = []; const subscription = environment - .executeMutation({ + .executeMutation<{ + variables: TVariables, + /* $FlowFixMe[incompatible-call] error exposed when improving flow typing + * of commitMutation */ + response: TData, + }>({ operation, optimisticResponse, optimisticUpdater, diff --git a/packages/relay-runtime/mutations/createUpdatableProxy.js b/packages/relay-runtime/mutations/createUpdatableProxy.js index 282cd861e38d4..32000fa879e1e 100644 --- a/packages/relay-runtime/mutations/createUpdatableProxy.js +++ b/packages/relay-runtime/mutations/createUpdatableProxy.js @@ -28,12 +28,12 @@ const { ACTOR_CHANGE, ALIASED_FRAGMENT_SPREAD, ALIASED_INLINE_FRAGMENT_SPREAD, + CATCH_FIELD, CLIENT_EDGE_TO_CLIENT_OBJECT, CLIENT_EDGE_TO_SERVER_OBJECT, CLIENT_EXTENSION, CONDITION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_DATA_FRAGMENT_SPREAD, INLINE_FRAGMENT, @@ -136,11 +136,11 @@ function updateProxyFromSelections( ); // Flow incorrect assumes that the return value for the get method must match // the set parameter. 
- let value = (updatableProxyRootRecord.getValue( + // $FlowFixMe[unclear-type] Typed by the generated updatable query flow type + let value: any = updatableProxyRootRecord.getValue( selection.name, newVariables, - // $FlowFixMe[unclear-type] Typed by the generated updatable query flow type - ): any); + ); if (value == null) { value = getScalarUsingMissingFieldHandlers( selection, @@ -202,10 +202,10 @@ function updateProxyFromSelections( case CLIENT_EDGE_TO_CLIENT_OBJECT: case CLIENT_EDGE_TO_SERVER_OBJECT: case DEFER: - case FLIGHT_FIELD: case MODULE_IMPORT: case RELAY_LIVE_RESOLVER: case REQUIRED_FIELD: + case CATCH_FIELD: case STREAM: case RELAY_RESOLVER: // These types of reader nodes are not currently handled. diff --git a/packages/relay-runtime/mutations/validateMutation.js b/packages/relay-runtime/mutations/validateMutation.js index 065f9e9f49c2b..42303c0c6032d 100644 --- a/packages/relay-runtime/mutations/validateMutation.js +++ b/packages/relay-runtime/mutations/validateMutation.js @@ -25,12 +25,12 @@ const { CLIENT_EXTENSION, CONDITION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_FRAGMENT, LINKED_FIELD, LINKED_HANDLE, MODULE_IMPORT, + RELAY_LIVE_RESOLVER, RELAY_RESOLVER, SCALAR_FIELD, SCALAR_HANDLE, @@ -137,7 +137,6 @@ if (__DEV__) { return; case SCALAR_FIELD: case LINKED_FIELD: - case FLIGHT_FIELD: return validateField(optimisticResponse, selection, context); case ACTOR_CHANGE: return validateField( @@ -166,6 +165,7 @@ if (__DEV__) { case TYPE_DISCRIMINATOR: return validateAbstractKey(context, selection.abstractKey); case RELAY_RESOLVER: + case RELAY_LIVE_RESOLVER: case CLIENT_EDGE_TO_CLIENT_OBJECT: case LINKED_HANDLE: case SCALAR_HANDLE: @@ -244,21 +244,6 @@ if (__DEV__) { return; } } - case FLIGHT_FIELD: - if ( - optimisticResponse[fieldName] === null || - (hasOwnProperty.call(optimisticResponse, fieldName) && - optimisticResponse[fieldName] === undefined) - ) { - return; - } - throw new Error( - 'validateMutation: Flight fields are not compatible 
with ' + - 'optimistic updates, as React does not have the component code ' + - 'necessary to process new data on the client. Instead, you ' + - 'should update your code to require a full refetch of the Flight ' + - 'field so your UI can be updated.', - ); } }; diff --git a/packages/relay-runtime/network/RelayNetworkTypes.js b/packages/relay-runtime/network/RelayNetworkTypes.js index 781fed51054fc..244edae3fcfed 100644 --- a/packages/relay-runtime/network/RelayNetworkTypes.js +++ b/packages/relay-runtime/network/RelayNetworkTypes.js @@ -34,6 +34,7 @@ export type PayloadError = interface { column: number, ... }>, + path?: Array, // Not officially part of the spec, but used at Facebook severity?: 'CRITICAL' | 'ERROR' | 'WARNING', }; @@ -82,6 +83,15 @@ export type GraphQLResponse = | GraphQLSingularResponse | $ReadOnlyArray; +/** + * A function that pre-process the response at the network layer. This + * function is invoked right after the network operation and before cache + * operations. + */ +export type preprocessResponseFunction = ( + response: RelayObservable, +) => RelayObservable; + /** * A function that returns an Observable representing the response of executing * a GraphQL operation. @@ -92,6 +102,8 @@ export type ExecuteFunction = ( cacheConfig: CacheConfig, uploadables?: ?UploadableMap, logRequestInfo?: ?LogRequestInfoFunction, + encryptedVariables?: ?string, + preprocessResponse?: ?preprocessResponseFunction, ) => RelayObservable; /** @@ -120,45 +132,3 @@ export type SubscribeFunction = ( export type Uploadable = File | Blob; export type UploadableMap = {[key: string]: Uploadable}; - -/** - * React Flight tree created on the server. 
- */ -export type ReactFlightServerTree = mixed; -export type ReactFlightPayloadQuery = { - +id: mixed, - +module: mixed, - +response: GraphQLSingularResponse, - +variables: Variables, -}; -export type ReactFlightPayloadFragment = { - +__id: string, - +__typename: string, - +module: mixed, - +response: GraphQLSingularResponse, - +variables: Variables, -}; -export type ReactFlightServerError = { - +message: string, - +stack: string, - ... -}; -/** - * Data that is returned by a Flight compliant GraphQL server. - * - * - status: string representing status of the server response. - * - tree: React Server Components written into a row protocol that can be later - * read on the client. If this is null, this indicates that no rows were - * were written on the server. - * - queries: an array of queries that the server preloaded for the client. - * - errors: an array of errors that were encountered while rendering the - * Server Component. - * - fragments: an array of fragments that the server preloaded for the client. - */ -export type ReactFlightPayloadData = { - +status: string, - +tree: ?Array, - +queries: Array, - +errors: Array, - +fragments: Array, -}; diff --git a/packages/relay-runtime/network/RelayObservable.js b/packages/relay-runtime/network/RelayObservable.js index 8220f20cd5aab..6d5a11547a320 100644 --- a/packages/relay-runtime/network/RelayObservable.js +++ b/packages/relay-runtime/network/RelayObservable.js @@ -71,9 +71,7 @@ export interface Subscribable<+T> { subscribe(observer: Observer | Sink): Subscription; } -// Note: This should accept Subscribable instead of RelayObservable, -// however Flow cannot yet distinguish it from T. 
-export type ObservableFromValue<+T> = RelayObservable | Promise | T; +export type ObservableFromValue<+T> = Subscribable | Promise | T; let hostReportError: | ((Error, isUncaughtThrownError: boolean) => mixed) @@ -144,11 +142,11 @@ class RelayObservable<+T> implements Subscribable { * useful for accepting the result of a user-provided FetchFunction. */ static from(obj: ObservableFromValue): RelayObservable { - return isObservable(obj) + return isObservable(obj) ? fromObservable(obj) - : isPromise(obj) - ? fromPromise(obj) - : fromValue(obj); + : isPromise(obj) + ? fromPromise(obj) + : fromValue(obj); } /** @@ -272,7 +270,8 @@ class RelayObservable<+T> implements Subscribable { ifEmpty(alternate: RelayObservable): RelayObservable { return RelayObservable.create(sink => { let hasValue = false; - let current: Subscription = this.subscribe({ + let current: ?Subscription; + current = this.subscribe({ next(value) { hasValue = true; sink.next(value); @@ -287,7 +286,7 @@ class RelayObservable<+T> implements Subscribable { }, }); return () => { - current.unsubscribe(); + current && current.unsubscribe(); }; }); } @@ -446,8 +445,7 @@ class RelayObservable<+T> implements Subscribable { } // Use declarations to teach Flow how to check isObservable. 
-declare function isObservable(p: mixed): boolean %checks(p instanceof - RelayObservable); +declare function isObservable(obj: mixed): obj is Subscribable; function isObservable(obj: mixed) { return ( diff --git a/packages/relay-runtime/package.json b/packages/relay-runtime/package.json index 8f00038272fbb..40e3624af8df5 100644 --- a/packages/relay-runtime/package.json +++ b/packages/relay-runtime/package.json @@ -1,7 +1,7 @@ { "name": "relay-runtime", "description": "A core runtime for building GraphQL-driven applications.", - "version": "15.0.0", + "version": "17.0.0", "keywords": [ "graphql", "relay" diff --git a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest2UserFragment.graphql.js b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest2UserFragment.graphql.js index 2017f8de5f041..7138626f61d42 100644 --- a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest2UserFragment.graphql.js +++ b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest2UserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<99b2659aac414e5620cf540a3b13791b>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./GraphQLTagTestUserFragment1RefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "GraphQLTagTest2UserFragment", diff --git a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest3UserFragment.graphql.js b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest3UserFragment.graphql.js index 838f63678793c..aec7110766974 100644 --- a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest3UserFragment.graphql.js +++ b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest3UserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * 
@generated SignedSource<<4018b4e80f1d6432e56d1dfa7535a059>> + * @generated SignedSource<<8daad82cbca6de3764f2bf2ed141b351>> * @flow * @lightSyntaxTransform * @nogrep @@ -85,7 +85,10 @@ return { "node" ], "operation": require('./GraphQLTagTestUserFragment2RefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "GraphQLTagTest3UserFragment", diff --git a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest5UserFragment.graphql.js b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest5UserFragment.graphql.js index 24785fff621b4..412f610ef37da 100644 --- a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest5UserFragment.graphql.js +++ b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest5UserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<11da578433d138975c94a54c2626cc87>> + * @generated SignedSource<<7b5a78643acce4af8a23edc051d9a7d3>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./GraphQLTagTestUserFragment3RefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "GraphQLTagTest5UserFragment", diff --git a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest6UserFragment.graphql.js b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest6UserFragment.graphql.js index 31333eef99c3c..528ddcdb152c3 100644 --- a/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest6UserFragment.graphql.js +++ b/packages/relay-runtime/query/__tests__/__generated__/GraphQLTagTest6UserFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<990accbcac33e8970b31abebfdb8ddf9>> + * @generated SignedSource<<7fa67ff6bb012fe145942de10f12f8a5>> * @flow * 
@lightSyntaxTransform * @nogrep @@ -85,7 +85,10 @@ return { "node" ], "operation": require('./GraphQLTagTestUserFragment4RefetchQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "GraphQLTagTest6UserFragment", diff --git a/packages/relay-runtime/query/__tests__/fetchQuery-test.js b/packages/relay-runtime/query/__tests__/fetchQuery-test.js index 4b9bd2b5a273e..17d9e4ab8a739 100644 --- a/packages/relay-runtime/query/__tests__/fetchQuery-test.js +++ b/packages/relay-runtime/query/__tests__/fetchQuery-test.js @@ -11,6 +11,7 @@ 'use strict'; +import type {RelayFieldLoggerEvent} from '../../store/RelayStoreTypes'; import type {fetchQueryTest1Query$data} from './__generated__/fetchQueryTest1Query.graphql'; import type {RequestParameters} from 'relay-runtime'; @@ -239,21 +240,9 @@ describe('fetchQuery', () => { describe('fetchQuery with missing @required value', () => { it('provides data snapshot on next', () => { - const requiredFieldLogger = jest.fn< - [ - | {+fieldPath: string, +kind: 'missing_field.log', +owner: string} - | {+fieldPath: string, +kind: 'missing_field.throw', +owner: string} - | { - +error: Error, - +fieldPath: string, - +kind: 'relay_resolver.error', - +owner: string, - }, - ], - void, - >(); + const relayFieldLogger = jest.fn<[RelayFieldLoggerEvent], void>(); const environment = createMockEnvironment({ - requiredFieldLogger, + relayFieldLogger, }); const query = graphql` query fetchQueryTest2Query { @@ -278,7 +267,7 @@ describe('fetchQuery with missing @required value', () => { }); subscription.unsubscribe(); expect(observer.next).toHaveBeenCalledWith({me: null}); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'me.name', kind: 'missing_field.log', owner: 'fetchQueryTest2Query', @@ -286,20 +275,8 @@ describe('fetchQuery with missing @required value', () => { }); it('throws on resolution', () 
=> { - const requiredFieldLogger = jest.fn< - [ - | {+fieldPath: string, +kind: 'missing_field.log', +owner: string} - | {+fieldPath: string, +kind: 'missing_field.throw', +owner: string} - | { - +error: Error, - +fieldPath: string, - +kind: 'relay_resolver.error', - +owner: string, - }, - ], - void, - >(); - const environment = createMockEnvironment({requiredFieldLogger}); + const relayFieldLogger = jest.fn<[RelayFieldLoggerEvent], void>(); + const environment = createMockEnvironment({relayFieldLogger}); const query = graphql` query fetchQueryTest3Query { me { @@ -322,7 +299,7 @@ describe('fetchQuery with missing @required value', () => { data: {me: {id: 'ID-1', name: null}}, }); subscription.unsubscribe(); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'me.name', kind: 'missing_field.throw', owner: 'fetchQueryTest3Query', diff --git a/packages/relay-runtime/query/fetchQuery.js b/packages/relay-runtime/query/fetchQuery.js index 11b7a8d6683c1..dfbe77533de0d 100644 --- a/packages/relay-runtime/query/fetchQuery.js +++ b/packages/relay-runtime/query/fetchQuery.js @@ -140,6 +140,8 @@ function fetchQuery( environment, snapshot.missingRequiredFields, snapshot.relayResolverErrors, + snapshot.errorResponseFields, + queryNode.fragment.metadata?.throwOnFieldError ?? false, ); /* $FlowFixMe[incompatible-return] we assume readData returns the right * data just having written it from network or checked availability. 
*/ diff --git a/packages/relay-runtime/query/fetchQueryInternal.js b/packages/relay-runtime/query/fetchQueryInternal.js index 5dab8b751f109..51a47bedb9789 100644 --- a/packages/relay-runtime/query/fetchQueryInternal.js +++ b/packages/relay-runtime/query/fetchQueryInternal.js @@ -21,7 +21,6 @@ import type { import type {RequestIdentifier} from '../util/getRequestIdentifier'; const Observable = require('../network/RelayObservable'); -const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const RelayReplaySubject = require('../util/RelayReplaySubject'); const invariant = require('invariant'); @@ -250,12 +249,6 @@ function getPromiseForActiveRequest( if (!environment.isRequestActive(cachedRequest.identifier)) { return null; } - if (RelayFeatureFlags.USE_REACT_CACHE) { - const existing = cachedRequest.promise; - if (existing) { - return existing; - } - } const promise = new Promise((resolve, reject) => { let resolveOnNext = false; getActiveStatusObservableForCachedRequest( @@ -278,17 +271,6 @@ function getPromiseForActiveRequest( }); resolveOnNext = true; }); - if (RelayFeatureFlags.USE_REACT_CACHE) { - // React Suspense should get thrown the same promise each time, so we cache it. - // However, the promise gets resolved on each payload, so subsequently we need - // to provide a new fresh promise that isn't already resolved. (When the feature - // flag is off we do this in QueryResource.) 
- cachedRequest.promise = promise; - const cleanup = () => { - cachedRequest.promise = null; - }; - promise.then(cleanup, cleanup); - } return promise; } diff --git a/packages/relay-runtime/store/DataChecker.js b/packages/relay-runtime/store/DataChecker.js index 63cf62f6bf0ec..a8bc64e6d523f 100644 --- a/packages/relay-runtime/store/DataChecker.js +++ b/packages/relay-runtime/store/DataChecker.js @@ -13,8 +13,8 @@ import type {ActorIdentifier} from '../multi-actor-environment/ActorIdentifier'; import type { - NormalizationFlightField, NormalizationLinkedField, + NormalizationLiveResolverField, NormalizationModuleImport, NormalizationNode, NormalizationResolverField, @@ -28,7 +28,6 @@ import type { MutableRecordSource, NormalizationSelector, OperationLoader, - ReactFlightReachableExecutableDefinitions, RecordSource, } from './RelayStoreTypes'; @@ -36,14 +35,12 @@ const RelayRecordSourceMutator = require('../mutations/RelayRecordSourceMutator' const RelayRecordSourceProxy = require('../mutations/RelayRecordSourceProxy'); const getOperation = require('../util/getOperation'); const RelayConcreteNode = require('../util/RelayConcreteNode'); -const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const {isClientID} = require('./ClientID'); const cloneRelayHandleSourceField = require('./cloneRelayHandleSourceField'); const cloneRelayScalarHandleSourceField = require('./cloneRelayScalarHandleSourceField'); const {getLocalVariables} = require('./RelayConcreteVariables'); const RelayModernRecord = require('./RelayModernRecord'); const {EXISTENT, UNKNOWN} = require('./RelayRecordState'); -const RelayStoreReactFlightUtils = require('./RelayStoreReactFlightUtils'); const RelayStoreUtils = require('./RelayStoreUtils'); const {TYPE_SCHEMA_TYPE, generateTypeID} = require('./TypeID'); const invariant = require('invariant'); @@ -60,19 +57,19 @@ const { CLIENT_EXTENSION, CLIENT_EDGE_TO_CLIENT_OBJECT, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_FRAGMENT, LINKED_FIELD, 
LINKED_HANDLE, MODULE_IMPORT, RELAY_RESOLVER, + RELAY_LIVE_RESOLVER, SCALAR_FIELD, SCALAR_HANDLE, STREAM, TYPE_DISCRIMINATOR, } = RelayConcreteNode; -const {ROOT_ID, getModuleOperationKey, getStorageKey, getArgumentValues} = +const {getModuleOperationKey, getStorageKey, getArgumentValues} = RelayStoreUtils; /** @@ -117,7 +114,6 @@ class DataChecker { _mostRecentlyInvalidatedAt: number | null; _mutator: RelayRecordSourceMutator; _operationLoader: OperationLoader | null; - _operationLastWrittenAt: ?number; _recordSourceProxy: RelayRecordSourceProxy; _recordWasMissing: boolean; _source: RecordSource; @@ -451,13 +447,6 @@ class DataChecker { this._handleMissing(); } // else: if it does or doesn't implement, we don't need to check or skip anything else break; - case FLIGHT_FIELD: - if (RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD) { - this._checkFlightField(selection, dataID); - } else { - throw new Error('Flight fields are not yet supported.'); - } - break; case CLIENT_COMPONENT: if (this._shouldProcessClientComponents === false) { break; @@ -467,6 +456,9 @@ class DataChecker { case RELAY_RESOLVER: this._checkResolver(selection, dataID); break; + case RELAY_LIVE_RESOLVER: + this._checkResolver(selection, dataID); + break; case CLIENT_EDGE_TO_CLIENT_OBJECT: this._checkResolver(selection.backingField, dataID); break; @@ -480,7 +472,10 @@ class DataChecker { } }); } - _checkResolver(resolver: NormalizationResolverField, dataID: DataID) { + _checkResolver( + resolver: NormalizationResolverField | NormalizationLiveResolverField, + dataID: DataID, + ) { if (resolver.fragment) { this._traverseSelections([resolver.fragment], dataID); } @@ -602,57 +597,6 @@ class DataChecker { } } - _checkFlightField(field: NormalizationFlightField, dataID: DataID): void { - const storageKey = getStorageKey(field, this._variables); - const linkedID = this._mutator.getLinkedRecordID(dataID, storageKey); - - if (linkedID == null) { - if (linkedID === undefined) { - this._handleMissing(); 
- return; - } - return; - } - - const tree = this._mutator.getValue( - linkedID, - RelayStoreReactFlightUtils.REACT_FLIGHT_TREE_STORAGE_KEY, - ); - const reachableExecutableDefinitions = this._mutator.getValue( - linkedID, - RelayStoreReactFlightUtils.REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - ); - - if (tree == null || !Array.isArray(reachableExecutableDefinitions)) { - this._handleMissing(); - return; - } - - const operationLoader = this._operationLoader; - invariant( - operationLoader !== null, - 'DataChecker: Expected an operationLoader to be configured when using ' + - 'React Flight.', - ); - // In Flight, the variables that are in scope for reachable executable - // definitions aren't the same as what's in scope for the outer query. - const prevVariables = this._variables; - // $FlowFixMe[incompatible-cast] - for (const definition of (reachableExecutableDefinitions: Array)) { - this._variables = definition.variables; - const normalizationRootNode = operationLoader.get(definition.module); - if (normalizationRootNode != null) { - const operation = getOperation(normalizationRootNode); - this._traverseSelections(operation.selections, ROOT_ID); - } else { - // If the fragment is not available, we assume that the data cannot have - // been processed yet and must therefore be missing. - this._handleMissing(); - } - } - this._variables = prevVariables; - } - // For abstract types defined in the client schema extension, we won't be // getting `__is` hints from the server. 
To handle this, the // compiler attaches additional metadata on the normalization artifact, diff --git a/packages/relay-runtime/store/OperationExecutor.js b/packages/relay-runtime/store/OperationExecutor.js index 97444ede5a187..88424d3993130 100644 --- a/packages/relay-runtime/store/OperationExecutor.js +++ b/packages/relay-runtime/store/OperationExecutor.js @@ -16,7 +16,6 @@ import type { GraphQLResponse, GraphQLResponseWithData, GraphQLSingularResponse, - ReactFlightServerTree, } from '../network/RelayNetworkTypes'; import type {Sink, Subscription} from '../network/RelayObservable'; import type { @@ -27,16 +26,12 @@ import type { LogFunction, ModuleImportPayload, MutationParameters, - NormalizationSelector, OperationDescriptor, OperationLoader, OperationTracker, OptimisticResponseConfig, OptimisticUpdate, PublishQueue, - ReactFlightClientResponse, - ReactFlightPayloadDeserializer, - ReactFlightServerErrorHandler, Record, RelayResponsePayload, RequestDescriptor, @@ -54,7 +49,7 @@ import type { } from '../util/NormalizationNode'; import type {DataID, Disposable, Variables} from '../util/RelayRuntimeTypes'; import type {GetDataID} from './RelayResponseNormalizer'; -import type {NormalizationOptions} from './RelayResponseNormalizer'; +import type {NormalizeResponseFunction} from './RelayStoreTypes'; const RelayObservable = require('../network/RelayObservable'); const generateID = require('../util/generateID'); @@ -71,7 +66,6 @@ const { createReaderSelector, } = require('./RelayModernSelector'); const RelayRecordSource = require('./RelayRecordSource'); -const RelayResponseNormalizer = require('./RelayResponseNormalizer'); const {ROOT_TYPE, TYPENAME_KEY, getStorageKey} = require('./RelayStoreUtils'); const invariant = require('invariant'); const warning = require('warning'); @@ -81,14 +75,13 @@ export type ExecuteConfig = { +getDataID: GetDataID, +getPublishQueue: (actorIdentifier: ActorIdentifier) => PublishQueue, +getStore: (actorIdentifier: ActorIdentifier) => 
Store, + +normalizeResponse: NormalizeResponseFunction, +isClientPayload?: boolean, +operation: OperationDescriptor, +operationExecutions: Map, +operationLoader: ?OperationLoader, +operationTracker: OperationTracker, +optimisticConfig: ?OptimisticResponseConfig, - +reactFlightPayloadDeserializer?: ?ReactFlightPayloadDeserializer, - +reactFlightServerErrorHandler?: ?ReactFlightServerErrorHandler, +scheduler?: ?TaskScheduler, +shouldProcessClientComponents?: ?boolean, +sink: Sink, @@ -146,8 +139,6 @@ class Executor { _optimisticUpdates: null | Array>; _pendingModulePayloadsCount: number; +_getPublishQueue: (actorIdentifier: ActorIdentifier) => PublishQueue; - _reactFlightPayloadDeserializer: ?ReactFlightPayloadDeserializer; - _reactFlightServerErrorHandler: ?ReactFlightServerErrorHandler; _shouldProcessClientComponents: ?boolean; _scheduler: ?TaskScheduler; _sink: Sink; @@ -165,6 +156,7 @@ class Executor { +_isClientPayload: boolean; +_isSubscriptionOperation: boolean; +_seenActors: Set; + _normalizeResponse: NormalizeResponseFunction; constructor({ actorIdentifier, @@ -177,8 +169,6 @@ class Executor { operationLoader, operationTracker, optimisticConfig, - reactFlightPayloadDeserializer, - reactFlightServerErrorHandler, scheduler, shouldProcessClientComponents, sink, @@ -186,6 +176,7 @@ class Executor { treatMissingFieldsAsNull, updater, log, + normalizeResponse, }: ExecuteConfig): void { this._actorIdentifier = actorIdentifier; this._getDataID = getDataID; @@ -211,16 +202,28 @@ class Executor { this._subscriptions = new Map(); this._updater = updater; this._isClientPayload = isClientPayload === true; - this._reactFlightPayloadDeserializer = reactFlightPayloadDeserializer; - this._reactFlightServerErrorHandler = reactFlightServerErrorHandler; this._isSubscriptionOperation = this._operation.request.node.params.operationKind === 'subscription'; this._shouldProcessClientComponents = shouldProcessClientComponents; this._retainDisposables = new Map(); this._seenActors = 
new Set(); this._completeFns = []; + this._normalizeResponse = normalizeResponse; const id = this._nextSubscriptionId++; + + if ( + RelayFeatureFlags.PROCESS_OPTIMISTIC_UPDATE_BEFORE_SUBSCRIPTION && + optimisticConfig != null + ) { + this._processOptimisticResponse( + optimisticConfig.response != null + ? {data: optimisticConfig.response} + : null, + optimisticConfig.updater, + false, + ); + } source.subscribe({ complete: () => this._complete(id), error: error => this._error(error), @@ -243,7 +246,10 @@ class Executor { }, }); - if (optimisticConfig != null) { + if ( + !RelayFeatureFlags.PROCESS_OPTIMISTIC_UPDATE_BEFORE_SUBSCRIPTION && + optimisticConfig != null + ) { this._processOptimisticResponse( optimisticConfig.response != null ? {data: optimisticConfig.response} @@ -285,26 +291,6 @@ class Executor { this._disposeRetainedData(); } - _deserializeReactFlightPayloadWithLogging = ( - tree: ReactFlightServerTree, - ): ReactFlightClientResponse => { - const reactFlightPayloadDeserializer = this._reactFlightPayloadDeserializer; - invariant( - typeof reactFlightPayloadDeserializer === 'function', - 'OperationExecutor: Expected reactFlightPayloadDeserializer to be available when calling _deserializeReactFlightPayloadWithLogging.', - ); - const [duration, result] = withDuration(() => { - return reactFlightPayloadDeserializer(tree); - }); - this._log({ - name: 'execute.flight.payload_deserialize', - executeId: this._executeId, - operationName: this._operation.request.node.params.name, - duration, - }); - return result; - }; - _updateActiveState(): void { let activeState; switch (this._state) { @@ -607,7 +593,7 @@ class Executor { } const optimisticUpdates: Array> = []; if (response) { - const payload = normalizeResponse( + const payload = this._normalizeResponse( response, this._operation.root, ROOT_TYPE, @@ -615,11 +601,6 @@ class Executor { actorIdentifier: this._actorIdentifier, getDataID: this._getDataID, path: [], - reactFlightPayloadDeserializer: - 
this._reactFlightPayloadDeserializer != null - ? this._deserializeReactFlightPayloadWithLogging - : null, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, shouldProcessClientComponents: this._shouldProcessClientComponents, treatMissingFieldsAsNull, }, @@ -651,7 +632,10 @@ class Executor { ); // OK: only called on construction and when receiving an optimistic payload from network, // which doesn't fall-through to the regular next() handling - this._runPublishQueue(); + const updatedOwners = this._runPublishQueue(); + if (RelayFeatureFlags.ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES) { + this._updateOperationTracker(updatedOwners); + } } _processOptimisticFollowups( @@ -718,7 +702,7 @@ class Executor { followupPayload.dataID, variables, ); - return normalizeResponse( + return this._normalizeResponse( {data: followupPayload.data}, selector, followupPayload.typeName, @@ -726,11 +710,6 @@ class Executor { actorIdentifier: this._actorIdentifier, getDataID: this._getDataID, path: followupPayload.path, - reactFlightPayloadDeserializer: - this._reactFlightPayloadDeserializer != null - ? this._deserializeReactFlightPayloadWithLogging - : null, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, shouldProcessClientComponents: this._shouldProcessClientComponents, }, @@ -801,7 +780,7 @@ class Executor { this._incrementalResults.clear(); this._source.clear(); return responses.map(payloadPart => { - const relayPayload = normalizeResponse( + const relayPayload = this._normalizeResponse( payloadPart, this._operation.root, ROOT_TYPE, @@ -809,11 +788,6 @@ class Executor { actorIdentifier: this._actorIdentifier, getDataID: this._getDataID, path: [], - reactFlightPayloadDeserializer: - this._reactFlightPayloadDeserializer != null - ? 
this._deserializeReactFlightPayloadWithLogging - : null, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, shouldProcessClientComponents: this._shouldProcessClientComponents, }, @@ -1259,7 +1233,7 @@ class Executor { const prevActorIdentifier = this._actorIdentifier; this._actorIdentifier = placeholder.actorIdentifier ?? this._actorIdentifier; - const relayPayload = normalizeResponse( + const relayPayload = this._normalizeResponse( response, placeholder.selector, placeholder.typeName, @@ -1267,11 +1241,6 @@ class Executor { actorIdentifier: this._actorIdentifier, getDataID: this._getDataID, path: placeholder.path, - reactFlightPayloadDeserializer: - this._reactFlightPayloadDeserializer != null - ? this._deserializeReactFlightPayloadWithLogging - : null, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, shouldProcessClientComponents: this._shouldProcessClientComponents, }, @@ -1495,15 +1464,10 @@ class Executor { record: nextParentRecord, fieldPayloads, }); - const relayPayload = normalizeResponse(response, selector, typeName, { + const relayPayload = this._normalizeResponse(response, selector, typeName, { actorIdentifier: this._actorIdentifier, getDataID: this._getDataID, path: [...normalizationPath, responseKey, String(itemIndex)], - reactFlightPayloadDeserializer: - this._reactFlightPayloadDeserializer != null - ? 
this._deserializeReactFlightPayloadWithLogging - : null, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, shouldProcessClientComponents: this._shouldProcessClientComponents, }); @@ -1636,29 +1600,6 @@ function partitionGraphQLResponses( return [nonIncrementalResponses, incrementalResponses]; } -function normalizeResponse( - response: GraphQLResponseWithData, - selector: NormalizationSelector, - typeName: string, - options: NormalizationOptions, -): RelayResponsePayload { - const {data, errors} = response; - const source = RelayRecordSource.create(); - const record = RelayModernRecord.create(selector.dataID, typeName); - source.set(selector.dataID, record); - const relayPayload = RelayResponseNormalizer.normalize( - source, - selector, - data, - options, - ); - return { - ...relayPayload, - errors, - isFinal: response.extensions?.is_final === true, - }; -} - function stableStringify(value: mixed): string { return JSON.stringify(stableCopy(value)) ?? ''; // null-check for flow } diff --git a/packages/relay-runtime/store/RelayConcreteVariables.js b/packages/relay-runtime/store/RelayConcreteVariables.js index 61fcd0b2bd0b1..f144baaa80414 100644 --- a/packages/relay-runtime/store/RelayConcreteVariables.js +++ b/packages/relay-runtime/store/RelayConcreteVariables.js @@ -124,6 +124,7 @@ function getLocalVariables( const nextVariables = {...currentVariables}; const nextArgs = args ? getArgumentValues(args, currentVariables) : {}; argumentDefinitions.forEach(def => { + // $FlowFixMe[invalid-computed-prop] const value = nextArgs[def.name] ?? def.defaultValue; nextVariables[def.name] = value; }); diff --git a/packages/relay-runtime/store/RelayErrorTrie.js b/packages/relay-runtime/store/RelayErrorTrie.js new file mode 100644 index 0000000000000..2db29ca2941ef --- /dev/null +++ b/packages/relay-runtime/store/RelayErrorTrie.js @@ -0,0 +1,189 @@ +/** + * Copyright (c) Meta Platforms, Inc. 
and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow + * @format + * @oncall relay + */ + +'use strict'; + +import type {PayloadError} from '../network/RelayNetworkTypes'; + +const RelayFeatureFlags = require('../util/RelayFeatureFlags'); + +// $FlowFixMe[recursive-definition] +const SELF: Self = Symbol('$SELF'); + +class RelayFieldError extends Error { + constructor(message: string, errors: Array = []) { + super(message); + this.name = 'RelayFieldError'; + this.message = message; + this.errors = errors; + } + errors: Array; +} + +export opaque type Self = typeof SELF; + +export type TRelayFieldError = $ReadOnly<{ + message: string, + path?: $ReadOnlyArray, + severity?: 'CRITICAL' | 'ERROR' | 'WARNING', +}>; + +/** + * This is a highly-specialized data structure that is designed + * to store the field errors of a GraphQL response in such a way + * that they can be performantly retrieved during normalization. + * + * In particular, the trie can be constructed in O(N) time, where + * N is the number of errors, so long as the depth of the GraphQL + * response data, and therefore the expected length of any error + * paths, is relatively small and constant. + * + * As we recursively traverse the data in the GraphQL response + * during normalization, we can get the sub trie for any field + * in O(1) time. 
+ */ +export opaque type RelayErrorTrie = Map< + string | number | Self, + RelayErrorTrie | Array>, +>; + +function buildErrorTrie( + errors: ?$ReadOnlyArray, +): RelayErrorTrie | null { + if (errors == null) { + return null; + } + if (!RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING) { + return null; + } + const trie: $NonMaybeType = new Map(); + // eslint-disable-next-line no-unused-vars + ERRORS: for (const {path, locations: _, ...error} of errors) { + if (path == null) { + continue; + } + const {length} = path; + if (length === 0) { + continue; + } + const lastIndex = length - 1; + let currentTrie = trie; + for (let index = 0; index < lastIndex; index++) { + const key = path[index]; + const existingValue = currentTrie.get(key); + if (existingValue instanceof Map) { + currentTrie = existingValue; + continue; + } + const newValue: RelayErrorTrie = new Map(); + if (Array.isArray(existingValue)) { + newValue.set(SELF, existingValue); + } + currentTrie.set(key, newValue); + currentTrie = newValue; + } + let lastKey: string | number | symbol = path[lastIndex]; + let container = currentTrie.get(lastKey); + if (container instanceof Map) { + currentTrie = container; + container = currentTrie.get(lastKey); + lastKey = SELF; + } + if (Array.isArray(container)) { + container.push(error); + } else { + currentTrie.set(lastKey, [error]); + } + } + return trie; +} + +function getErrorsByKey( + trie: RelayErrorTrie, + key: string | number, +): $ReadOnlyArray | null { + const value = trie.get(key); + if (value == null) { + return null; + } + if (Array.isArray(value)) { + return value; + } + const errors: Array< + $ReadOnly<{ + message: string, + path?: Array, + severity?: 'CRITICAL' | 'ERROR' | 'WARNING', + }>, + > = []; + recursivelyCopyErrorsIntoArray(value, errors); + return errors; +} + +function recursivelyCopyErrorsIntoArray( + trieOrSet: RelayErrorTrie, + errors: Array< + $ReadOnly<{ + message: string, + path?: Array, + severity?: 'CRITICAL' | 'ERROR' | 'WARNING', + }>, + 
>, +): void { + for (const [childKey, value] of trieOrSet) { + const oldLength = errors.length; + if (Array.isArray(value)) { + errors.push(...value); + } else { + recursivelyCopyErrorsIntoArray(value, errors); + } + if (childKey === SELF) { + continue; + } + const newLength = errors.length; + for (let index = oldLength; index < newLength; index++) { + const error = errors[index]; + if (error.path == null) { + errors[index] = { + ...error, + path: [childKey], + }; + } else { + error.path.unshift(childKey); + } + } + } +} + +function getNestedErrorTrieByKey( + trie: RelayErrorTrie, + key: string | number, +): RelayErrorTrie | null { + const value = trie.get(key); + if (value instanceof Map) { + return value; + } + return null; +} + +module.exports = ({ + SELF, + buildErrorTrie, + getNestedErrorTrieByKey, + getErrorsByKey, + RelayFieldError, +}: { + SELF: typeof SELF, + buildErrorTrie: typeof buildErrorTrie, + getNestedErrorTrieByKey: typeof getNestedErrorTrieByKey, + getErrorsByKey: typeof getErrorsByKey, + RelayFieldError: Class, +}); diff --git a/packages/relay-runtime/store/RelayExperimentalGraphResponseHandler.js b/packages/relay-runtime/store/RelayExperimentalGraphResponseHandler.js index b063282626d77..b5bf3356308df 100644 --- a/packages/relay-runtime/store/RelayExperimentalGraphResponseHandler.js +++ b/packages/relay-runtime/store/RelayExperimentalGraphResponseHandler.js @@ -93,12 +93,12 @@ class GraphModeHandler { } else { if (value.hasOwnProperty('__id')) { // Singular - const streamID = ((value.__id: any): number); + const streamID: number = (value.__id: any); const id = this._lookupCacheKey(streamID); RelayModernRecord.setLinkedRecordID(parentRecord, key, id); } else if (value.hasOwnProperty('__ids')) { // Plural - const streamIDs = ((value.__ids: any): Array); + const streamIDs: Array = (value.__ids: any); const ids = streamIDs.map((sID): ?DataID => { return sID == null ? 
null : this._lookupCacheKey(sID); }); diff --git a/packages/relay-runtime/store/RelayExperimentalGraphResponseTransform.js b/packages/relay-runtime/store/RelayExperimentalGraphResponseTransform.js index 71e7a58b3669a..4a95d12c23b0e 100644 --- a/packages/relay-runtime/store/RelayExperimentalGraphResponseTransform.js +++ b/packages/relay-runtime/store/RelayExperimentalGraphResponseTransform.js @@ -150,7 +150,6 @@ export function normalizeResponseWithMetadata( export class GraphModeNormalizer { _cacheKeyToStreamID: Map; _sentFields: Map>; - _getDataId: GetDataID; _nextStreamID: number; _getDataID: GetDataID; _variables: Variables; @@ -268,7 +267,7 @@ export class GraphModeNormalizer { switch (selection.kind) { case LINKED_FIELD: { const responseKey = selection.alias ?? selection.name; - const fieldData = ((data[responseKey]: any): PayloadData); + const fieldData: PayloadData = (data[responseKey]: any); const storageKey = this._getStorageKey(selection); @@ -306,7 +305,7 @@ export class GraphModeNormalizer { this.duplicateFieldsAvoided++; break; } - const fieldData = ((data[responseKey]: any): ChunkField); + const fieldData: ChunkField = (data[responseKey]: any); parentFields[storageKey] = fieldData; sentFields.add(storageKey); diff --git a/packages/relay-runtime/store/RelayModernEnvironment.js b/packages/relay-runtime/store/RelayModernEnvironment.js index 77a1506183c22..2a9a708bd1711 100644 --- a/packages/relay-runtime/store/RelayModernEnvironment.js +++ b/packages/relay-runtime/store/RelayModernEnvironment.js @@ -28,6 +28,7 @@ import type { LogFunction, MissingFieldHandler, MutationParameters, + NormalizeResponseFunction, OperationAvailability, OperationDescriptor, OperationLoader, @@ -35,9 +36,7 @@ import type { OptimisticResponseConfig, OptimisticUpdateFunction, PublishQueue, - ReactFlightPayloadDeserializer, - ReactFlightServerErrorHandler, - RequiredFieldLogger, + RelayFieldLogger, SelectorStoreUpdater, SingularReaderSelector, Snapshot, @@ -55,9 +54,9 @@ const 
RelayObservable = require('../network/RelayObservable'); const wrapNetworkWithLogObserver = require('../network/wrapNetworkWithLogObserver'); const RelayOperationTracker = require('../store/RelayOperationTracker'); const registerEnvironmentWithDevTools = require('../util/registerEnvironmentWithDevTools'); -const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const defaultGetDataID = require('./defaultGetDataID'); -const defaultRequiredFieldLogger = require('./defaultRequiredFieldLogger'); +const defaultRelayFieldLogger = require('./defaultRelayFieldLogger'); +const normalizeResponse = require('./normalizeResponse'); const OperationExecutor = require('./OperationExecutor'); const RelayPublishQueue = require('./RelayPublishQueue'); const RelayRecordSource = require('./RelayRecordSource'); @@ -69,9 +68,8 @@ export type EnvironmentConfig = { +treatMissingFieldsAsNull?: boolean, +log?: ?LogFunction, +operationLoader?: ?OperationLoader, - +reactFlightPayloadDeserializer?: ?ReactFlightPayloadDeserializer, - +reactFlightServerErrorHandler?: ?ReactFlightServerErrorHandler, +network: INetwork, + +normalizeResponse?: ?NormalizeResponseFunction, +scheduler?: ?TaskScheduler, +store: Store, +missingFieldHandlers?: ?$ReadOnlyArray, @@ -80,7 +78,7 @@ export type EnvironmentConfig = { +UNSTABLE_defaultRenderPolicy?: ?RenderPolicy, +options?: mixed, +isServer?: boolean, - +requiredFieldLogger?: ?RequiredFieldLogger, + +relayFieldLogger?: ?RelayFieldLogger, +shouldProcessClientComponents?: ?boolean, }; @@ -88,8 +86,6 @@ class RelayModernEnvironment implements IEnvironment { __log: LogFunction; +_defaultRenderPolicy: RenderPolicy; _operationLoader: ?OperationLoader; - _reactFlightPayloadDeserializer: ?ReactFlightPayloadDeserializer; - _reactFlightServerErrorHandler: ?ReactFlightServerErrorHandler; _shouldProcessClientComponents: ?boolean; _network: INetwork; _publishQueue: PublishQueue; @@ -103,15 +99,13 @@ class RelayModernEnvironment implements IEnvironment { 
_operationExecutions: Map; +options: mixed; +_isServer: boolean; - requiredFieldLogger: RequiredFieldLogger; + relayFieldLogger: RelayFieldLogger; + _normalizeResponse: NormalizeResponseFunction; constructor(config: EnvironmentConfig) { this.configName = config.configName; this._treatMissingFieldsAsNull = config.treatMissingFieldsAsNull === true; const operationLoader = config.operationLoader; - const reactFlightPayloadDeserializer = - config.reactFlightPayloadDeserializer; - const reactFlightServerErrorHandler = config.reactFlightServerErrorHandler; if (__DEV__) { if (operationLoader != null) { invariant( @@ -123,23 +117,11 @@ class RelayModernEnvironment implements IEnvironment { operationLoader, ); } - if (reactFlightPayloadDeserializer != null) { - invariant( - typeof reactFlightPayloadDeserializer === 'function', - 'RelayModernEnvironment: Expected `reactFlightPayloadDeserializer` ' + - ' to be a function, got `%s`.', - reactFlightPayloadDeserializer, - ); - } } this.__log = config.log ?? emptyFunction; - this.requiredFieldLogger = - config.requiredFieldLogger ?? defaultRequiredFieldLogger; + this.relayFieldLogger = config.relayFieldLogger ?? defaultRelayFieldLogger; this._defaultRenderPolicy = - config.UNSTABLE_defaultRenderPolicy ?? - RelayFeatureFlags.ENABLE_PARTIAL_RENDERING_DEFAULT === true - ? 'partial' - : 'full'; + config.UNSTABLE_defaultRenderPolicy ?? 'partial'; this._operationLoader = operationLoader; this._operationExecutions = new Map(); this._network = wrapNetworkWithLogObserver(this, config.network); @@ -155,6 +137,7 @@ class RelayModernEnvironment implements IEnvironment { this._store = config.store; this.options = config.options; this._isServer = config.isServer ?? false; + this._normalizeResponse = config.normalizeResponse ?? 
normalizeResponse; (this: any).__setNet = newNet => (this._network = wrapNetworkWithLogObserver(this, newNet)); @@ -166,8 +149,6 @@ class RelayModernEnvironment implements IEnvironment { this._operationTracker = config.operationTracker ?? new RelayOperationTracker(); - this._reactFlightPayloadDeserializer = reactFlightPayloadDeserializer; - this._reactFlightServerErrorHandler = reactFlightServerErrorHandler; this._shouldProcessClientComponents = config.shouldProcessClientComponents; // Register this Relay Environment with Relay DevTools if it exists. @@ -492,8 +473,6 @@ class RelayModernEnvironment implements IEnvironment { assertInternalActorIdentifier(actorIdentifier); return publishQueue; }, - reactFlightPayloadDeserializer: this._reactFlightPayloadDeserializer, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, scheduler: this._scheduler, shouldProcessClientComponents: this._shouldProcessClientComponents, sink, @@ -506,6 +485,7 @@ class RelayModernEnvironment implements IEnvironment { }, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, updater, + normalizeResponse: this._normalizeResponse, }); return () => executor.cancel(); }); diff --git a/packages/relay-runtime/store/RelayModernFragmentSpecResolver.js b/packages/relay-runtime/store/RelayModernFragmentSpecResolver.js index ed99713b451ca..ee402549e9003 100644 --- a/packages/relay-runtime/store/RelayModernFragmentSpecResolver.js +++ b/packages/relay-runtime/store/RelayModernFragmentSpecResolver.js @@ -14,6 +14,7 @@ import type {ConcreteRequest} from '../util/RelayConcreteNode'; import type {Disposable, Variables} from '../util/RelayRuntimeTypes'; import type { + ErrorResponseFields, FragmentMap, FragmentSpecResolver, FragmentSpecResults, @@ -228,6 +229,7 @@ class SelectorResolver { _environment: IEnvironment; _isMissingData: boolean; _missingRequiredFields: ?MissingRequiredFields; + _errorResponseFields: ?ErrorResponseFields; _relayResolverErrors: RelayResolverErrors; 
_rootIsQueryRenderer: boolean; _selector: SingularReaderSelector; @@ -245,6 +247,7 @@ class SelectorResolver { this._data = snapshot.data; this._isMissingData = snapshot.isMissingData; this._missingRequiredFields = snapshot.missingRequiredFields; + this._errorResponseFields = snapshot.errorResponseFields; this._relayResolverErrors = snapshot.relayResolverErrors; this._environment = environment; this._rootIsQueryRenderer = rootIsQueryRenderer; @@ -330,6 +333,8 @@ class SelectorResolver { this._environment, this._missingRequiredFields, this._relayResolverErrors, + this._errorResponseFields, + this._selector.node.metadata?.throwOnFieldError ?? false, ); return this._data; } @@ -346,6 +351,7 @@ class SelectorResolver { this._data = recycleNodesInto(this._data, snapshot.data); this._isMissingData = snapshot.isMissingData; this._missingRequiredFields = snapshot.missingRequiredFields; + this._errorResponseFields = snapshot.errorResponseFields; this._relayResolverErrors = snapshot.relayResolverErrors; this._selector = selector; this._subscription = this._environment.subscribe(snapshot, this._onChange); @@ -383,6 +389,7 @@ class SelectorResolver { this._data = snapshot.data; this._isMissingData = snapshot.isMissingData; this._missingRequiredFields = snapshot.missingRequiredFields; + this._errorResponseFields = snapshot.errorResponseFields; this._relayResolverErrors = snapshot.relayResolverErrors; this._callback(); }; diff --git a/packages/relay-runtime/store/RelayModernRecord.js b/packages/relay-runtime/store/RelayModernRecord.js index 32dd4f44e2cdc..6bf9f073ff0cc 100644 --- a/packages/relay-runtime/store/RelayModernRecord.js +++ b/packages/relay-runtime/store/RelayModernRecord.js @@ -13,7 +13,7 @@ import type {ActorIdentifier} from '../multi-actor-environment/ActorIdentifier'; import type {DataID} from '../util/RelayRuntimeTypes'; -import type {Record} from './RelayStoreTypes'; +import type {TRelayFieldError} from './RelayErrorTrie'; const deepFreeze = 
require('../util/deepFreeze'); const {generateClientObjectClientID, isClientID} = require('./ClientID'); @@ -22,6 +22,7 @@ const { } = require('./experimental-live-resolvers/LiveResolverSuspenseSentinel'); const { ACTOR_IDENTIFIER_KEY, + ERRORS_KEY, ID_KEY, INVALIDATED_AT_KEY, REF_KEY, @@ -34,6 +35,25 @@ const areEqual = require('areEqual'); const invariant = require('invariant'); const warning = require('warning'); +type StorageKey = Exclude; + +type RelayFieldErrors = {[StorageKey]: $ReadOnlyArray}; + +export type RecordJSON = { + /** + * We cannot replace __errors with typeof ERRORS_KEY because Flow does + * not support types with multiple indexers. + */ + __errors?: RelayFieldErrors, + [StorageKey]: mixed, + ... +}; + +/* + * An individual cached graph object. + */ +export opaque type Record = RecordJSON; + /** * @public * @@ -122,6 +142,17 @@ function create(dataID: DataID, typeName: string): Record { return record; } +/** + * @public + * + * Convert the JSON representation of a record into a record. + */ +function fromObject( + json: RecordJSON | TMaybe, +): Record | TMaybe { + return json; +} + /** * @public * @@ -131,6 +162,18 @@ function getDataID(record: Record): DataID { return (record[ID_KEY]: any); } +/** + * @public + * + * Get the fields of a record. + */ +function getFields(record: Record): Array { + if (ERRORS_KEY in record) { + return Object.keys(record).filter(field => field !== ERRORS_KEY); + } + return Object.keys(record); +} + /** * @public * @@ -140,12 +183,24 @@ function getType(record: Record): string { return (record[TYPENAME_KEY]: any); } +/** + * @public + * + * Get the errors associated with particular field. + */ +function getErrors( + record: Record, + storageKey: StorageKey, +): $ReadOnlyArray | void { + return record[ERRORS_KEY]?.[storageKey]; +} + /** * @public * * Get a scalar (non-link) field value. 
*/ -function getValue(record: Record, storageKey: string): mixed { +function getValue(record: Record, storageKey: StorageKey): mixed { const value = record[storageKey]; if (value && typeof value === 'object') { invariant( @@ -162,13 +217,22 @@ function getValue(record: Record, storageKey: string): mixed { return value; } +/** + * @public + * + * Check if a record has a value for the given field. + */ +function hasValue(record: Record, storageKey: StorageKey): boolean { + return storageKey in record; +} + /** * @public * * Get the value of a field as a reference to another record. Throws if the * field has a different type. */ -function getLinkedRecordID(record: Record, storageKey: string): ?DataID { +function getLinkedRecordID(record: Record, storageKey: StorageKey): ?DataID { const maybeLink = record[storageKey]; if (maybeLink == null) { return maybeLink; @@ -192,6 +256,20 @@ function getLinkedRecordID(record: Record, storageKey: string): ?DataID { return link[REF_KEY]; } +/** + * @public + * + * Checks if a field has a reference to another record. + */ +function hasLinkedRecordID(record: Record, storageKey: StorageKey): boolean { + const maybeLink = record[storageKey]; + if (maybeLink == null) { + return false; + } + const link = maybeLink; + return typeof link === 'object' && link && typeof link[REF_KEY] === 'string'; +} + /** * @public * @@ -200,7 +278,7 @@ function getLinkedRecordID(record: Record, storageKey: string): ?DataID { */ function getLinkedRecordIDs( record: Record, - storageKey: string, + storageKey: StorageKey, ): ?Array { const links = record[storageKey]; if (links == null) { @@ -222,6 +300,23 @@ function getLinkedRecordIDs( return (links[REFS_KEY]: any); } +/** + * @public + * + * Checks if a field have references to other records. 
+ */ +function hasLinkedRecordIDs(record: Record, storageKey: StorageKey): boolean { + const links = record[storageKey]; + if (links == null) { + return false; + } + return ( + typeof links === 'object' && + Array.isArray(links[REFS_KEY]) && + links[REFS_KEY].every(link => typeof link === 'string') + ); +} + /** * @public * @@ -274,16 +369,43 @@ function update(prevRecord: Record, nextRecord: Record): Record { nextType, ); } + const prevErrorsByKey = prevRecord[ERRORS_KEY]; + const nextErrorsByKey = nextRecord[ERRORS_KEY]; let updated: Record | null = null; - const keys = Object.keys(nextRecord); - for (let ii = 0; ii < keys.length; ii++) { - const key = keys[ii]; - if (updated || !areEqual(prevRecord[key], nextRecord[key])) { - updated = updated !== null ? updated : {...prevRecord}; - updated[key] = nextRecord[key]; + if (prevErrorsByKey == null && nextErrorsByKey == null) { + for (const storageKey in nextRecord) { + if ( + updated || + !areEqual(prevRecord[storageKey], nextRecord[storageKey]) + ) { + updated = updated !== null ? updated : {...prevRecord}; + updated[storageKey] = nextRecord[storageKey]; + } + } + return updated ?? prevRecord; + } + for (const storageKey in nextRecord) { + if (storageKey === ERRORS_KEY) { + continue; } + const nextValue = nextRecord[storageKey]; + const nextErrors = nextErrorsByKey?.[storageKey]; + if (updated == null) { + const prevValue = prevRecord[storageKey]; + const prevErrors = prevErrorsByKey?.[storageKey]; + if (areEqual(prevValue, nextValue) && areEqual(prevErrors, nextErrors)) { + continue; + } + updated = {...prevRecord}; + if (prevErrorsByKey != null) { + // Make a copy of prevErrorsByKey so that our changes don't affect prevRecord + updated[ERRORS_KEY] = {...prevErrorsByKey}; + } + } + setValue(updated, storageKey, nextValue); + setErrors(updated, storageKey, nextErrors); } - return updated !== null ? updated : prevRecord; + return updated ?? 
prevRecord; } /** @@ -318,7 +440,34 @@ function merge(record1: Record, record2: Record): Record { nextType, ); } - return {...record1, ...record2}; + if (ERRORS_KEY in record1 || ERRORS_KEY in record2) { + const {[ERRORS_KEY]: errors1, ...fields1} = record1; + const {[ERRORS_KEY]: errors2, ...fields2} = record2; + // $FlowIssue[cannot-spread-indexer] + const updated: Record = {...fields1, ...fields2}; + if (errors1 == null && errors2 == null) { + return updated; + } + const updatedErrors: RelayFieldErrors = {}; + for (const storageKey in errors1) { + if (fields2.hasOwnProperty(storageKey)) { + continue; + } + updatedErrors[storageKey] = errors1[storageKey]; + } + for (const storageKey in errors2) { + updatedErrors[storageKey] = errors2[storageKey]; + } + for (const _storageKey in updatedErrors) { + // We only need to add updatedErrors to updated if there was one or more error + updated[ERRORS_KEY] = updatedErrors; + break; + } + return updated; + } else { + // $FlowIssue[cannot-spread-indexer] + return {...record1, ...record2}; + } } /** @@ -331,12 +480,50 @@ function freeze(record: Record): void { deepFreeze(record); } +/** + * @public + * + * Set the errors associated with a particular field. 
+ */ +function setErrors( + record: Record, + storageKey: StorageKey, + errors?: $ReadOnlyArray, +): void { + if (__DEV__) { + warning( + storageKey in record, + 'RelayModernRecord: Invalid error update, `%s` should not be undefined.', + storageKey, + ); + } + const errorsByStorageKey = record[ERRORS_KEY]; + if (errors != null && errors.length > 0) { + if (errorsByStorageKey == null) { + record[ERRORS_KEY] = {[storageKey]: errors}; + } else { + errorsByStorageKey[storageKey] = errors; + } + } else if (errorsByStorageKey != null) { + if (delete errorsByStorageKey[storageKey]) { + for (const otherStorageKey in errorsByStorageKey) { + if (errorsByStorageKey.hasOwnProperty(otherStorageKey)) { + // That wasn't the last error, so we shouldn't remove the error map + return; + } + } + // That was the last error, so we should remove the error map + delete record[ERRORS_KEY]; + } + } +} + /** * @public * * Set the value of a storageKey to a scalar. */ -function setValue(record: Record, storageKey: string, value: mixed): void { +function setValue(record: Record, storageKey: StorageKey, value: mixed): void { if (__DEV__) { const prevID = getDataID(record); if (storageKey === ID_KEY) { @@ -375,7 +562,7 @@ function setValue(record: Record, storageKey: string, value: mixed): void { */ function setLinkedRecordID( record: Record, - storageKey: string, + storageKey: StorageKey, linkedID: DataID, ): void { // See perf note above for why we aren't using computed property access. @@ -391,7 +578,7 @@ function setLinkedRecordID( */ function setLinkedRecordIDs( record: Record, - storageKey: string, + storageKey: StorageKey, linkedIDs: Array, ): void { // See perf note above for why we aren't using computed property access. 
@@ -407,7 +594,7 @@ function setLinkedRecordIDs( */ function setActorLinkedRecordID( record: Record, - storageKey: string, + storageKey: StorageKey, actorIdentifier: ActorIdentifier, linkedID: DataID, ): void { @@ -425,7 +612,7 @@ function setActorLinkedRecordID( */ function getActorLinkedRecordID( record: Record, - storageKey: string, + storageKey: StorageKey, ): ?[ActorIdentifier, DataID] { const link = record[storageKey]; if (link == null) { @@ -495,18 +682,36 @@ function getResolverLinkedRecordIDs( }); } +/** + * @public + * + * Convert a record to JSON. + */ +function toJSON( + record: Record | TMaybe, +): RecordJSON | TMaybe { + return record; +} + module.exports = { clone, copyFields, create, freeze, + fromObject, getDataID, + getErrors, + getFields, getInvalidationEpoch, getLinkedRecordID, getLinkedRecordIDs, getType, getValue, + hasValue, + hasLinkedRecordID, + hasLinkedRecordIDs, merge, + setErrors, setValue, setLinkedRecordID, setLinkedRecordIDs, @@ -515,4 +720,5 @@ module.exports = { setActorLinkedRecordID, getResolverLinkedRecordID, getResolverLinkedRecordIDs, + toJSON, }; diff --git a/packages/relay-runtime/store/RelayModernSelector.js b/packages/relay-runtime/store/RelayModernSelector.js index 17451b973abc1..99a2211ed0bad 100644 --- a/packages/relay-runtime/store/RelayModernSelector.js +++ b/packages/relay-runtime/store/RelayModernSelector.js @@ -27,9 +27,9 @@ const {getFragmentVariables} = require('./RelayConcreteVariables'); const { CLIENT_EDGE_TRAVERSAL_PATH, FRAGMENT_OWNER_KEY, + FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT, FRAGMENTS_KEY, ID_KEY, - IS_WITHIN_UNMATCHED_TYPE_REFINEMENT, } = require('./RelayStoreUtils'); const areEqual = require('areEqual'); const invariant = require('invariant'); @@ -78,8 +78,6 @@ function getSingularSelector( const dataID = item[ID_KEY]; const fragments = item[FRAGMENTS_KEY]; const mixedOwner = item[FRAGMENT_OWNER_KEY]; - const isWithinUnmatchedTypeRefinement = - item[IS_WITHIN_UNMATCHED_TYPE_REFINEMENT] 
=== true; const mixedClientEdgeTraversalPath = item[CLIENT_EDGE_TRAVERSAL_PATH]; if ( typeof dataID === 'string' && @@ -103,6 +101,11 @@ function getSingularSelector( argumentVariables, ); + const isWithinUnmatchedTypeRefinement = + argumentVariables[ + FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT + ] === true; + return createReaderSelector( fragment, dataID, @@ -406,13 +409,70 @@ function areEqualSingularSelectors( thatSelector: SingularReaderSelector, ): boolean { return ( - thisSelector.owner === thatSelector.owner && thisSelector.dataID === thatSelector.dataID && thisSelector.node === thatSelector.node && - areEqual(thisSelector.variables, thatSelector.variables) + areEqual(thisSelector.variables, thatSelector.variables) && + areEqualOwners(thisSelector.owner, thatSelector.owner) && + thisSelector.isWithinUnmatchedTypeRefinement === + thatSelector.isWithinUnmatchedTypeRefinement && + areEqualClientEdgeTraversalPaths( + thisSelector.clientEdgeTraversalPath, + thatSelector.clientEdgeTraversalPath, + ) ); } +function areEqualOwners( + thisOwner: RequestDescriptor, + thatOwner: RequestDescriptor, +): boolean { + if (thisOwner === thatOwner) { + return true; + } else { + return ( + // The `identifier` should already include serilized variables, so we + // don't need to compare them here. + // And the RequestDescriptor `node` should have the same reference + // as it should come from the generated artifact. 
+ thisOwner.identifier === thatOwner.identifier && + areEqual(thisOwner.cacheConfig, thatOwner.cacheConfig) + ); + } +} + +function areEqualClientEdgeTraversalPaths( + thisPath: ClientEdgeTraversalPath | null, + thatPath: ClientEdgeTraversalPath | null, +): boolean { + if (thisPath === thatPath) { + return true; + } + if ( + thisPath == null || + thatPath == null || + thisPath.length !== thatPath.length + ) { + return false; + } + let idx = thisPath.length; + while (idx--) { + const a = thisPath[idx]; + const b = thatPath[idx]; + if (a === b) { + continue; + } + if ( + a == null || + b == null || + a.clientEdgeDestinationID !== b.clientEdgeDestinationID || + a.readerClientEdge !== b.readerClientEdge + ) { + return false; + } + } + return true; +} + /** * @public * diff --git a/packages/relay-runtime/store/RelayModernStore.js b/packages/relay-runtime/store/RelayModernStore.js index de67c2934eb16..dde64ef45e17b 100644 --- a/packages/relay-runtime/store/RelayModernStore.js +++ b/packages/relay-runtime/store/RelayModernStore.js @@ -45,7 +45,6 @@ const RelayModernRecord = require('./RelayModernRecord'); const RelayOptimisticRecordSource = require('./RelayOptimisticRecordSource'); const RelayReader = require('./RelayReader'); const RelayReferenceMarker = require('./RelayReferenceMarker'); -const RelayStoreReactFlightUtils = require('./RelayStoreReactFlightUtils'); const RelayStoreSubscriptions = require('./RelayStoreSubscriptions'); const RelayStoreUtils = require('./RelayStoreUtils'); const {ROOT_ID, ROOT_TYPE} = require('./RelayStoreUtils'); @@ -729,15 +728,7 @@ function updateTargetFromSource( } } if (sourceRecord && targetRecord) { - // ReactFlightClientResponses are lazy and only materialize when readRoot - // is called when we read the field, so if the record is a Flight field - // we always use the new record's data regardless of whether - // it actually changed. Let React take care of reconciliation instead. 
- const nextRecord = - RelayModernRecord.getType(targetRecord) === - RelayStoreReactFlightUtils.REACT_FLIGHT_TYPE_NAME - ? sourceRecord - : RelayModernRecord.update(targetRecord, sourceRecord); + const nextRecord = RelayModernRecord.update(targetRecord, sourceRecord); if (nextRecord !== targetRecord) { // Prevent mutation of a record from outside the store. if (__DEV__) { diff --git a/packages/relay-runtime/store/RelayOptimisticRecordSource.js b/packages/relay-runtime/store/RelayOptimisticRecordSource.js index 3739b95aa9dd7..232d0240c4b4f 100644 --- a/packages/relay-runtime/store/RelayOptimisticRecordSource.js +++ b/packages/relay-runtime/store/RelayOptimisticRecordSource.js @@ -12,6 +12,7 @@ 'use strict'; import type {DataID} from '../util/RelayRuntimeTypes'; +import type {RecordJSON} from './RelayModernRecord'; import type {RecordState} from './RelayRecordState'; import type { MutableRecordSource, @@ -19,12 +20,15 @@ import type { RecordSource, } from './RelayStoreTypes'; +const RelayModernRecord = require('./RelayModernRecord'); const RelayRecordSource = require('./RelayRecordSource'); const invariant = require('invariant'); -const UNPUBLISH_RECORD_SENTINEL = Object.freeze({ - __UNPUBLISH_RECORD_SENTINEL: true, -}); +const UNPUBLISH_RECORD_SENTINEL = RelayModernRecord.fromObject( + Object.freeze({ + __UNPUBLISH_RECORD_SENTINEL: true, + }), +); /** * An implementation of MutableRecordSource that represents a base RecordSource @@ -98,14 +102,14 @@ class RelayOptimisticRecordSource implements MutableRecordSource { return Object.keys(this.toJSON()).length; } - toJSON(): {[DataID]: ?Record} { + toJSON(): {[DataID]: ?RecordJSON} { const merged = {...this._base.toJSON()}; this._sink.getRecordIDs().forEach(dataID => { const record = this.get(dataID); if (record === undefined) { delete merged[dataID]; } else { - merged[dataID] = record; + merged[dataID] = RelayModernRecord.toJSON(record); } }); return merged; diff --git a/packages/relay-runtime/store/RelayPublishQueue.js 
b/packages/relay-runtime/store/RelayPublishQueue.js index 68ed9d65e4519..b842ea09d1ee8 100644 --- a/packages/relay-runtime/store/RelayPublishQueue.js +++ b/packages/relay-runtime/store/RelayPublishQueue.js @@ -61,8 +61,8 @@ const _global: typeof global | $FlowFixMe = typeof global !== 'undefined' ? global : typeof window !== 'undefined' - ? window - : undefined; + ? window + : undefined; const applyWithGuard = _global?.ErrorUtils?.applyWithGuard ?? @@ -210,6 +210,7 @@ class RelayPublishQueue implements PublishQueue { sourceOperation?: OperationDescriptor, ): $ReadOnlyArray { const runWillClearGcHold = + // $FlowFixMe[incompatible-type] this._appliedOptimisticUpdates === 0 && !!this._gcHold; const runIsANoop = // this._pendingBackupRebase is true if an applied optimistic diff --git a/packages/relay-runtime/store/RelayReader.js b/packages/relay-runtime/store/RelayReader.js index 6d2ff9796646b..76e432a770d47 100644 --- a/packages/relay-runtime/store/RelayReader.js +++ b/packages/relay-runtime/store/RelayReader.js @@ -14,9 +14,8 @@ import type { ReaderActorChange, ReaderAliasedFragmentSpread, - ReaderClientEdgeToClientObject, - ReaderClientEdgeToServerObject, - ReaderFlightField, + ReaderCatchField, + ReaderClientEdge, ReaderFragment, ReaderFragmentSpread, ReaderInlineDataFragmentSpread, @@ -34,6 +33,7 @@ import type {DataID, Variables} from '../util/RelayRuntimeTypes'; import type { ClientEdgeTraversalInfo, DataIDSet, + ErrorResponseFields, MissingClientEdgeRequestInfo, MissingLiveResolverField, MissingRequiredFields, @@ -52,12 +52,12 @@ const { ACTOR_CHANGE, ALIASED_FRAGMENT_SPREAD, ALIASED_INLINE_FRAGMENT_SPREAD, + CATCH_FIELD, CLIENT_EDGE_TO_CLIENT_OBJECT, CLIENT_EDGE_TO_SERVER_OBJECT, CLIENT_EXTENSION, CONDITION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_DATA_FRAGMENT_SPREAD, INLINE_FRAGMENT, @@ -75,14 +75,12 @@ const { } = require('./experimental-live-resolvers/LiveResolverSuspenseSentinel'); const RelayConcreteVariables = 
require('./RelayConcreteVariables'); const RelayModernRecord = require('./RelayModernRecord'); -const {getReactFlightClientResponse} = require('./RelayStoreReactFlightUtils'); const { CLIENT_EDGE_TRAVERSAL_PATH, FRAGMENT_OWNER_KEY, FRAGMENT_PROP_NAME_KEY, FRAGMENTS_KEY, ID_KEY, - IS_WITHIN_UNMATCHED_TYPE_REFINEMENT, MODULE_COMPONENT_KEY, ROOT_ID, getArgumentValues, @@ -97,6 +95,8 @@ const { const {generateTypeID} = require('./TypeID'); const invariant = require('invariant'); +type RequiredOrCatchField = ReaderRequiredField | ReaderCatchField; + function read( recordSource: RecordSource, selector: SingularReaderSelector, @@ -120,6 +120,7 @@ class RelayReader { _missingLiveResolverFields: Array; _isWithinUnmatchedTypeRefinement: boolean; _missingRequiredFields: ?MissingRequiredFields; + _errorResponseFields: ?ErrorResponseFields; _owner: RequestDescriptor; _recordSource: RecordSource; _seenRecords: DataIDSet; @@ -135,16 +136,15 @@ class RelayReader { selector: SingularReaderSelector, resolverCache: ResolverCache, ) { - this._clientEdgeTraversalPath = - RelayFeatureFlags.ENABLE_CLIENT_EDGES && - selector.clientEdgeTraversalPath?.length - ? [...selector.clientEdgeTraversalPath] - : []; + this._clientEdgeTraversalPath = selector.clientEdgeTraversalPath?.length + ? [...selector.clientEdgeTraversalPath] + : []; this._missingClientEdges = []; this._missingLiveResolverFields = []; this._isMissingData = false; this._isWithinUnmatchedTypeRefinement = false; this._missingRequiredFields = null; + this._errorResponseFields = null; this._owner = selector.owner; this._recordSource = recordSource; this._seenRecords = new Set(); @@ -221,24 +221,45 @@ class RelayReader { return { data, isMissingData: this._isMissingData && isDataExpectedToBePresent, - missingClientEdges: - RelayFeatureFlags.ENABLE_CLIENT_EDGES && this._missingClientEdges.length - ? this._missingClientEdges - : null, + missingClientEdges: this._missingClientEdges.length + ? 
this._missingClientEdges + : null, missingLiveResolverFields: this._missingLiveResolverFields, seenRecords: this._seenRecords, selector: this._selector, missingRequiredFields: this._missingRequiredFields, relayResolverErrors: this._resolverErrors, + errorResponseFields: this._errorResponseFields, }; } + _maybeAddErrorResponseFields(record: Record, storageKey: string): void { + if (!RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING) { + return; + } + + const errors = RelayModernRecord.getErrors(record, storageKey); + + if (errors == null) { + return; + } + const owner = this._fragmentName; + + if (this._errorResponseFields == null) { + this._errorResponseFields = []; + } + for (const error of errors) { + this._errorResponseFields.push({ + owner, + path: (error.path ?? []).join('.'), + error, + }); + } + } + _markDataAsMissing(): void { this._isMissingData = true; - if ( - RelayFeatureFlags.ENABLE_CLIENT_EDGES && - this._clientEdgeTraversalPath.length - ) { + if (this._clientEdgeTraversalPath.length) { const top = this._clientEdgeTraversalPath[this._clientEdgeTraversalPath.length - 1]; // Top can be null if we've traversed past a client edge into an ordinary @@ -284,11 +305,7 @@ class RelayReader { return this._variables[name]; } - _maybeReportUnexpectedNull( - fieldPath: string, - action: 'LOG' | 'THROW', - _record: Record, - ) { + _maybeReportUnexpectedNull(fieldPath: string, action: 'LOG' | 'THROW') { if (this._missingRequiredFields?.action === 'THROW') { // Chained @required directives may cause a parent `@required(action: // THROW)` field to become null, so the first missing field we @@ -322,6 +339,92 @@ class RelayReader { } } + _handleCatchFieldValue( + selection: ReaderCatchField, + record: Record, + data: SelectorData, + value: mixed, + ) { + const {to} = selection; + const field = selection.field?.backingField ?? selection.field; + const fieldName = field?.alias ?? 
field?.name; + + // ReaderClientExtension doesn't have `alias` or `name` + // so we don't support this yet + invariant( + fieldName != null, + "Couldn't determine field name for this field. It might be a ReaderClientExtension - which is not yet supported.", + ); + + if (this._errorResponseFields != null) { + for (let i = 0; i < this._errorResponseFields.length; i++) { + // if it's a @catch - it can only be NULL or RESULT. So we always add the "to" from the CatchField. + this._errorResponseFields[i].to = to; + } + } + // If we have a nested @required(THROW) that will throw, + // we want to catch that error and provide it, and remove the original error + if (this._missingRequiredFields?.action === 'THROW') { + if (this._missingRequiredFields?.field == null) { + return; + } + + // We want to catch nested @required THROWs + if (this._errorResponseFields == null) { + this._errorResponseFields = []; + } + + const {owner, path} = this._missingRequiredFields.field; + this._errorResponseFields.push({ + owner, + path, + error: { + message: `Relay: Missing @required value at path '${path}' in '${owner}'.`, + }, + to, + }); + + // remove missing required because we're providing it in catch instead. + this._missingRequiredFields = null; + + return; + } + + if (this._errorResponseFields != null) { + const errors = this._errorResponseFields.map(error => error.error); + + data[fieldName] = { + ok: false, + errors, + }; + return; + } + + data[fieldName] = { + ok: true, + value, + }; + + // we do nothing if to is 'NULL' + } + + _handleRequiredFieldValue( + selection: ReaderRequiredField, + value: mixed, + ): boolean /*should continue to siblings*/ { + if (value == null) { + const {action} = selection; + if (action !== 'NONE') { + this._maybeReportUnexpectedNull(selection.path, action); + } + // We are going to throw, or our parent is going to get nulled out. + // Either way, sibling values are going to be ignored, so we can + // bail early here as an optimization. 
+ return false; + } + return true; + } + _traverseSelections( selections: $ReadOnlyArray, record: Record, @@ -329,21 +432,37 @@ class RelayReader { ): boolean /* had all expected data */ { for (let i = 0; i < selections.length; i++) { const selection = selections[i]; + switch (selection.kind) { - case REQUIRED_FIELD: { - const fieldValue = this._readRequiredField(selection, record, data); - if (fieldValue == null) { - const {action} = selection; - if (action !== 'NONE') { - this._maybeReportUnexpectedNull(selection.path, action, record); - } - // We are going to throw, or our parent is going to get nulled out. - // Either way, sibling values are going to be ignored, so we can - // bail early here as an optimization. + case REQUIRED_FIELD: + const requiredFieldValue = this._readClientSideDirectiveField( + selection, + record, + data, + ); + if (!this._handleRequiredFieldValue(selection, requiredFieldValue)) { return false; } break; - } + case CATCH_FIELD: + const catchFieldValue = this._readClientSideDirectiveField( + selection, + record, + data, + ); + if (RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE) { + /* NULL is old behavior. do nothing. 
*/ + if (selection.to != 'NULL') { + /* @catch(to: RESULT) is the default */ + this._handleCatchFieldValue( + selection, + record, + data, + catchFieldValue, + ); + } + } + break; case SCALAR_FIELD: this._readScalar(selection, record, data); break; @@ -418,9 +537,7 @@ class RelayReader { case CLIENT_EXTENSION: { const isMissingData = this._isMissingData; const alreadyMissingClientEdges = this._missingClientEdges.length; - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - this._clientEdgeTraversalPath.push(null); - } + this._clientEdgeTraversalPath.push(null); const hasExpectedData = this._traverseSelections( selection.selections, record, @@ -433,9 +550,7 @@ class RelayReader { isMissingData || this._missingClientEdges.length > alreadyMissingClientEdges || this._missingLiveResolverFields.length > 0; - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - this._clientEdgeTraversalPath.pop(); - } + this._clientEdgeTraversalPath.pop(); if (!hasExpectedData) { return false; } @@ -452,23 +567,12 @@ class RelayReader { } break; } - case FLIGHT_FIELD: - if (RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD) { - this._readFlightField(selection, record, data); - } else { - throw new Error('Flight fields are not yet supported.'); - } - break; case ACTOR_CHANGE: this._readActorChange(selection, record, data); break; case CLIENT_EDGE_TO_CLIENT_OBJECT: case CLIENT_EDGE_TO_SERVER_OBJECT: - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - this._readClientEdge(selection, record, data); - } else { - throw new Error('Client edges are not yet supported.'); - } + this._readClientEdge(selection, record, data); break; default: (selection: empty); @@ -482,8 +586,8 @@ class RelayReader { return true; } - _readRequiredField( - selection: ReaderRequiredField, + _readClientSideDirectiveField( + selection: RequiredOrCatchField, record: Record, data: SelectorData, ): ?mixed { @@ -527,8 +631,19 @@ class RelayReader { record: Record, data: SelectorData, ): mixed { - const {fragment} = field; const 
parentRecordID = RelayModernRecord.getDataID(record); + const result = this._readResolverFieldImpl(field, parentRecordID); + + const fieldName = field.alias ?? field.name; + data[fieldName] = result; + return result; + } + + _readResolverFieldImpl( + field: ReaderRelayResolver | ReaderRelayLiveResolver, + parentRecordID: DataID, + ): mixed { + const {fragment} = field; // Found when reading the resolver fragment, which can happen either when // evaluating the resolver and it calls readFragment, or when checking if the @@ -591,26 +706,16 @@ class RelayReader { field, this._variables, key, - this._fragmentName, ); - return { - resolverResult, - snapshot: snapshot, - error: resolverError, - }; + return {resolverResult, snapshot, error: resolverError}; }); } else { const [resolverResult, resolverError] = getResolverValue( field, this._variables, null, - this._fragmentName, ); - return { - resolverResult, - snapshot: undefined, - error: resolverError, - }; + return {resolverResult, snapshot: undefined, error: resolverError}; } }; @@ -629,6 +734,29 @@ class RelayReader { getDataForResolverFragment, ); + this._propogateResolverMetadata( + field.path, + cachedSnapshot, + resolverError, + seenRecord, + suspenseID, + updatedDataIDs, + ); + + return result; + } + + // Reading a resolver field can uncover missing data, errors, suspense, + // additional seen records and updated dataIDs. All of these facts must be + // represented in the snapshot we return for this fragment. + _propogateResolverMetadata( + fieldPath: string, + cachedSnapshot: ?Snapshot, + resolverError: ?Error, + seenRecord: ?DataID, + suspenseID: ?DataID, + updatedDataIDs: ?DataIDSet, + ) { // The resolver's root fragment (if there is one) may be missing data, have // errors, or be in a suspended state. Here we propagate those cases // upwards to mimic the behavior of having traversed into that fragment directly. @@ -660,7 +788,10 @@ class RelayReader { // the errors can be attached to this read's snapshot. 
This allows the error // to be logged. if (resolverError) { - this._resolverErrors.push(resolverError); + this._resolverErrors.push({ + field: {path: fieldPath, owner: this._fragmentName}, + error: resolverError, + }); } // The resolver itself creates a record in the store. We record that we've @@ -678,7 +809,7 @@ class RelayReader { if (suspenseID != null) { this._isMissingData = true; this._missingLiveResolverFields.push({ - path: `${this._fragmentName}.${field.path}`, + path: `${this._fragmentName}.${fieldPath}`, liveStateID: suspenseID, }); } @@ -687,124 +818,156 @@ class RelayReader { this._updatedDataIDs.add(recordID); } } - - const applicationName = field.alias ?? field.name; - data[applicationName] = result; - return result; } _readClientEdge( - field: ReaderClientEdgeToServerObject | ReaderClientEdgeToClientObject, + field: ReaderClientEdge, record: Record, data: SelectorData, - ): void { + ): ?mixed { const backingField = field.backingField; // Because ReaderClientExtension doesn't have `alias` or `name` and so I don't know - // how to get its applicationName or storageKey yet: + // how to get its fieldName or storageKey yet: invariant( backingField.kind !== 'ClientExtension', 'Client extension client edges are not yet implemented.', ); - const applicationName = backingField.alias ?? backingField.name; + const fieldName = backingField.alias ?? backingField.name; const backingFieldData = {}; this._traverseSelections([backingField], record, backingFieldData); - // At this point, backingFieldData is an object with a single key (applicationName) + // At this point, backingFieldData is an object with a single key (fieldName) // whose value is the value returned from the resolver, or a suspense sentinel. 
- const clientEdgeResolverResponse = backingFieldData[applicationName]; + // $FlowFixMe[invalid-computed-prop] + const clientEdgeResolverResponse = backingFieldData[fieldName]; if ( clientEdgeResolverResponse == null || isSuspenseSentinel(clientEdgeResolverResponse) ) { - data[applicationName] = clientEdgeResolverResponse; - return; + data[fieldName] = clientEdgeResolverResponse; + return clientEdgeResolverResponse; } - const validClientEdgeResolverResponse = - assertValidClientEdgeResolverResponse(field, clientEdgeResolverResponse); - - switch (validClientEdgeResolverResponse.kind) { - case 'PluralConcrete': - const storeIDs = getStoreIDsForPluralClientEdgeResolver( - field, - validClientEdgeResolverResponse.ids, - this._resolverCache, - ); - this._clientEdgeTraversalPath.push(null); - data[applicationName] = this._readLinkedIds( - field.linkedField, - storeIDs, - record, - data, - ); - this._clientEdgeTraversalPath.pop(); - break; + if (field.linkedField.plural) { + invariant( + Array.isArray(clientEdgeResolverResponse), + 'Expected plural Client Edge Relay Resolver to return an array containing IDs or objects with shape {id}.', + ); + let storeIDs: $ReadOnlyArray; + invariant( + field.kind === CLIENT_EDGE_TO_CLIENT_OBJECT, + 'Unexpected Client Edge to plural server type. This should be prevented by the compiler.', + ); + if (field.backingField.normalizationInfo == null) { + // @edgeTo case where we need to ensure that the record has `id` field + storeIDs = clientEdgeResolverResponse.map(itemResponse => { + const concreteType = field.concreteType ?? 
itemResponse.__typename; + invariant( + typeof concreteType === 'string', + 'Expected resolver modeling an edge to an abstract type to return an object with a `__typename` property.', + ); + const localId = extractIdFromResponse(itemResponse); + const id = this._resolverCache.ensureClientRecord( + localId, + concreteType, + ); - case 'SingularConcrete': - const [storeID, traversalPathSegment] = - getStoreIDAndTraversalPathSegmentForSingularClientEdgeResolver( - field, - validClientEdgeResolverResponse.id, - this._resolverCache, + const modelResolvers = field.modelResolvers; + if (modelResolvers != null) { + const modelResolver = modelResolvers[concreteType]; + invariant( + modelResolver !== undefined, + `Invalid \`__typename\` returned by resolver. Expected one of ${Object.keys(modelResolvers).join(', ')} but got \`${concreteType}\`.`, + ); + const model = this._readResolverFieldImpl(modelResolver, id); + return model != null ? id : null; + } + return id; + }); + } else { + // The normalization process in LiveResolverCache should take care of generating the correct ID. + storeIDs = clientEdgeResolverResponse.map(extractIdFromResponse); + } + this._clientEdgeTraversalPath.push(null); + const edgeValues = this._readLinkedIds( + field.linkedField, + storeIDs, + record, + data, + ); + this._clientEdgeTraversalPath.pop(); + data[fieldName] = edgeValues; + return edgeValues; + } else { + const id = extractIdFromResponse(clientEdgeResolverResponse); + let storeID: DataID; + const concreteType = + field.concreteType ?? 
clientEdgeResolverResponse.__typename; + let traversalPathSegment: ClientEdgeTraversalInfo | null; + if (field.kind === CLIENT_EDGE_TO_CLIENT_OBJECT) { + if (field.backingField.normalizationInfo == null) { + invariant( + typeof concreteType === 'string', + 'Expected resolver modeling an edge to an abstract type to return an object with a `__typename` property.', ); - this._clientEdgeTraversalPath.push(traversalPathSegment); + // @edgeTo case where we need to ensure that the record has `id` field + storeID = this._resolverCache.ensureClientRecord(id, concreteType); + traversalPathSegment = null; + } else { + // The normalization process in LiveResolverCache should take care of generating the correct ID. + storeID = id; + traversalPathSegment = null; + } + } else { + storeID = id; + traversalPathSegment = { + readerClientEdge: field, + clientEdgeDestinationID: id, + }; + } - const prevData = data[applicationName]; + const modelResolvers = field.modelResolvers; + if (modelResolvers != null) { invariant( - prevData == null || typeof prevData === 'object', - 'RelayReader(): Expected data for field `%s` on record `%s` ' + - 'to be an object, got `%s`.', - applicationName, - RelayModernRecord.getDataID(record), - prevData, + typeof concreteType === 'string', + 'Expected resolver modeling an edge to an abstract type to return an object with a `__typename` property.', ); - data[applicationName] = this._traverse( - field.linkedField, - storeID, - // $FlowFixMe[incompatible-variance] - prevData, + const modelResolver = modelResolvers[concreteType]; + invariant( + modelResolver !== undefined, + `Invalid \`__typename\` returned by resolver. 
Expected one of ${Object.keys(modelResolvers).join(', ')} but got \`${concreteType}\`.`, ); - this._clientEdgeTraversalPath.pop(); - break; - default: - (validClientEdgeResolverResponse.kind: empty); - } - } - - _readFlightField( - field: ReaderFlightField, - record: Record, - data: SelectorData, - ): ?mixed { - const applicationName = field.alias ?? field.name; - const storageKey = getStorageKey(field, this._variables); - const reactFlightClientResponseRecordID = - RelayModernRecord.getLinkedRecordID(record, storageKey); - if (reactFlightClientResponseRecordID == null) { - data[applicationName] = reactFlightClientResponseRecordID; - if (reactFlightClientResponseRecordID === undefined) { - this._markDataAsMissing(); - } - return reactFlightClientResponseRecordID; - } - const reactFlightClientResponseRecord = this._recordSource.get( - reactFlightClientResponseRecordID, - ); - this._seenRecords.add(reactFlightClientResponseRecordID); - if (reactFlightClientResponseRecord == null) { - data[applicationName] = reactFlightClientResponseRecord; - if (reactFlightClientResponseRecord === undefined) { - this._markDataAsMissing(); + const model = this._readResolverFieldImpl(modelResolver, storeID); + if (model == null) { + // If the model resolver returns undefined, we should still return null + // to match GQL behavior. 
+ data[fieldName] = null; + return null; + } } - return reactFlightClientResponseRecord; + this._clientEdgeTraversalPath.push(traversalPathSegment); + + const prevData = data[fieldName]; + invariant( + prevData == null || typeof prevData === 'object', + 'RelayReader(): Expected data for field `%s` on record `%s` ' + + 'to be an object, got `%s`.', + fieldName, + RelayModernRecord.getDataID(record), + prevData, + ); + const edgeValue = this._traverse( + field.linkedField, + storeID, + // $FlowFixMe[incompatible-variance] + prevData, + ); + this._clientEdgeTraversalPath.pop(); + data[fieldName] = edgeValue; + return edgeValue; } - const clientResponse = getReactFlightClientResponse( - reactFlightClientResponseRecord, - ); - data[applicationName] = clientResponse; - return clientResponse; } _readScalar( @@ -812,13 +975,15 @@ class RelayReader { record: Record, data: SelectorData, ): ?mixed { - const applicationName = field.alias ?? field.name; + const fieldName = field.alias ?? field.name; const storageKey = getStorageKey(field, this._variables); const value = RelayModernRecord.getValue(record, storageKey); - if (value === undefined) { + if (value === null) { + this._maybeAddErrorResponseFields(record, storageKey); + } else if (value === undefined) { this._markDataAsMissing(); } - data[applicationName] = value; + data[fieldName] = value; return value; } @@ -827,29 +992,31 @@ class RelayReader { record: Record, data: SelectorData, ): ?mixed { - const applicationName = field.alias ?? field.name; + const fieldName = field.alias ?? 
field.name; const storageKey = getStorageKey(field, this._variables); const linkedID = RelayModernRecord.getLinkedRecordID(record, storageKey); if (linkedID == null) { - data[applicationName] = linkedID; - if (linkedID === undefined) { + data[fieldName] = linkedID; + if (linkedID === null) { + this._maybeAddErrorResponseFields(record, storageKey); + } else if (linkedID === undefined) { this._markDataAsMissing(); } return linkedID; } - const prevData = data[applicationName]; + const prevData = data[fieldName]; invariant( prevData == null || typeof prevData === 'object', 'RelayReader(): Expected data for field `%s` on record `%s` ' + 'to be an object, got `%s`.', - applicationName, + fieldName, RelayModernRecord.getDataID(record), prevData, ); // $FlowFixMe[incompatible-variance] const value = this._traverse(field, linkedID, prevData); - data[applicationName] = value; + data[fieldName] = value; return value; } @@ -858,7 +1025,7 @@ class RelayReader { record: Record, data: SelectorData, ): ?mixed { - const applicationName = field.alias ?? field.name; + const fieldName = field.alias ?? 
field.name; const storageKey = getStorageKey(field, this._variables); const externalRef = RelayModernRecord.getActorLinkedRecordID( record, @@ -866,27 +1033,29 @@ class RelayReader { ); if (externalRef == null) { - data[applicationName] = externalRef; + data[fieldName] = externalRef; if (externalRef === undefined) { this._markDataAsMissing(); + } else if (externalRef === null) { + this._maybeAddErrorResponseFields(record, storageKey); } - return data[applicationName]; + return data[fieldName]; } const [actorIdentifier, dataID] = externalRef; const fragmentRef = {}; this._createFragmentPointer( field.fragmentSpread, - { + RelayModernRecord.fromObject<>({ __id: dataID, - }, + }), fragmentRef, ); - data[applicationName] = { + data[fieldName] = { __fragmentRef: fragmentRef, __viewer: actorIdentifier, }; - return data[applicationName]; + return data[fieldName]; } _readPluralLink( @@ -896,6 +1065,9 @@ class RelayReader { ): ?mixed { const storageKey = getStorageKey(field, this._variables); const linkedIDs = RelayModernRecord.getLinkedRecordIDs(record, storageKey); + if (linkedIDs === null) { + this._maybeAddErrorResponseFields(record, storageKey); + } return this._readLinkedIds(field, linkedIDs, record, data); } @@ -905,22 +1077,22 @@ class RelayReader { record: Record, data: SelectorData, ): ?mixed { - const applicationName = field.alias ?? field.name; + const fieldName = field.alias ?? 
field.name; if (linkedIDs == null) { - data[applicationName] = linkedIDs; + data[fieldName] = linkedIDs; if (linkedIDs === undefined) { this._markDataAsMissing(); } return linkedIDs; } - const prevData = data[applicationName]; + const prevData = data[fieldName]; invariant( prevData == null || Array.isArray(prevData), 'RelayReader(): Expected data for field `%s` on record `%s` ' + 'to be an array, got `%s`.', - applicationName, + fieldName, RelayModernRecord.getDataID(record), prevData, ); @@ -939,7 +1111,7 @@ class RelayReader { prevItem == null || typeof prevItem === 'object', 'RelayReader(): Expected data for field `%s` on record `%s` ' + 'to be an object, got `%s`.', - applicationName, + fieldName, RelayModernRecord.getDataID(record), prevItem, ); @@ -947,7 +1119,7 @@ class RelayReader { // $FlowFixMe[incompatible-variance] linkedArray[nextIndex] = this._traverse(field, linkedID, prevItem); }); - data[applicationName] = linkedArray; + data[fieldName] = linkedArray; return linkedArray; } @@ -1023,7 +1195,7 @@ class RelayReader { record, fieldData, ); - return fieldData; + return RelayModernRecord.fromObject<>(fieldData); } // Has three possible return values: @@ -1036,6 +1208,20 @@ class RelayReader { record: Record, data: SelectorData, ): ?(SelectorData | false) { + if (inlineFragment.type == null) { + // Inline fragment without a type condition: always read data + // Usually this would get compiled away, but fragments with @alias + // and no type condition will get preserved. 
+ const hasExpectedData = this._traverseSelections( + inlineFragment.selections, + record, + data, + ); + if (hasExpectedData === false) { + return false; + } + return data; + } const {abstractKey} = inlineFragment; if (abstractKey == null) { // concrete type refinement: only read data if the type exactly matches @@ -1097,7 +1283,7 @@ class RelayReader { let fragmentPointers = data[FRAGMENTS_KEY]; if (fragmentPointers == null) { fragmentPointers = data[FRAGMENTS_KEY] = ({}: { - [string]: Arguments | {...}, + [string]: Arguments, }); } invariant( @@ -1105,26 +1291,25 @@ class RelayReader { 'RelayReader: Expected fragment spread data to be an object, got `%s`.', fragmentPointers, ); + if (data[ID_KEY] == null) { data[ID_KEY] = RelayModernRecord.getDataID(record); } // $FlowFixMe[cannot-write] - writing into read-only field - fragmentPointers[fragmentSpread.name] = fragmentSpread.args - ? getArgumentValues(fragmentSpread.args, this._variables) - : {}; + fragmentPointers[fragmentSpread.name] = getArgumentValues( + fragmentSpread.args, + this._variables, + this._isWithinUnmatchedTypeRefinement, + ); data[FRAGMENT_OWNER_KEY] = this._owner; - data[IS_WITHIN_UNMATCHED_TYPE_REFINEMENT] = - this._isWithinUnmatchedTypeRefinement; - if (RelayFeatureFlags.ENABLE_CLIENT_EDGES) { - if ( - this._clientEdgeTraversalPath.length > 0 && - this._clientEdgeTraversalPath[ - this._clientEdgeTraversalPath.length - 1 - ] !== null - ) { - data[CLIENT_EDGE_TRAVERSAL_PATH] = [...this._clientEdgeTraversalPath]; - } + if ( + this._clientEdgeTraversalPath.length > 0 && + this._clientEdgeTraversalPath[ + this._clientEdgeTraversalPath.length - 1 + ] !== null + ) { + data[CLIENT_EDGE_TRAVERSAL_PATH] = [...this._clientEdgeTraversalPath]; } } @@ -1221,8 +1406,7 @@ function getResolverValue( field: ReaderRelayResolver | ReaderRelayLiveResolver, variables: Variables, fragmentKey: mixed, - ownerName: string, -) { +): [mixed, ?Error] { // Support for languages that work (best) with ES6 modules, such as 
TypeScript. const resolverFunction = typeof field.resolverModule === 'function' @@ -1247,143 +1431,13 @@ function getResolverValue( if (e === RESOLVER_FRAGMENT_MISSING_DATA_SENTINEL) { resolverResult = undefined; } else { - // `field.path` is typed as nullable while we rollout compiler changes. - const path = field.path ?? '[UNKNOWN]'; - resolverError = { - field: {path, owner: ownerName}, - error: e, - }; + resolverError = e; } } return [resolverResult, resolverError]; } -type ValidClientEdgeResolverResponse = - | { - kind: 'PluralConcrete', - ids: $ReadOnlyArray, - } - | { - kind: 'SingularConcrete', - id: DataID, - }; - -function assertValidClientEdgeResolverResponse( - field: ReaderClientEdgeToClientObject | ReaderClientEdgeToServerObject, - clientEdgeResolverResponse: mixed, -): ValidClientEdgeResolverResponse { - if (field.linkedField.plural) { - invariant( - Array.isArray(clientEdgeResolverResponse), - 'Expected plural Client Edge Relay Resolver to return an array containing IDs or objects with shape {id}.', - ); - return { - kind: 'PluralConcrete', - ids: clientEdgeResolverResponse.map(response => - extractIdFromResponse( - response, - 'Expected this plural Client Edge Relay Resolver to return an array containing IDs or objects with shape {id}.', - ), - ), - }; - } else { - return { - kind: 'SingularConcrete', - id: extractIdFromResponse( - clientEdgeResolverResponse, - 'Expected this Client Edge Relay Resolver to return an ID of type `string` or an object with shape {id}.', - ), - }; - } -} - -// For weak objects: -// The return value of a client edge resolver is the entire object (though, -// strong objects become DataIDs or arrays thereof). However, when being read -// out, these raw objects are turned into DataIDs or arrays thereof. -// -// For strong objects: -// For a singular field, the return value of a client edge resolver is a DataID -// (i.e. a string). 
If the edge points to a client type, we namespace the -// ID with the typename by calling resolverCache.ensureClientRecord. -function getStoreIDAndTraversalPathSegmentForSingularClientEdgeResolver( - field: ReaderClientEdgeToClientObject | ReaderClientEdgeToServerObject, - clientEdgeResolverResponse: DataID, - resolverCache: ResolverCache, -): [DataID, ClientEdgeTraversalInfo | null] { - if (field.kind === CLIENT_EDGE_TO_CLIENT_OBJECT) { - if (field.backingField.normalizationInfo == null) { - const concreteType = field.concreteType; - invariant( - concreteType != null, - 'Expected at least one of backingField.normalizationInfo or field.concreteType to be non-null. ' + - 'This indicates a bug in Relay.', - ); - // @edgeTo case where we need to ensure that the record has `id` field - return [ - resolverCache.ensureClientRecord( - clientEdgeResolverResponse, - concreteType, - ), - null, - ]; - } else { - // The normalization process in LiveResolverCache should take care of generating the correct ID. - return [clientEdgeResolverResponse, null]; - } - } else { - return [ - clientEdgeResolverResponse, - { - readerClientEdge: field, - clientEdgeDestinationID: clientEdgeResolverResponse, - }, - ]; - } -} - -// For weak objects: -// The return value of a client edge resolver is the entire object (though, -// strong objects become DataIDs or arrays thereof). However, when being read -// out, these raw objects are turned into DataIDs or arrays thereof. -// -// For strong objects: -// For a plural field, the return value of a client edge resolver is an -// array of DataID's. If the edge points to a client type, we namespace the -// IDs with the typename by calling resolverCache.ensureClientRecord. 
-function getStoreIDsForPluralClientEdgeResolver( - field: ReaderClientEdgeToClientObject | ReaderClientEdgeToServerObject, - clientEdgeResolverResponse: $ReadOnlyArray, - resolverCache: ResolverCache, -): $ReadOnlyArray { - if (field.kind === CLIENT_EDGE_TO_CLIENT_OBJECT) { - if (field.backingField.normalizationInfo == null) { - const concreteType = field.concreteType; - invariant( - concreteType != null, - 'Expected at least one of backingField.normalizationInfo or field.concreteType to be non-null. ' + - 'This indicates a bug in Relay.', - ); - // @edgeTo case where we need to ensure that the record has `id` field - return clientEdgeResolverResponse.map(id => - resolverCache.ensureClientRecord(id, concreteType), - ); - } else { - // The normalization process in LiveResolverCache should take care of generating the correct ID. - return clientEdgeResolverResponse; - } - } else { - invariant( - false, - 'Unexpected Client Edge to plural server type. This should be prevented by the compiler.', - ); - } -} - -function extractIdFromResponse( - individualResponse: mixed, - errorMessage: string, -): string { +function extractIdFromResponse(individualResponse: mixed): string { if (typeof individualResponse === 'string') { return individualResponse; } else if ( @@ -1393,7 +1447,10 @@ function extractIdFromResponse( ) { return individualResponse.id; } - invariant(false, errorMessage); + invariant( + false, + 'Expected object returned from an edge resolver to be a string or an object with an `id` property', + ); } module.exports = {read}; diff --git a/packages/relay-runtime/store/RelayRecordSource.js b/packages/relay-runtime/store/RelayRecordSource.js index f65431265d0e8..00e25cbe31f6e 100644 --- a/packages/relay-runtime/store/RelayRecordSource.js +++ b/packages/relay-runtime/store/RelayRecordSource.js @@ -16,9 +16,10 @@ import type {RecordState} from './RelayRecordState'; import type { MutableRecordSource, Record, - RecordObjectMap, + RecordSourceJSON, } from 
'./RelayStoreTypes'; +const RelayModernRecord = require('./RelayModernRecord'); const RelayRecordState = require('./RelayRecordState'); const {EXISTENT, NONEXISTENT, UNKNOWN} = RelayRecordState; @@ -30,16 +31,18 @@ const {EXISTENT, NONEXISTENT, UNKNOWN} = RelayRecordState; class RelayRecordSource implements MutableRecordSource { _records: Map; - constructor(records?: RecordObjectMap) { + constructor(records?: RecordSourceJSON) { this._records = new Map(); if (records != null) { Object.keys(records).forEach(key => { - this._records.set(key, records[key]); + const object = records[key]; + const record = RelayModernRecord.fromObject(object); + this._records.set(key, record); }); } } - static create(records?: RecordObjectMap): MutableRecordSource { + static create(records?: RecordSourceJSON): MutableRecordSource { return new RelayRecordSource(records); } @@ -82,10 +85,10 @@ class RelayRecordSource implements MutableRecordSource { return this._records.size; } - toJSON(): {[DataID]: ?Record, ...} { - const obj: {[DataID]: ?Record} = {}; - for (const [key, value] of this._records) { - obj[key] = value; + toJSON(): RecordSourceJSON { + const obj: RecordSourceJSON = {}; + for (const [key, record] of this._records) { + obj[key] = RelayModernRecord.toJSON(record); } return obj; } diff --git a/packages/relay-runtime/store/RelayReferenceMarker.js b/packages/relay-runtime/store/RelayReferenceMarker.js index cacfb41add911..a9e4edd8069bf 100644 --- a/packages/relay-runtime/store/RelayReferenceMarker.js +++ b/packages/relay-runtime/store/RelayReferenceMarker.js @@ -13,8 +13,8 @@ import type { NormalizationClientEdgeToClientObject, - NormalizationFlightField, NormalizationLinkedField, + NormalizationLiveResolverField, NormalizationModuleImport, NormalizationNode, NormalizationResolverField, @@ -25,19 +25,16 @@ import type { DataIDSet, NormalizationSelector, OperationLoader, - ReactFlightReachableExecutableDefinitions, Record, RecordSource, } from './RelayStoreTypes'; const 
getOperation = require('../util/getOperation'); const RelayConcreteNode = require('../util/RelayConcreteNode'); -const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const cloneRelayHandleSourceField = require('./cloneRelayHandleSourceField'); const getOutputTypeRecordIDs = require('./experimental-live-resolvers/getOutputTypeRecordIDs'); const {getLocalVariables} = require('./RelayConcreteVariables'); const RelayModernRecord = require('./RelayModernRecord'); -const RelayStoreReactFlightUtils = require('./RelayStoreReactFlightUtils'); const RelayStoreUtils = require('./RelayStoreUtils'); const {generateTypeID} = require('./TypeID'); const invariant = require('invariant'); @@ -48,7 +45,6 @@ const { CLIENT_COMPONENT, CLIENT_EXTENSION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_FRAGMENT, LINKED_FIELD, @@ -59,9 +55,10 @@ const { STREAM, TYPE_DISCRIMINATOR, RELAY_RESOLVER, + RELAY_LIVE_RESOLVER, CLIENT_EDGE_TO_CLIENT_OBJECT, } = RelayConcreteNode; -const {ROOT_ID, getStorageKey, getModuleOperationKey} = RelayStoreUtils; +const {getStorageKey, getModuleOperationKey} = RelayStoreUtils; function mark( recordSource: RecordSource, @@ -232,13 +229,6 @@ class RelayReferenceMarker { case CLIENT_EXTENSION: this._traverseSelections(selection.selections, record); break; - case FLIGHT_FIELD: - if (RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD) { - this._traverseFlightField(selection, record); - } else { - throw new Error('Flight fields are not yet supported.'); - } - break; case CLIENT_COMPONENT: if (this._shouldProcessClientComponents === false) { break; @@ -248,6 +238,9 @@ class RelayReferenceMarker { case RELAY_RESOLVER: this._traverseResolverField(selection, record); break; + case RELAY_LIVE_RESOLVER: + this._traverseResolverField(selection, record); + break; case CLIENT_EDGE_TO_CLIENT_OBJECT: this._traverseClientEdgeToClientObject(selection, record); break; @@ -315,7 +308,7 @@ class RelayReferenceMarker { } _traverseResolverField( - field: 
NormalizationResolverField, + field: NormalizationResolverField | NormalizationLiveResolverField, record: Record, ): ?DataID { const storageKey = getStorageKey(field, this._variables); @@ -394,51 +387,6 @@ class RelayReferenceMarker { } }); } - - _traverseFlightField(field: NormalizationFlightField, record: Record): void { - const storageKey = getStorageKey(field, this._variables); - const linkedID = RelayModernRecord.getLinkedRecordID(record, storageKey); - if (linkedID == null) { - return; - } - this._references.add(linkedID); - - const reactFlightClientResponseRecord = this._recordSource.get(linkedID); - - if (reactFlightClientResponseRecord == null) { - return; - } - - const reachableExecutableDefinitions = RelayModernRecord.getValue( - reactFlightClientResponseRecord, - RelayStoreReactFlightUtils.REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - ); - - if (!Array.isArray(reachableExecutableDefinitions)) { - return; - } - - const operationLoader = this._operationLoader; - invariant( - operationLoader !== null, - 'DataChecker: Expected an operationLoader to be configured when using ' + - 'React Flight', - ); - // In Flight, the variables that are in scope for reachable executable - // definitions aren't the same as what's in scope for the outer query. 
- const prevVariables = this._variables; - // $FlowFixMe[incompatible-cast] - for (const definition of (reachableExecutableDefinitions: Array)) { - this._variables = definition.variables; - const operationReference = definition.module; - const normalizationRootNode = operationLoader.get(operationReference); - if (normalizationRootNode != null) { - const operation = getOperation(normalizationRootNode); - this._traverse(operation, ROOT_ID); - } - } - this._variables = prevVariables; - } } module.exports = {mark}; diff --git a/packages/relay-runtime/store/RelayResponseNormalizer.js b/packages/relay-runtime/store/RelayResponseNormalizer.js index 2d8888c82e180..7120dc872cc69 100644 --- a/packages/relay-runtime/store/RelayResponseNormalizer.js +++ b/packages/relay-runtime/store/RelayResponseNormalizer.js @@ -12,12 +12,12 @@ 'use strict'; import type {ActorIdentifier} from '../multi-actor-environment/ActorIdentifier'; -import type {PayloadData} from '../network/RelayNetworkTypes'; +import type {PayloadData, PayloadError} from '../network/RelayNetworkTypes'; import type { NormalizationActorChange, NormalizationDefer, - NormalizationFlightField, NormalizationLinkedField, + NormalizationLiveResolverField, NormalizationModuleImport, NormalizationNode, NormalizationResolverField, @@ -25,15 +25,13 @@ import type { NormalizationStream, } from '../util/NormalizationNode'; import type {DataID, Variables} from '../util/RelayRuntimeTypes'; +import type {RelayErrorTrie} from './RelayErrorTrie'; import type { FollowupPayload, HandleFieldPayload, IncrementalDataPlaceholder, MutableRecordSource, NormalizationSelector, - ReactFlightPayloadDeserializer, - ReactFlightReachableExecutableDefinitions, - ReactFlightServerErrorHandler, Record, RelayResponsePayload, } from './RelayStoreTypes'; @@ -49,32 +47,29 @@ const { CLIENT_EXTENSION, CONDITION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_FRAGMENT, LINKED_FIELD, LINKED_HANDLE, MODULE_IMPORT, + RELAY_LIVE_RESOLVER, RELAY_RESOLVER, 
SCALAR_FIELD, SCALAR_HANDLE, STREAM, TYPE_DISCRIMINATOR, } = require('../util/RelayConcreteNode'); -const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const {generateClientID, isClientID} = require('./ClientID'); const {getLocalVariables} = require('./RelayConcreteVariables'); +const { + buildErrorTrie, + getErrorsByKey, + getNestedErrorTrieByKey, +} = require('./RelayErrorTrie'); const RelayModernRecord = require('./RelayModernRecord'); const {createNormalizationSelector} = require('./RelayModernSelector'); -const { - REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - REACT_FLIGHT_TREE_STORAGE_KEY, - REACT_FLIGHT_TYPE_NAME, - refineToReactFlightPayloadData, -} = require('./RelayStoreReactFlightUtils'); const { ROOT_ID, - ROOT_TYPE, TYPENAME_KEY, getArgumentValues, getHandleStorageKey, @@ -96,8 +91,6 @@ export type NormalizationOptions = { +getDataID: GetDataID, +treatMissingFieldsAsNull: boolean, +path?: $ReadOnlyArray, - +reactFlightPayloadDeserializer?: ?ReactFlightPayloadDeserializer, - +reactFlightServerErrorHandler?: ?ReactFlightServerErrorHandler, +shouldProcessClientComponents?: ?boolean, +actorIdentifier?: ?ActorIdentifier, }; @@ -111,6 +104,7 @@ function normalize( selector: NormalizationSelector, response: PayloadData, options: NormalizationOptions, + errors?: Array, ): RelayResponsePayload { const {dataID, node, variables} = selector; const normalizer = new RelayResponseNormalizer( @@ -118,7 +112,7 @@ function normalize( variables, options, ); - return normalizer.normalizeResponse(node, dataID, response); + return normalizer.normalizeResponse(node, dataID, response, errors); } /** @@ -138,9 +132,8 @@ class RelayResponseNormalizer { _path: Array; _recordSource: MutableRecordSource; _variables: Variables; - _reactFlightPayloadDeserializer: ?ReactFlightPayloadDeserializer; - _reactFlightServerErrorHandler: ?ReactFlightServerErrorHandler; _shouldProcessClientComponents: ?boolean; + _errorTrie: RelayErrorTrie | null; constructor( recordSource: 
MutableRecordSource, @@ -158,9 +151,6 @@ class RelayResponseNormalizer { this._path = options.path ? [...options.path] : []; this._recordSource = recordSource; this._variables = variables; - this._reactFlightPayloadDeserializer = - options.reactFlightPayloadDeserializer; - this._reactFlightServerErrorHandler = options.reactFlightServerErrorHandler; this._shouldProcessClientComponents = options.shouldProcessClientComponents; } @@ -168,6 +158,7 @@ class RelayResponseNormalizer { node: NormalizationNode, dataID: DataID, data: PayloadData, + errors?: Array, ): RelayResponsePayload { const record = this._recordSource.get(dataID); invariant( @@ -176,9 +167,10 @@ class RelayResponseNormalizer { dataID, ); this._assignClientAbstractTypes(node); + this._errorTrie = buildErrorTrie(errors); this._traverseSelections(node, record, data); return { - errors: null, + errors, fieldPayloads: this._handleFieldPayloads, incrementalPlaceholders: this._incrementalPlaceholders, followupPayloads: this._followupPayloads, @@ -237,7 +229,7 @@ class RelayResponseNormalizer { switch (selection.kind) { case SCALAR_FIELD: case LINKED_FIELD: - this._normalizeField(node, selection, record, data); + this._normalizeField(selection, record, data); break; case CONDITION: const conditionValue = Boolean( @@ -321,7 +313,7 @@ class RelayResponseNormalizer { }); break; case MODULE_IMPORT: - this._normalizeModuleImport(node, selection, record, data); + this._normalizeModuleImport(selection, record, data); break; case DEFER: this._normalizeDefer(selection, record, data); @@ -341,19 +333,15 @@ class RelayResponseNormalizer { } this._traverseSelections(selection.fragment, record, data); break; - case FLIGHT_FIELD: - if (RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD) { - this._normalizeFlightField(node, selection, record, data); - } else { - throw new Error('Flight fields are not yet supported.'); - } - break; case ACTOR_CHANGE: - this._normalizeActorChange(node, selection, record, data); + 
this._normalizeActorChange(selection, record, data); break; case RELAY_RESOLVER: this._normalizeResolver(selection, record, data); break; + case RELAY_LIVE_RESOLVER: + this._normalizeResolver(selection, record, data); + break; case CLIENT_EDGE_TO_CLIENT_OBJECT: this._normalizeResolver(selection.backingField, record, data); break; @@ -369,7 +357,7 @@ class RelayResponseNormalizer { } _normalizeResolver( - resolver: NormalizationResolverField, + resolver: NormalizationResolverField | NormalizationLiveResolverField, record: Record, data: PayloadData, ) { @@ -449,7 +437,6 @@ class RelayResponseNormalizer { } _normalizeModuleImport( - parent: NormalizationNode, moduleImport: NormalizationModuleImport, record: Record, data: PayloadData, @@ -491,7 +478,6 @@ class RelayResponseNormalizer { } _normalizeField( - parent: NormalizationNode, selection: NormalizationLinkedField | NormalizationScalarField, record: Record, data: PayloadData, @@ -552,6 +538,13 @@ class RelayResponseNormalizer { } } RelayModernRecord.setValue(record, storageKey, null); + const errorTrie = this._errorTrie; + if (errorTrie != null) { + const errors = getErrorsByKey(errorTrie, responseKey); + if (errors != null) { + RelayModernRecord.setErrors(record, storageKey, errors); + } + } return; } @@ -566,11 +559,17 @@ class RelayResponseNormalizer { RelayModernRecord.setValue(record, storageKey, fieldValue); } else if (selection.kind === LINKED_FIELD) { this._path.push(responseKey); + const oldErrorTrie = this._errorTrie; + this._errorTrie = + oldErrorTrie == null + ? 
null + : getNestedErrorTrieByKey(oldErrorTrie, responseKey); if (selection.plural) { this._normalizePluralLink(selection, record, storageKey, fieldValue); } else { this._normalizeLink(selection, record, storageKey, fieldValue); } + this._errorTrie = oldErrorTrie; this._path.pop(); } else { (selection: empty); @@ -583,7 +582,6 @@ class RelayResponseNormalizer { } _normalizeActorChange( - parent: NormalizationNode, selection: NormalizationActorChange, record: Record, data: PayloadData, @@ -675,183 +673,6 @@ class RelayResponseNormalizer { }); } - _normalizeFlightField( - parent: NormalizationNode, - selection: NormalizationFlightField, - record: Record, - data: PayloadData, - ): void { - const responseKey = selection.alias || selection.name; - const storageKey = getStorageKey(selection, this._variables); - const fieldValue = data[responseKey]; - - if (fieldValue == null) { - if (fieldValue === undefined) { - // Flight field may be missing in the response if: - // - It is inside an abstract type refinement where the concrete type does - // not conform to the interface/union. - // However an otherwise-required field may also be missing if the server - // is configured to skip fields with `null` values, in which case the - // client is assumed to be correctly configured with - // treatMissingFieldsAsNull=true. - if (this._isUnmatchedAbstractType) { - // Field not expected to exist regardless of whether the server is pruning null - // fields or not. - return; - } else { - // Not optional and the server is not pruning null fields: field is expected - // to be present - invariant( - this._treatMissingFieldsAsNull, - 'RelayResponseNormalizer: Payload did not contain a value for ' + - 'field `%s: %s`. 
Check that you are parsing with the same ' + - 'query that was used to fetch the payload.', - responseKey, - storageKey, - ); - } - } - RelayModernRecord.setValue(record, storageKey, null); - return; - } - - const reactFlightPayload = refineToReactFlightPayloadData(fieldValue); - const reactFlightPayloadDeserializer = this._reactFlightPayloadDeserializer; - - invariant( - reactFlightPayload != null, - 'RelayResponseNormalizer: Expected React Flight payload data to be an ' + - 'object with `status`, tree`, `queries` and `errors` properties, got ' + - '`%s`.', - fieldValue, - ); - invariant( - typeof reactFlightPayloadDeserializer === 'function', - 'RelayResponseNormalizer: Expected reactFlightPayloadDeserializer to ' + - 'be a function, got `%s`.', - reactFlightPayloadDeserializer, - ); - - if (reactFlightPayload.errors.length > 0) { - if (typeof this._reactFlightServerErrorHandler === 'function') { - this._reactFlightServerErrorHandler( - reactFlightPayload.status, - reactFlightPayload.errors, - ); - } else { - warning( - false, - 'RelayResponseNormalizer: Received server errors for field `%s`.\n\n' + - '%s\n%s', - responseKey, - reactFlightPayload.errors[0].message, - reactFlightPayload.errors[0].stack, - ); - } - } - - const reactFlightID = generateClientID( - RelayModernRecord.getDataID(record), - getStorageKey(selection, this._variables), - ); - let reactFlightClientResponseRecord = this._recordSource.get(reactFlightID); - if (reactFlightClientResponseRecord == null) { - reactFlightClientResponseRecord = RelayModernRecord.create( - reactFlightID, - REACT_FLIGHT_TYPE_NAME, - ); - this._recordSource.set(reactFlightID, reactFlightClientResponseRecord); - } - - if (reactFlightPayload.tree == null) { - // This typically indicates that a fatal server error prevented rows from - // being written. When this occurs, we should not continue normalization of - // the Flight field because the row response is malformed. 
- // - // Receiving empty rows is OK because it can indicate the start of a stream. - warning( - false, - 'RelayResponseNormalizer: Expected `tree` not to be null. This ' + - 'typically indicates that a fatal server error prevented any Server ' + - 'Component rows from being written.', - ); - // We create the flight record with a null value for the tree - // and empty reachable definitions - RelayModernRecord.setValue( - reactFlightClientResponseRecord, - REACT_FLIGHT_TREE_STORAGE_KEY, - null, - ); - RelayModernRecord.setValue( - reactFlightClientResponseRecord, - REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - [], - ); - RelayModernRecord.setLinkedRecordID(record, storageKey, reactFlightID); - return; - } - - // We store the deserialized reactFlightClientResponse in a separate - // record and link it to the parent record. This is so we can GC the Flight - // tree later even if the parent record is still reachable. - const reactFlightClientResponse = reactFlightPayloadDeserializer( - reactFlightPayload.tree, - ); - - RelayModernRecord.setValue( - reactFlightClientResponseRecord, - REACT_FLIGHT_TREE_STORAGE_KEY, - reactFlightClientResponse, - ); - - const reachableExecutableDefinitions: Array = - []; - for (const query of reactFlightPayload.queries) { - if (query.response.data != null) { - this._followupPayloads.push({ - kind: 'ModuleImportPayload', - args: null, - data: query.response.data, - dataID: ROOT_ID, - operationReference: query.module, - path: [], - typeName: ROOT_TYPE, - variables: query.variables, - actorIdentifier: this._actorIdentifier, - }); - } - reachableExecutableDefinitions.push({ - module: query.module, - variables: query.variables, - }); - } - for (const fragment of reactFlightPayload.fragments) { - if (fragment.response.data != null) { - this._followupPayloads.push({ - kind: 'ModuleImportPayload', - args: null, - data: fragment.response.data, - dataID: fragment.__id, - operationReference: fragment.module, - path: [], - typeName: 
fragment.__typename, - variables: fragment.variables, - actorIdentifier: this._actorIdentifier, - }); - } - reachableExecutableDefinitions.push({ - module: fragment.module, - variables: fragment.variables, - }); - } - RelayModernRecord.setValue( - reactFlightClientResponseRecord, - REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - reachableExecutableDefinitions, - ); - RelayModernRecord.setLinkedRecordID(record, storageKey, reactFlightID); - } - _normalizeLink( field: NormalizationLinkedField, record: Record, @@ -880,7 +701,6 @@ class RelayResponseNormalizer { ); if (__DEV__) { this._validateConflictingLinkedFieldsWithIdenticalId( - record, RelayModernRecord.getLinkedRecordID(record, storageKey), nextID, storageKey, @@ -921,6 +741,11 @@ class RelayResponseNormalizer { return; } this._path.push(String(nextIndex)); + const oldErrorTrie = this._errorTrie; + this._errorTrie = + oldErrorTrie == null + ? null + : getNestedErrorTrieByKey(oldErrorTrie, nextIndex); invariant( typeof item === 'object', 'RelayResponseNormalizer: Expected elements for field `%s` to be ' + @@ -962,7 +787,6 @@ class RelayResponseNormalizer { if (__DEV__) { if (prevIDs) { this._validateConflictingLinkedFieldsWithIdenticalId( - record, prevIDs[nextIndex], nextID, storageKey, @@ -971,6 +795,7 @@ class RelayResponseNormalizer { } // $FlowFixMe[incompatible-variance] this._traverseSelections(field, nextRecord, item); + this._errorTrie = oldErrorTrie; this._path.pop(); }); RelayModernRecord.setLinkedRecordIDs(record, storageKey, nextIDs); @@ -1032,7 +857,6 @@ class RelayResponseNormalizer { * Warns if a single response contains conflicting fields with the same id */ _validateConflictingLinkedFieldsWithIdenticalId( - record: Record, prevID: ?DataID, nextID: DataID, storageKey: string, diff --git a/packages/relay-runtime/store/RelayStoreReactFlightUtils.js b/packages/relay-runtime/store/RelayStoreReactFlightUtils.js deleted file mode 100644 index 2a97f86c1b6a2..0000000000000 --- 
a/packages/relay-runtime/store/RelayStoreReactFlightUtils.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow - * @format - * @oncall relay - */ - -'use strict'; - -import type {ReactFlightPayloadData} from '../network/RelayNetworkTypes'; -import type {ReactFlightClientResponse, Record} from './RelayStoreTypes'; - -const {getType} = require('./RelayModernRecord'); -const invariant = require('invariant'); - -// Reachable (client) executable definitions encountered while server component -// rendering -const REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY = 'executableDefinitions'; -const REACT_FLIGHT_TREE_STORAGE_KEY = 'tree'; -const REACT_FLIGHT_TYPE_NAME = 'ReactFlightComponent'; - -function refineToReactFlightPayloadData( - payload: mixed, -): ?ReactFlightPayloadData { - if ( - payload == null || - typeof payload !== 'object' || - typeof payload.status !== 'string' || - (!Array.isArray(payload.tree) && payload.tree !== null) || - !Array.isArray(payload.queries) || - !Array.isArray(payload.fragments) || - !Array.isArray(payload.errors) - ) { - return null; - } - return (payload: $FlowFixMe); -} - -function getReactFlightClientResponse( - record: Record, -): ?ReactFlightClientResponse { - invariant( - getType(record) === REACT_FLIGHT_TYPE_NAME, - 'getReactFlightClientResponse(): Expected a ReactFlightComponentRecord, ' + - 'got %s.', - record, - ); - return (record[REACT_FLIGHT_TREE_STORAGE_KEY]: $FlowFixMe); -} - -module.exports = { - REACT_FLIGHT_EXECUTABLE_DEFINITIONS_STORAGE_KEY, - REACT_FLIGHT_TREE_STORAGE_KEY, - REACT_FLIGHT_TYPE_NAME, - getReactFlightClientResponse, - refineToReactFlightPayloadData, -}; diff --git a/packages/relay-runtime/store/RelayStoreSubscriptions.js b/packages/relay-runtime/store/RelayStoreSubscriptions.js index 413a492e5f645..4afb50b051927 100644 --- 
a/packages/relay-runtime/store/RelayStoreSubscriptions.js +++ b/packages/relay-runtime/store/RelayStoreSubscriptions.js @@ -27,6 +27,7 @@ const deepFreeze = require('../util/deepFreeze'); const recycleNodesInto = require('../util/recycleNodesInto'); const RelayFeatureFlags = require('../util/RelayFeatureFlags'); const hasOverlappingIDs = require('./hasOverlappingIDs'); +const hasSignificantOverlappingIDs = require('./hasSignificantOverlappingIDs'); const RelayReader = require('./RelayReader'); type Subscription = { @@ -95,6 +96,8 @@ class RelayStoreSubscriptions implements StoreSubscriptions { subscription.backup = null; if (backup) { if (backup.data !== subscription.snapshot.data) { + // This subscription's data changed in the optimistic state. We will + // need to re-read. subscription.stale = true; } subscription.snapshot = { @@ -106,8 +109,11 @@ class RelayStoreSubscriptions implements StoreSubscriptions { selector: backup.selector, missingRequiredFields: backup.missingRequiredFields, relayResolverErrors: backup.relayResolverErrors, + errorResponseFields: backup.errorResponseFields, }; } else { + // This subscription was created during the optimistic state. We should + // re-read. subscription.stale = true; } }); @@ -170,6 +176,7 @@ class RelayStoreSubscriptions implements StoreSubscriptions { selector: nextSnapshot.selector, missingRequiredFields: nextSnapshot.missingRequiredFields, relayResolverErrors: nextSnapshot.relayResolverErrors, + errorResponseFields: nextSnapshot.errorResponseFields, }: Snapshot); if (__DEV__) { deepFreeze(nextSnapshot); @@ -188,6 +195,16 @@ class RelayStoreSubscriptions implements StoreSubscriptions { callback(nextSnapshot); return snapshot.selector.owner; } + // While there were some overlapping IDs that affected this subscription, + // none of the read fields were actually affected. 
+ if ( + RelayFeatureFlags.ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION && + (stale || + hasSignificantOverlappingIDs(snapshot.seenRecords, updatedRecordIDs)) + ) { + // With loose attribution enabled, we'll attribute this anyway. + return snapshot.selector.owner; + } } } diff --git a/packages/relay-runtime/store/RelayStoreTypes.js b/packages/relay-runtime/store/RelayStoreTypes.js index 40391e819af2d..30a226802d8e2 100644 --- a/packages/relay-runtime/store/RelayStoreTypes.js +++ b/packages/relay-runtime/store/RelayStoreTypes.js @@ -10,18 +10,16 @@ */ 'use strict'; - import type { ActorIdentifier, IActorEnvironment, } from '../multi-actor-environment'; import type { GraphQLResponse, + GraphQLResponseWithData, INetwork, PayloadData, PayloadError, - ReactFlightServerError, - ReactFlightServerTree, UploadableMap, } from '../network/RelayNetworkTypes'; import type RelayObservable from '../network/RelayObservable'; @@ -34,6 +32,7 @@ import type { NormalizationSelectableNode, } from '../util/NormalizationNode'; import type { + CatchFieldTo, ReaderClientEdgeToServerObject, ReaderFragment, ReaderLinkedField, @@ -51,29 +50,27 @@ import type { UpdatableQuery, Variables, } from '../util/RelayRuntimeTypes'; +import type {TRelayFieldError} from './RelayErrorTrie'; +import type { + Record as RelayModernRecord, + RecordJSON, +} from './RelayModernRecord'; import type {InvalidationState} from './RelayModernStore'; import type RelayOperationTracker from './RelayOperationTracker'; import type {RecordState} from './RelayRecordState'; +import type {NormalizationOptions} from './RelayResponseNormalizer'; export opaque type FragmentType = empty; export type OperationTracker = RelayOperationTracker; +export type Record = RelayModernRecord; + export type MutationParameters = { +response: {...}, +variables: {...}, +rawResponse?: {...}, }; -/* - * An individual cached graph object. - */ -export type Record = {[key: string]: mixed, ...}; - -/** - * A collection of records keyed by id. 
- */ -export type RecordObjectMap = {[DataID]: ?Record}; - export type FragmentMap = {[key: string]: ReaderFragment, ...}; /** @@ -120,11 +117,19 @@ type FieldLocation = { owner: string, }; +type ErrorFieldLocation = { + ...FieldLocation, + error: TRelayFieldError, + to?: CatchFieldTo, +}; + export type MissingRequiredFields = $ReadOnly< | {action: 'THROW', field: FieldLocation} | {action: 'LOG', fields: Array}, >; +export type ErrorResponseFields = Array; + export type ClientEdgeTraversalInfo = { +readerClientEdge: ReaderClientEdgeToServerObject, +clientEdgeDestinationID: DataID, @@ -162,6 +167,7 @@ export type Snapshot = { +selector: SingularReaderSelector, +missingRequiredFields: ?MissingRequiredFields, +relayResolverErrors: RelayResolverErrors, + +errorResponseFields: ?ErrorResponseFields, }; /** @@ -251,9 +257,14 @@ export interface RecordSource { getStatus(dataID: DataID): RecordState; has(dataID: DataID): boolean; size(): number; - toJSON(): {[DataID]: ?Record, ...}; + toJSON(): RecordSourceJSON; } +/** + * A collection of records keyed by id. + */ +export type RecordSourceJSON = {[DataID]: ?RecordJSON}; + /** * A read/write interface for accessing and updating graph data. 
*/ @@ -463,7 +474,7 @@ export interface RecordProxy { args?: ?Variables, ): RecordProxy; setLinkedRecords( - records: Array, + records: $ReadOnlyArray, name: string, args?: ?Variables, ): RecordProxy; @@ -534,164 +545,224 @@ export interface RecordSourceSelectorProxy extends RecordSourceProxy { invalidateStore(): void; } +export type SuspenseFragmentLogEvent = { + +name: 'suspense.fragment', + +data: mixed, + +fragment: ReaderFragment, + +isRelayHooks: boolean, + +isMissingData: boolean, + +isPromiseCached: boolean, + +pendingOperations: $ReadOnlyArray, +}; + +export type SuspenseQueryLogEvent = { + +name: 'suspense.query', + +fetchPolicy: string, + +isPromiseCached: boolean, + +operation: OperationDescriptor, + +queryAvailability: ?OperationAvailability, + +renderPolicy: RenderPolicy, +}; + +export type QueryResourceFetchLogEvent = { + +name: 'queryresource.fetch', + // ID of this query resource request and will be the same + // if there is an associated queryresource.retain event. + +resourceID: number, + +operation: OperationDescriptor, + // value from ProfilerContext + +profilerContext: mixed, + // FetchPolicy from Relay Hooks + +fetchPolicy: string, + // RenderPolicy from Relay Hooks + +renderPolicy: RenderPolicy, + +queryAvailability: OperationAvailability, + +shouldFetch: boolean, +}; + +export type QueryResourceRetainLogEvent = { + +name: 'queryresource.retain', + +resourceID: number, + // value from ProfilerContext + +profilerContext: mixed, +}; + +export type FragmentResourceMissingDataLogEvent = { + // Indicates FragmentResource is going to return a result that is missing + // data. + +name: 'fragmentresource.missing_data', + +data: mixed, + +fragment: ReaderFragment, + +isRelayHooks: boolean, + // Are we reading this result from the fragment resource cache? + +cached: boolean, +}; + +export type PendingOperationFoundLogEvent = { + // Indicates getPendingOperationForFragment identified a pending operation. 
+ // Useful for measuring how frequently RelayOperationTracker identifies a + // related operation on which to suspend. + +name: 'pendingoperation.found', + +fragment: ReaderFragment, + +fragmentOwner: RequestDescriptor, + +pendingOperations: $ReadOnlyArray, +}; + +export type NetworkInfoLogEvent = { + +name: 'network.info', + +networkRequestId: number, + +info: mixed, +}; + +export type NetworkStartLogEvent = { + +name: 'network.start', + +networkRequestId: number, + +params: RequestParameters, + +variables: Variables, + +cacheConfig: CacheConfig, +}; + +export type NetworkNextLogEvent = { + +name: 'network.next', + +networkRequestId: number, + +response: GraphQLResponse, +}; + +export type NetworkErrorLogEvent = { + +name: 'network.error', + +networkRequestId: number, + +error: Error, +}; + +export type NetworkCompleteLogEvent = { + +name: 'network.complete', + +networkRequestId: number, +}; + +export type NetworkUnsubscribeLogEvent = { + +name: 'network.unsubscribe', + +networkRequestId: number, +}; + +export type ExecuteStartLogEvent = { + +name: 'execute.start', + +executeId: number, + +params: RequestParameters, + +variables: Variables, + +cacheConfig: CacheConfig, +}; + +export type ExecuteNextLogEvent = { + +name: 'execute.next', + +executeId: number, + +response: GraphQLResponse, + +duration: number, +}; + +export type ExecuteAsyncModuleLogEvent = { + +name: 'execute.async.module', + +executeId: number, + +operationName: string, + +duration: number, +}; + +export type ExecuteErrorLogEvent = { + +name: 'execute.error', + +executeId: number, + +error: Error, +}; + +export type ExecuteCompleteLogEvent = { + +name: 'execute.complete', + +executeId: number, +}; + +export type StorePublishLogEvent = { + +name: 'store.publish', + +source: RecordSource, + +optimistic: boolean, +}; + +export type StoreSnapshotLogEvent = { + +name: 'store.snapshot', +}; + +export type StoreRestoreLogEvent = { + +name: 'store.restore', +}; + +export type StoreGcLogEvent = { + +name: 
'store.gc', + +references: DataIDSet, +}; + +export type StoreNotifyStartLogEvent = { + +name: 'store.notify.start', + +sourceOperation: ?OperationDescriptor, +}; + +export type StoreNotifyCompleteLogEvent = { + +name: 'store.notify.complete', + +sourceOperation: ?OperationDescriptor, + +updatedRecordIDs: DataIDSet, + +invalidatedRecordIDs: DataIDSet, +}; + +export type StoreNotifySubscriptionLogEvent = { + +name: 'store.notify.subscription', + +sourceOperation: ?OperationDescriptor, + +snapshot: Snapshot, + +nextSnapshot: Snapshot, +}; + +export type EntrypointRootConsumeLogEvent = { + +name: 'entrypoint.root.consume', + +profilerContext: mixed, + +rootModuleID: string, +}; + +export type LiveResolverBatchStartLogEvent = { + +name: 'liveresolver.batch.start', +}; + +export type LiveResolverBatchEndLogEvent = { + +name: 'liveresolver.batch.end', +}; + +export type UseFragmentSubscriptionMissedUpdates = { + +name: 'useFragment.subscription.missedUpdates', + +hasDataChanges: boolean, +}; + export type LogEvent = - | { - +name: 'suspense.fragment', - +data: mixed, - +fragment: ReaderFragment, - +isRelayHooks: boolean, - +isMissingData: boolean, - +isPromiseCached: boolean, - +pendingOperations: $ReadOnlyArray, - } - | { - +name: 'suspense.query', - +fetchPolicy: string, - +isPromiseCached: boolean, - +operation: OperationDescriptor, - +queryAvailability: ?OperationAvailability, - +renderPolicy: RenderPolicy, - } - | { - +name: 'queryresource.fetch', - // ID of this query resource request and will be the same - // if there is an associated queryresource.retain event. 
- +resourceID: number, - +operation: OperationDescriptor, - // value from ProfilerContext - +profilerContext: mixed, - // FetchPolicy from Relay Hooks - +fetchPolicy: string, - // RenderPolicy from Relay Hooks - +renderPolicy: RenderPolicy, - +queryAvailability: OperationAvailability, - +shouldFetch: boolean, - } - | { - +name: 'queryresource.retain', - +resourceID: number, - // value from ProfilerContext - +profilerContext: mixed, - } - | { - // Indicates FragmentResource is going to return a result that is missing - // data. - +name: 'fragmentresource.missing_data', - +data: mixed, - +fragment: ReaderFragment, - +isRelayHooks: boolean, - // Are we reading this result from the fragment resource cache? - +cached: boolean, - } - | { - // Indicates getPendingOperationForFragment identified a pending operation. - // Useful for measuring how frequently RelayOperationTracker identifies a - // related operation on which to suspend. - +name: 'pendingoperation.found', - +fragment: ReaderFragment, - +fragmentOwner: RequestDescriptor, - +pendingOperations: $ReadOnlyArray, - } - | { - +name: 'network.info', - +networkRequestId: number, - +info: mixed, - } - | { - +name: 'network.start', - +networkRequestId: number, - +params: RequestParameters, - +variables: Variables, - +cacheConfig: CacheConfig, - } - | { - +name: 'network.next', - +networkRequestId: number, - +response: GraphQLResponse, - } - | { - +name: 'network.error', - +networkRequestId: number, - +error: Error, - } - | { - +name: 'network.complete', - +networkRequestId: number, - } - | { - +name: 'network.unsubscribe', - +networkRequestId: number, - } - | { - +name: 'execute.start', - +executeId: number, - +params: RequestParameters, - +variables: Variables, - +cacheConfig: CacheConfig, - } - | { - +name: 'execute.next', - +executeId: number, - +response: GraphQLResponse, - +duration: number, - } - | { - +name: 'execute.async.module', - +executeId: number, - +operationName: string, - +duration: number, - } - | { - 
+name: 'execute.flight.payload_deserialize', - +executeId: number, - +operationName: string, - +duration: number, - } - | { - +name: 'execute.error', - +executeId: number, - +error: Error, - } - | { - +name: 'execute.complete', - +executeId: number, - } - | { - +name: 'store.publish', - +source: RecordSource, - +optimistic: boolean, - } - | { - +name: 'store.snapshot', - } - | { - +name: 'store.restore', - } - | { - +name: 'store.gc', - +references: DataIDSet, - } - | { - +name: 'store.notify.start', - +sourceOperation: ?OperationDescriptor, - } - | { - +name: 'store.notify.complete', - +sourceOperation: ?OperationDescriptor, - +updatedRecordIDs: DataIDSet, - +invalidatedRecordIDs: DataIDSet, - } - | { - +name: 'store.notify.subscription', - +sourceOperation: ?OperationDescriptor, - +snapshot: Snapshot, - +nextSnapshot: Snapshot, - } - | { - +name: 'entrypoint.root.consume', - +profilerContext: mixed, - +rootModuleID: string, - }; + | SuspenseFragmentLogEvent + | SuspenseQueryLogEvent + | QueryResourceFetchLogEvent + | QueryResourceRetainLogEvent + | FragmentResourceMissingDataLogEvent + | PendingOperationFoundLogEvent + | NetworkInfoLogEvent + | NetworkStartLogEvent + | NetworkNextLogEvent + | NetworkErrorLogEvent + | NetworkCompleteLogEvent + | NetworkUnsubscribeLogEvent + | ExecuteStartLogEvent + | ExecuteNextLogEvent + | ExecuteAsyncModuleLogEvent + | ExecuteErrorLogEvent + | ExecuteCompleteLogEvent + | StorePublishLogEvent + | StoreSnapshotLogEvent + | StoreRestoreLogEvent + | StoreGcLogEvent + | StoreNotifyStartLogEvent + | StoreNotifyCompleteLogEvent + | StoreNotifySubscriptionLogEvent + | EntrypointRootConsumeLogEvent + | LiveResolverBatchStartLogEvent + | LiveResolverBatchEndLogEvent + | UseFragmentSubscriptionMissedUpdates; export type LogFunction = LogEvent => void; export type LogRequestInfoFunction = mixed => void; @@ -888,7 +959,7 @@ export interface IEnvironment { * Called by Relay when it encounters a missing field that has been annotated * with 
`@required(action: LOG)`. */ - requiredFieldLogger: RequiredFieldLogger; + relayFieldLogger: RelayFieldLogger; } /** @@ -912,10 +983,10 @@ export type DataIDSet = Set; * A function that updates a store (via a proxy) given the results of a "handle" * field payload. */ -export type Handler = { +export type Handler = $ReadOnly<{ update: (store: RecordSourceProxy, fieldPayload: HandleFieldPayload) => void, ... -}; +}>; /** * A payload that is used to initialize or update a "handle" field with @@ -939,7 +1010,7 @@ export type HandleFieldPayload = { /** * A payload that represents data necessary to process the results of an object - * with a `@module` fragment spread, or a Flight field's: + * with a `@module` fragment spread: * * ## @module Fragment Spread * - args: Local arguments from the parent @@ -954,20 +1025,6 @@ export type HandleFieldPayload = { * which can in turn be used to normalize and publish the data. The dataID and * typeName can also be used to construct a root record for normalization. * - * ## Flight fields - * In Flight, data for additional components rendered by the requested server - * component are included in the response returned by a Flight compliant server. - * - * - data: Data used by additional components rendered by the server component - * being requested. - * - dataID: For Flight fields, this should always be ROOT_ID. This is because - * the query data isn't relative to the parent record–it's root data. - * - operationReference: The query's module that will be later used by an - * operation loader. - * - variables: The query's variables. - * - typeName: For Flight fields, this should always be ROOT_TYPE. This is - * because the query data isn't relative to the parent record–it's - * root data. 
*/ export type ModuleImportPayload = { +kind: 'ModuleImportPayload', @@ -1037,6 +1094,13 @@ export type StreamPlaceholder = { }; export type IncrementalDataPlaceholder = DeferPlaceholder | StreamPlaceholder; +export type NormalizeResponseFunction = ( + response: GraphQLResponseWithData, + selector: NormalizationSelector, + typeName: string, + options: NormalizationOptions, +) => RelayResponsePayload; + /** * A user-supplied object to load a generated operation (SplitOperation or * ConcreteRequest) AST by a module reference. The exact format of a module @@ -1129,11 +1193,7 @@ export type MissingFieldHandler = ) => ?Array, }; -/** - * A handler for events related to @required fields. Currently reports missing - * fields with either `action: LOG` or `action: THROW`. - */ -export type RequiredFieldLogger = ( +export type RelayFieldLoggerEvent = | { +kind: 'missing_field.log', +owner: string, @@ -1149,8 +1209,19 @@ export type RequiredFieldLogger = ( +owner: string, +fieldPath: string, +error: Error, - }, -) => void; + } + | { + +kind: 'relay_field_payload.error', + +owner: string, + +fieldPath: string, + +error: TRelayFieldError, + }; + +/** + * A handler for events related to @required fields. Currently reports missing + * fields with either `action: LOG` or `action: THROW`. + */ +export type RelayFieldLogger = (event: RelayFieldLoggerEvent) => void; /** * The results of normalizing a query. @@ -1228,35 +1299,6 @@ export interface PublishQueue { run(sourceOperation?: OperationDescriptor): $ReadOnlyArray; } -/** - * ReactFlightDOMRelayClient processes a ReactFlightServerTree into a - * ReactFlightClientResponse object. readRoot() can suspend. 
- */ -export type ReactFlightClientResponse = {readRoot: () => mixed, ...}; -export type ReactFlightReachableExecutableDefinitions = { - +module: mixed, - +variables: Variables, -}; -/** - * A user-supplied function that takes a ReactFlightServerTree - * (after successful execution on the server), and deserializes it into a - * ReactFlightClientResponse object. - */ -export type ReactFlightPayloadDeserializer = ( - tree: ReactFlightServerTree, -) => ReactFlightClientResponse; -/** - * An optionally user-supplied function that handles errors returned by the - * server's JS runtime while executing a React Server Component. - */ -export type ReactFlightServerErrorHandler = ( - status: string, - errors: Array, -) => void; - /** * The return type of a client edge resolver pointing to a concrete type. * T can be overridden to be more specific than a DataID, e.g. if the IDs @@ -1265,3 +1307,25 @@ export type ReactFlightServerErrorHandler = ( export type ConcreteClientEdgeResolverReturnType = { +id: T & DataID, }; + +/** + * The return type of a Live Resolver. Models an external value which can + * be read lazily and which might change over time. The subscribe method + * returns a callback which should be called when the value _may_ have changed. + * + * While over-notification (subscription notifications when the read value has + * not actually changed) is supported, for performance reasons, it is recommended + * that the provider of the LiveState value confirms that the value has indeed + * changed before notifying Relay of the change. + */ +export type LiveState<+T> = { + /** + * Returns the current value of the live state. + */ + read(): T, + /** + * Subscribes to changes in the live state. The state provider should + * call the callback when the value of the live state changes. 
+ */ + subscribe(cb: () => void): () => void, +}; diff --git a/packages/relay-runtime/store/RelayStoreUtils.js b/packages/relay-runtime/store/RelayStoreUtils.js index 81d455e6dda38..8338672f25a45 100644 --- a/packages/relay-runtime/store/RelayStoreUtils.js +++ b/packages/relay-runtime/store/RelayStoreUtils.js @@ -31,10 +31,14 @@ const RelayConcreteNode = require('../util/RelayConcreteNode'); const stableCopy = require('../util/stableCopy'); const invariant = require('invariant'); -export type Arguments = {+[string]: mixed}; +export type Arguments = { + +FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT?: boolean, + +[string]: mixed, +}; const {VARIABLE, LITERAL, OBJECT_VALUE, LIST_VALUE} = RelayConcreteNode; +const ERRORS_KEY: '__errors' = '__errors'; const MODULE_COMPONENT_KEY_PREFIX = '__module_component_'; const MODULE_OPERATION_KEY_PREFIX = '__module_operation_'; @@ -68,13 +72,24 @@ function getArgumentValue( * names. Guaranteed to return a result with stable ordered nested values. 
*/ function getArgumentValues( - args: $ReadOnlyArray, + args?: ?$ReadOnlyArray, variables: Variables, + isWithinUnmatchedTypeRefinement?: boolean, ): Arguments { - const values: {[string]: mixed} = {}; - args.forEach(arg => { - values[arg.name] = getArgumentValue(arg, variables); - }); + const values: { + FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT?: boolean, + [string]: mixed, + } = {}; + if (isWithinUnmatchedTypeRefinement) { + values[ + RelayStoreUtils.FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT + ] = true; + } + if (args) { + args.forEach(arg => { + values[arg.name] = getArgumentValue(arg, variables); + }); + } return values; } @@ -239,8 +254,11 @@ const RelayStoreUtils = { CLIENT_EDGE_TRAVERSAL_PATH: '__clientEdgeTraversalPath', FRAGMENTS_KEY: '__fragments', FRAGMENT_OWNER_KEY: '__fragmentOwner', + FRAGMENT_POINTER_IS_WITHIN_UNMATCHED_TYPE_REFINEMENT: + '$isWithinUnmatchedTypeRefinement', FRAGMENT_PROP_NAME_KEY: '__fragmentPropName', MODULE_COMPONENT_KEY: '__module_component', // alias returned by Reader + ERRORS_KEY, ID_KEY: '__id', REF_KEY: '__ref', REFS_KEY: '__refs', @@ -248,7 +266,6 @@ const RelayStoreUtils = { ROOT_TYPE: '__Root', TYPENAME_KEY: '__typename', INVALIDATED_AT_KEY: '__invalidated_at', - IS_WITHIN_UNMATCHED_TYPE_REFINEMENT: '__isWithinUnmatchedTypeRefinement', RELAY_RESOLVER_VALUE_KEY: '__resolverValue', RELAY_RESOLVER_INVALIDATION_KEY: '__resolverValueMayBeInvalid', RELAY_RESOLVER_SNAPSHOT_KEY: '__resolverSnapshot', diff --git a/packages/relay-runtime/store/ResolverCache.js b/packages/relay-runtime/store/ResolverCache.js index be8752b6b5128..2bb8e0292e6f5 100644 --- a/packages/relay-runtime/store/ResolverCache.js +++ b/packages/relay-runtime/store/ResolverCache.js @@ -20,13 +20,14 @@ import type { DataIDSet, MutableRecordSource, Record, - RelayResolverError, SingularReaderSelector, Snapshot, } from './RelayStoreTypes'; const recycleNodesInto = require('../util/recycleNodesInto'); const {RELAY_LIVE_RESOLVER} = 
require('../util/RelayConcreteNode'); +const RelayFeatureFlags = require('../util/RelayFeatureFlags'); +const shallowFreeze = require('../util/shallowFreeze'); const {generateClientID} = require('./ClientID'); const RelayModernRecord = require('./RelayModernRecord'); const { @@ -44,7 +45,7 @@ type ResolverID = string; export type EvaluationResult = { resolverResult: ?T, snapshot: ?Snapshot, - error: ?RelayResolverError, + error: ?Error, }; export type ResolverFragmentResult = { @@ -65,7 +66,7 @@ export interface ResolverCache { ): [ ?T /* Answer */, ?DataID /* Seen record */, - ?RelayResolverError, + ?Error, ?Snapshot, ?DataID /* ID of record containing a suspended Live field */, ?DataIDSet /** Set of updated records after read. Then need to be consumed by `processFollowupUpdates` */, @@ -90,7 +91,7 @@ class NoopResolverCache implements ResolverCache { ): [ ?T /* Answer */, ?DataID /* Seen record */, - ?RelayResolverError, + ?Error, ?Snapshot, ?DataID /* ID of record containing a suspended Live field */, ?DataIDSet /** Set of dirty records after read */, @@ -147,7 +148,7 @@ class RecordResolverCache implements ResolverCache { ): [ ?T /* Answer */, ?DataID /* Seen record */, - ?RelayResolverError, + ?Error, ?Snapshot, ?DataID /* ID of record containing a suspended Live field */, ?DataIDSet /** Set of dirty records after read */, @@ -173,6 +174,7 @@ class RecordResolverCache implements ResolverCache { linkedRecord = RelayModernRecord.create(linkedID, '__RELAY_RESOLVER__'); const evaluationResult = evaluate(); + shallowFreeze(evaluationResult.resolverResult); RelayModernRecord.setValue( linkedRecord, RELAY_RESOLVER_VALUE_KEY, @@ -224,11 +226,22 @@ class RecordResolverCache implements ResolverCache { } // $FlowFixMe[incompatible-type] - will always be empty - const answer: T = linkedRecord[RELAY_RESOLVER_VALUE_KEY]; + const answer: T = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_VALUE_KEY, + ); + // $FlowFixMe[incompatible-type] - casting mixed - const 
snapshot: ?Snapshot = linkedRecord[RELAY_RESOLVER_SNAPSHOT_KEY]; + const snapshot: ?Snapshot = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_SNAPSHOT_KEY, + ); + // $FlowFixMe[incompatible-type] - casting mixed - const error: ?RelayResolverError = linkedRecord[RELAY_RESOLVER_ERROR_KEY]; + const error: ?Error = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_ERROR_KEY, + ); return [answer, linkedID, error, snapshot, undefined, undefined]; } @@ -313,6 +326,22 @@ class RecordResolverCache implements ResolverCache { if (recycled !== originalInputs) { return true; } + + if (RelayFeatureFlags.MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD) { + // This record does not need to be recomputed, we can reuse the cached value. + // For subsequent reads we can mark this record as "clean" so that they will + // not need to re-read the fragment. + const nextRecord = RelayModernRecord.clone(record); + RelayModernRecord.setValue( + nextRecord, + RELAY_RESOLVER_INVALIDATION_KEY, + false, + ); + + const recordSource = this._getRecordSource(); + recordSource.set(RelayModernRecord.getDataID(record), nextRecord); + } + return false; } diff --git a/packages/relay-runtime/store/ResolverFragments.js b/packages/relay-runtime/store/ResolverFragments.js index b622d012f123b..78833ac240a12 100644 --- a/packages/relay-runtime/store/ResolverFragments.js +++ b/packages/relay-runtime/store/ResolverFragments.js @@ -55,17 +55,14 @@ declare function readFragment< >( fragmentInput: GraphQLTaggedNode, fragmentKey: TKey, -): $Call<({+$data?: TFragmentData, ...}) => TFragmentData, TKey>; +): $NonMaybeType; declare function readFragment< TKey: ?{+$data?: mixed, +$fragmentSpreads: FragmentType, ...}, >( fragmentInput: GraphQLTaggedNode, fragmentKey: TKey, -): $Call< - (?{+$data?: TFragmentData, ...}) => ?TFragmentData, - TKey, ->; +): ?TKey?.['$data']; declare function readFragment< TKey: $ReadOnlyArray<{ @@ -76,12 +73,7 @@ declare function readFragment< >( fragmentInput: 
GraphQLTaggedNode, fragmentKey: TKey, -): $Call< - ( - $ReadOnlyArray<{+$data?: TFragmentData, ...}>, - ) => TFragmentData, - TKey, ->; +): $NonMaybeType; declare function readFragment< TKey: ?$ReadOnlyArray<{ @@ -92,12 +84,7 @@ declare function readFragment< >( fragmentInput: GraphQLTaggedNode, fragmentKey: TKey, -): $Call< - ( - ?$ReadOnlyArray<{+$data?: TFragmentData, ...}>, - ) => ?TFragmentData, - TKey, ->; +): ?TKey?.[number]['$data']; declare function readFragment( fragmentInput: Fragment, diff --git a/packages/relay-runtime/store/StoreInspector.js b/packages/relay-runtime/store/StoreInspector.js index eb7d5f051b911..6e2db9c5800df 100644 --- a/packages/relay-runtime/store/StoreInspector.js +++ b/packages/relay-runtime/store/StoreInspector.js @@ -136,6 +136,7 @@ if (__DEV__) { return record; } return new Proxy( + // $FlowFixMe: Do not assume that record is an object {...record}, { get(target, prop) { diff --git a/packages/relay-runtime/store/__tests__/ClientEdgeToClientObject-test.js b/packages/relay-runtime/store/__tests__/ClientEdgeToClientObject-test.js index 71450b869908d..a64c29fadd57c 100644 --- a/packages/relay-runtime/store/__tests__/ClientEdgeToClientObject-test.js +++ b/packages/relay-runtime/store/__tests__/ClientEdgeToClientObject-test.js @@ -32,12 +32,10 @@ disallowWarnings(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); test('Can read a deep portion of the schema that is backed by client edges to client objects.', () => { @@ -93,7 +91,7 @@ test('Can read a deep portion of the schema that is backed by client edges to cl }); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (environment.lookup(operation.fragment).data: any); + const {me}: any = 
environment.lookup(operation.fragment).data; expect(me).toMatchInlineSnapshot(` Object { diff --git a/packages/relay-runtime/store/__tests__/DataChecker-test.js b/packages/relay-runtime/store/__tests__/DataChecker-test.js index d8c490b365a40..ee81e14ef22cb 100644 --- a/packages/relay-runtime/store/__tests__/DataChecker-test.js +++ b/packages/relay-runtime/store/__tests__/DataChecker-test.js @@ -38,7 +38,6 @@ const { } = require('../../multi-actor-environment/ActorIdentifier'); const {getRequest, graphql} = require('../../query/GraphQLTag'); const getRelayHandleKey = require('../../util/getRelayHandleKey'); -const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); const {check} = require('../DataChecker'); const defaultGetDataID = require('../defaultGetDataID'); const {createNormalizationSelector} = require('../RelayModernSelector'); @@ -720,9 +719,11 @@ describe('check()', () => { loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -735,23 +736,20 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'PlainUserNameRenderer', - __module_component_DataCheckerTest4Fragment: - 'PlainUserNameRenderer.react', - __module_operation_DataCheckerTest4Fragment: - 'DataCheckerTestPlainUserNameRenderer_nameFragment$normalization.graphql', - plaintext: 'plain name', - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { 
+ __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'PlainUserNameRenderer', + __module_component_DataCheckerTest4Fragment: + 'PlainUserNameRenderer.react', + __module_operation_DataCheckerTest4Fragment: + 'DataCheckerTestPlainUserNameRenderer_nameFragment$normalization.graphql', + plaintext: 'plain name', + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -799,23 +797,20 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_DataCheckerTest4Fragment: - 'MarkdownUserNameRenderer.react', - __module_operation_DataCheckerTest4Fragment: - 'DataCheckerTestMarkdownUserNameRenderer_nameFragment$normalization.graphql', - markdown: 'markdown payload', - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + __module_component_DataCheckerTest4Fragment: + 'MarkdownUserNameRenderer.react', + __module_operation_DataCheckerTest4Fragment: + 'DataCheckerTestMarkdownUserNameRenderer_nameFragment$normalization.graphql', + markdown: 'markdown payload', + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -861,18 +856,15 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 
'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - // NOTE: markdown/data fields are missing, data not processed. + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + // NOTE: markdown/data fields are missing, data not processed. + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -915,19 +907,16 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - // NOTE: 'markdown' field missing - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + // NOTE: 'markdown' field missing + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -972,19 +961,16 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - 
__ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - markdown: 'markdown text', - // NOTE: 'data' field missing + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + markdown: 'markdown text', + // NOTE: 'data' field missing + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -1022,18 +1008,15 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'CustomNameRenderer', - customField: 'custom value', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'CustomNameRenderer', + customField: 'custom value', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -1070,8 +1053,7 @@ describe('check()', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - null, + 'nameRenderer(supported:"34hjiS")': null, }, 'client:root': { __id: 'client:root', @@ -1189,9 +1171,11 @@ describe('check()', () => { loader = { get: jest.fn( 
+ // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -1977,22 +1961,22 @@ describe('check()', () => { updatedHometown === undefined ? {} : updatedHometown === null - ? { - user1: { - __id: 'user1', - __typename: 'User', - hometown: null, - }, - } - : { - user1: { - __id: 'user1', - __typename: 'User', - hometown: { - __ref: updatedHometown, + ? { + user1: { + __id: 'user1', + __typename: 'User', + hometown: null, + }, + } + : { + user1: { + __id: 'user1', + __typename: 'User', + hometown: { + __ref: updatedHometown, + }, }, }, - }, ); }, ); @@ -2137,22 +2121,22 @@ describe('check()', () => { updatedScreennames === undefined ? {} : updatedScreennames === null - ? { - user1: { - __id: 'user1', - __typename: 'User', - screennames: null, - }, - } - : { - user1: { - __id: 'user1', - __typename: 'User', - screennames: { - __refs: updatedScreennames, + ? { + user1: { + __id: 'user1', + __typename: 'User', + screennames: null, + }, + } + : { + user1: { + __id: 'user1', + __typename: 'User', + screennames: { + __refs: updatedScreennames, + }, }, }, - }, ); }, ); @@ -2804,319 +2788,6 @@ describe('check()', () => { }); }); - describe('with feature ENABLE_REACT_FLIGHT_COMPONENT_FIELD', () => { - let FlightQuery; - let InnerQuery; - let operationLoader; - - const readRoot = () => { - return { - $$typeof: Symbol.for('react.element'), - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }; - }; - - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query DataCheckerTestFlightQuery($id: ID!, $count: Int!) { - node(id: $id) { - ... on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - InnerQuery = graphql` - query DataCheckerTestInnerQuery($id: ID!) { - node(id: $id) { - ... 
on User { - name - } - } - } - `; - - operationLoader = { - get: jest.fn(() => getRequest(InnerQuery)), - load: jest.fn(() => Promise.resolve(getRequest(InnerQuery))), - }; - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('returns available when the Flight field is fetched', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - '2': { - __id: '2', - __typename: 'User', - id: '2', - name: 'Lauren', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __id: 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - __typename: 'ReactFlightComponent', - executableDefinitions: [ - { - module: { - __dr: 'RelayFlightExampleQuery.graphql', - }, - variables: { - id: '2', - }, - }, - ], - tree: { - readRoot, - }, - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - 'node(id:"2")': { - __ref: '2', - }, - }, - }; - const source = RelayRecordSource.create(data); - const target = RelayRecordSource.create(); - const status = check( - () => source, - () => target, - INTERNAL_ACTOR_IDENTIFIER_DO_NOT_USE, - createNormalizationSelector( - getRequest(FlightQuery).operation, - ROOT_ID, - { - count: 10, - id: '1', - }, - ), - [], - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - defaultGetDataID, - ); - expect(status).toEqual({ - status: 'available', - mostRecentlyInvalidatedAt: null, - }); - expect(target.size()).toBe(0); - }); - - it('returns missing when the Flight field exists but has not been processed', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 
'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __id: 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - __typename: 'ReactFlightComponent', - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const source = RelayRecordSource.create(data); - const target = RelayRecordSource.create(); - const status = check( - () => source, - () => target, - INTERNAL_ACTOR_IDENTIFIER_DO_NOT_USE, - createNormalizationSelector( - getRequest(FlightQuery).operation, - ROOT_ID, - { - count: 10, - id: '1', - }, - ), - [], - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - defaultGetDataID, - ); - expect(status).toEqual({ - status: 'missing', - mostRecentlyInvalidatedAt: null, - }); - expect(target.size()).toBe(0); - }); - - it('returns missing when the Flight field is null in the store', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - null, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const source = RelayRecordSource.create(data); - const target = RelayRecordSource.create(); - const status = check( - () => source, - () => target, - INTERNAL_ACTOR_IDENTIFIER_DO_NOT_USE, - createNormalizationSelector( - 
getRequest(FlightQuery).operation, - ROOT_ID, - { - count: 10, - id: '1', - }, - ), - [], - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - defaultGetDataID, - ); - expect(status).toEqual({ - status: 'missing', - mostRecentlyInvalidatedAt: null, - }); - expect(target.size()).toBe(0); - }); - - it('returns missing when the Flight field is undefined in the store', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - undefined, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const source = RelayRecordSource.create(data); - const target = RelayRecordSource.create(); - const status = check( - () => source, - () => target, - INTERNAL_ACTOR_IDENTIFIER_DO_NOT_USE, - createNormalizationSelector( - getRequest(FlightQuery).operation, - ROOT_ID, - { - count: 10, - id: '1', - }, - ), - [], - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - defaultGetDataID, - ); - expect(status).toEqual({ - status: 'missing', - mostRecentlyInvalidatedAt: null, - }); - expect(target.size()).toBe(0); - }); - - it('returns missing when the linked ReactFlightClientResponseRecord is missing', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - 
}, - }, - }; - const source = RelayRecordSource.create(data); - const target = RelayRecordSource.create(); - const status = check( - () => source, - () => target, - INTERNAL_ACTOR_IDENTIFIER_DO_NOT_USE, - createNormalizationSelector( - getRequest(FlightQuery).operation, - ROOT_ID, - { - count: 10, - id: '1', - }, - ), - [], - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - defaultGetDataID, - ); - expect(status).toEqual({ - status: 'missing', - mostRecentlyInvalidatedAt: null, - }); - expect(target.size()).toBe(0); - }); - }); - describe('ActorChange', () => { beforeEach(() => { Query = graphql` diff --git a/packages/relay-runtime/store/__tests__/RelayErrorTrie-test.js b/packages/relay-runtime/store/__tests__/RelayErrorTrie-test.js new file mode 100644 index 0000000000000..ea1fd67723579 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/RelayErrorTrie-test.js @@ -0,0 +1,466 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); +const { + SELF, + buildErrorTrie, + getErrorsByKey, + getNestedErrorTrieByKey, +} = require('../RelayErrorTrie'); +const nullthrows = require('nullthrows'); + +describe('when field error handling is disabled', () => { + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = false; + }); + + describe('buildErrorTrie', () => { + it('always returns an empty result', () => { + expect( + buildErrorTrie([ + { + message: 'An error on the name field!', + path: ['people', 0, 'name'], + }, + { + message: 'An error on the age field!', + path: ['people', 0, 'age'], + }, + ]), + ).toBeNull(); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); + + describe('when field error handling is enabled', () => { + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + }); + + describe('buildErrorTrie', () => { + it('can handle when two errors have common ancestor', () => { + expect( + buildErrorTrie([ + { + message: 'An error on the name field!', + path: ['people', 0, 'name'], + }, + { + message: 'An error on the age field!', + path: ['people', 0, 'age'], + }, + ]), + ).toEqual( + new Map([ + [ + 'people', + new Map([ + [ + 0, + new Map([ + [ + 'name', + [ + { + message: 'An error on the name field!', + }, + ], + ], + [ + 'age', + [ + { + message: 'An error on the age field!', + }, + ], + ], + ]), + ], + ]), + ], + ]), + ); + }); + + it('can handle when there is more than one error with the same path', () => { + expect( + buildErrorTrie([ + { + message: 'An error on the name field!', + path: ['people', 0, 'name'], + }, + { + message: 'Another error on the name 
field!', + path: ['people', 0, 'name'], + }, + { + message: 'A third error on the name field!', + path: ['people', 0, 'name'], + }, + ]), + ).toEqual( + new Map([ + [ + 'people', + new Map([ + [ + 0, + new Map([ + [ + 'name', + [ + { + message: 'An error on the name field!', + }, + { + message: 'Another error on the name field!', + }, + { + message: 'A third error on the name field!', + }, + ], + ], + ]), + ], + ]), + ], + ]), + ); + }); + + it("can handle errors when a subsequent error's path points to an ancestor", () => { + expect( + buildErrorTrie([ + { + message: 'An error on the name field!', + path: ['people', 0, 'name'], + }, + { + message: 'An error on the age field!', + path: ['people', 0, 'age'], + }, + { + message: 'An error on the person!', + path: ['people', 0], + }, + ]), + ).toEqual( + new Map([ + [ + 'people', + new Map([ + [ + 0, + new Map([ + [ + 'name', + [ + { + message: 'An error on the name field!', + }, + ], + ], + [ + 'age', + [ + { + message: 'An error on the age field!', + }, + ], + ], + [ + SELF, + [ + { + message: 'An error on the person!', + }, + ], + ], + ]), + ], + ]), + ], + ]), + ); + }); + + it("can handle errors errors when a preceding error's path points to an ancestor", () => { + expect( + buildErrorTrie([ + { + message: 'An error on the person!', + path: ['people', 0], + }, + { + message: 'An error on the name field!', + path: ['people', 0, 'name'], + }, + { + message: 'An error on the age field!', + path: ['people', 0, 'age'], + }, + ]), + ).toEqual( + new Map([ + [ + 'people', + new Map([ + [ + 0, + new Map([ + [ + SELF, + [ + { + message: 'An error on the person!', + }, + ], + ], + [ + 'name', + [ + { + message: 'An error on the name field!', + }, + ], + ], + [ + 'age', + [ + { + message: 'An error on the age field!', + }, + ], + ], + ]), + ], + ]), + ], + ]), + ); + }); + }); + + describe('getErrorsByKey', () => { + it('returns the errors with the given key', () => { + expect( + getErrorsByKey( + nullthrows( + 
buildErrorTrie([ + { + message: 'An error on the name field!', + path: ['name'], + }, + { + message: 'Another error on the name field!', + path: ['name'], + }, + { + message: 'An error on the age field!', + path: ['age'], + }, + ]), + ), + 'name', + ), + ).toEqual([ + { + message: 'An error on the name field!', + }, + { + message: 'Another error on the name field!', + }, + ]); + }); + it('returns null when there are no errors with the given key', () => { + expect( + getErrorsByKey( + nullthrows( + buildErrorTrie([ + { + message: 'An error on the age field!', + path: ['age'], + }, + { + message: 'An error on the first name field!', + path: ['name', 'first'], + }, + { + message: 'An error on the last name field!', + path: ['name', 'last'], + }, + ]), + ), + 'favorite_color', + ), + ).toBe(null); + }); + it('returns nested errors with the given key', () => { + expect( + getErrorsByKey( + nullthrows( + buildErrorTrie([ + { + message: 'An error on the age field!', + path: ['age'], + }, + { + message: 'An error on the first name field!', + path: ['name', 'first'], + }, + { + message: 'An error on the last name field!', + path: ['name', 'last'], + }, + ]), + ), + 'name', + ), + ).toEqual([ + { + message: 'An error on the first name field!', + path: ['first'], + }, + { + message: 'An error on the last name field!', + path: ['last'], + }, + ]); + }); + it('returns deeply nested errors ', () => { + expect( + getErrorsByKey( + nullthrows( + buildErrorTrie([ + { + message: 'An error on the age field!', + path: ['age'], + }, + { + message: 'An error on the name field!', + path: ['friends', 0, 'name'], + }, + { + message: 'An error on the first name field!', + path: ['friends', 0, 'name', 'first'], + }, + { + message: 'An error on the last name field!', + path: ['friends', 0, 'name', 'last'], + }, + ]), + ), + 'friends', + ), + ).toEqual([ + { + message: 'An error on the name field!', + path: [0, 'name'], + }, + { + message: 'An error on the first name field!', + path: [0, 
'name', 'first'], + }, + { + message: 'An error on the last name field!', + path: [0, 'name', 'last'], + }, + ]); + }); + }); + + describe('getNestedErrorTrieByKey', () => { + it('returns the nested errors that are prefixed by the given key', () => { + expect( + getNestedErrorTrieByKey( + nullthrows( + buildErrorTrie([ + { + message: 'An error on the first person!', + path: ['people', 0], + }, + { + message: 'An error on the second person!', + path: ['people', 1], + }, + { + message: 'An error on the pets field!', + path: ['pets'], + }, + ]), + ), + 'people', + ), + ).toEqual( + new Map([ + [ + 0, + [ + { + message: 'An error on the first person!', + }, + ], + ], + [ + 1, + [ + { + message: 'An error on the second person!', + }, + ], + ], + ]), + ); + }); + it('returns an empty trie when there are no nested errors prefixed by the given key', () => { + expect( + getNestedErrorTrieByKey( + nullthrows( + buildErrorTrie([ + { + message: 'An error on the first person!', + path: ['people', 0], + }, + { + message: 'An error on the second person!', + path: ['people', 1], + }, + { + message: 'An error on the pets field!', + path: ['pets'], + }, + ]), + ), + 'pets', + ), + ).toBeNull(); + }); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); +}); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-CommitPayload-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-CommitPayload-test.js index c9334d7857a56..4773cc4e60215 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-CommitPayload-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-CommitPayload-test.js @@ -272,7 +272,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( RelayModernEnvironmentCommitPayloadTest4UserFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); 
expect(fragmentCallback.mock.calls.length).toBe(1); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Connection-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Connection-test.js index 53b3f92656fe1..dcb96cd423de7 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Connection-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Connection-test.js @@ -177,7 +177,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-DynamicConnectionKey-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-DynamicConnectionKey-test.js index 49e017fa3f469..c230bdddb83a5 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-DynamicConnectionKey-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-DynamicConnectionKey-test.js @@ -188,7 +188,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutation-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutation-test.js index 2542b8cc1fa9d..ea8e3be40d71c 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutation-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutation-test.js @@ -27,6 +27,7 @@ const { const {createReaderSelector} = require('../RelayModernSelector'); const RelayModernStore = require('../RelayModernStore'); const RelayRecordSource = require('../RelayRecordSource'); +const {RelayFeatureFlags} = require('relay-runtime'); const { disallowWarnings, expectWarningWillFire, @@ -53,6 +54,7 @@ 
describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( let source; let store; let subject; + let networkSource; let variables; let queryVariables; @@ -122,11 +124,14 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( ); // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode - fetch = jest.fn((_query, _variables, _cacheConfig) => - // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode - RelayObservable.create(sink => { - subject = sink; - }), + networkSource = RelayObservable.create(sink => { + subject = sink; + }); + // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode + fetch = jest.fn( + (_query, _variables, _cacheConfig) => + // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode + networkSource, ); source = RelayRecordSource.create(); store = new RelayModernStore(source); @@ -546,6 +551,66 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( // and thus the snapshot has missing data expect(callback.mock.calls[0][0].isMissingData).toEqual(true); }); + + it('reverts the optimistic update and commits the prefetched server payload', () => { + RelayFeatureFlags.PROCESS_OPTIMISTIC_UPDATE_BEFORE_SUBSCRIPTION = true; + const selector = createReaderSelector( + CommentFragment, + commentID, + {}, + queryOperation.request, + ); + const snapshot = environment.lookup(selector); + const callback = jest.fn<[Snapshot], void>(); + environment.subscribe(snapshot, callback); + + // This is to mock the prefetched payload that exists before the network source is being subscribed to + networkSource = RelayObservable.create(sink => { + sink.next({ + data: { + commentCreate: { + comment: { + id: commentID, + body: { + text: 'Gave Relay', + }, + }, + }, + }, + }); + sink.complete(); + }); + + callback.mockClear(); + environment + .executeMutation({ + operation, + optimisticUpdater: _store => { + const body = 
_store.get(commentID)?.getLinkedRecord('body'); + // When optimistic updater happens after the payload is commited, these records already exist + if (body != null) { + body.setValue('Give Relay', 'text'); + } else { + const comment = _store.create(commentID, 'Comment'); + comment.setValue(commentID, 'id'); + const body = _store.create(commentID + '.text', 'Text'); + comment.setLinkedRecord(body, 'body'); + body.setValue('Give Relay', 'text'); + } + }, + }) + .subscribe(callbacks); + + expect(complete).toBeCalled(); + expect(error).not.toBeCalled(); + expect(callback.mock.calls.length).toBe(2); + expect(callback.mock.calls[1][0].data).toEqual({ + id: commentID, + body: { + text: 'Gave Relay', + }, + }); + }); }); }, ); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithFlight-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithFlight-test.js deleted file mode 100644 index 3216031a929f9..0000000000000 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithFlight-test.js +++ /dev/null @@ -1,570 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow - * @format - * @oncall relay - */ - -'use strict'; -import type { - ReactFlightServerError, - ReactFlightServerTree, -} from '../../network/RelayNetworkTypes'; -import type {GraphQLResponse} from '../../network/RelayNetworkTypes'; -import type {Snapshot} from '../RelayStoreTypes'; -import type {RequestParameters} from 'relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from 'relay-runtime/util/RelayRuntimeTypes'; - -const { - MultiActorEnvironment, - getActorIdentifier, -} = require('../../multi-actor-environment'); -const RelayNetwork = require('../../network/RelayNetwork'); -const RelayObservable = require('../../network/RelayObservable'); -const {graphql} = require('../../query/GraphQLTag'); -const RelayModernEnvironment = require('../RelayModernEnvironment'); -const { - createOperationDescriptor, -} = require('../RelayModernOperationDescriptor'); -const RelayModernStore = require('../RelayModernStore'); -const RelayRecordSource = require('../RelayRecordSource'); -const {RelayFeatureFlags} = require('relay-runtime'); -const {disallowWarnings, expectToWarn} = require('relay-test-utils-internal'); - -disallowWarnings(); - -describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( - 'executeMutation() with Flight field', - environmentType => { - let callbacks; - let complete; - let environment; - let error; - let fetch; - let innerQueryOperation; - let innerQueryVariables; - let next; - let operation; - let operationLoader; - let queryOperation; - let queryVariables; - let reactFlightPayloadDeserializer; - let RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery; - let RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery; - let RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation; - let source; - let store; - let storyID; - let subject; - let variables; - - describe(environmentType, () => { - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = 
true; - - storyID = 'story-id'; - - RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation = graphql` - mutation RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation( - $input: StoryUpdateInput! - $count: Int! - ) { - storyUpdate(input: $input) { - story { - id - body { - text - } - flightComponent(condition: true, count: $count, id: "x") - } - } - } - `; - - RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery = graphql` - query RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery( - $id: ID! - $count: Int! - ) { - node(id: $id) { - ... on Story { - flightComponent(condition: true, count: $count, id: "x") - } - } - } - `; - - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery = graphql` - query RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery( - $id: ID! - ) { - node(id: $id) { - ... on User { - name - } - } - } - `; - variables = { - input: { - body: { - text: 'Hello world!', - }, - }, - count: 5, - }; - queryVariables = { - id: storyID, - count: 5, - }; - innerQueryVariables = { - id: '2', - }; - - reactFlightPayloadDeserializer = jest.fn( - (payload: ReactFlightServerTree) => { - return { - readRoot() { - return payload; - }, - }; - }, - ); - complete = jest.fn<[], mixed>(); - error = jest.fn<[Error], mixed>(); - next = jest.fn<[GraphQLResponse], mixed>(); - callbacks = {complete, error, next}; - fetch = ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => { - return RelayObservable.create(sink => { - subject = sink; - }); - }; - operationLoader = { - load: jest.fn(() => - Promise.resolve( - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery, - ), - ), - get: jest.fn( - () => - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery, - ), - }; - source = RelayRecordSource.create(); - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - store = new RelayModernStore(source, {operationLoader}); - 
const multiActorEnvironment = new MultiActorEnvironment({ - createNetworkForActor: _actorID => RelayNetwork.create(fetch), - createStoreForActor: _actorID => store, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - reactFlightPayloadDeserializer, - }); - environment = - environmentType === 'MultiActorEnvironment' - ? multiActorEnvironment.forActor(getActorIdentifier('actor:1234')) - : new RelayModernEnvironment({ - network: RelayNetwork.create(fetch), - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - store, - reactFlightPayloadDeserializer, - }); - - operation = createOperationDescriptor( - RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation, - variables, - ); - queryOperation = createOperationDescriptor( - RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery, - queryVariables, - ); - innerQueryOperation = createOperationDescriptor( - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery, - innerQueryVariables, - ); - - environment.execute({operation: queryOperation}).subscribe({}); - subject.next({ - data: { - node: { - id: storyID, - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }); - jest.runAllTimers(); - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - describe('when successful', () => { - it('updates Flight fields that were previously queried for', () => { - // precondition - FlightQuery - const snapshot = environment.lookup(queryOperation.fragment); - 
const callback = jest.fn<[Snapshot], void>(); - environment.subscribe(snapshot, callback); - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - expect(snapshot.data.node.flightComponent.readRoot()).toEqual([ - {key: null, props: {foo: 1}, ref: null, type: 'div'}, - ]); - - // precondition - InnerQuery - const innerSnapshot = environment.lookup( - innerQueryOperation.fragment, - ); - const innerCallback = jest.fn<[Snapshot], void>(); - environment.subscribe(innerSnapshot, innerCallback); - expect(innerSnapshot.data).toEqual({node: {name: 'Lauren'}}); - - environment.executeMutation({operation}).subscribe(callbacks); - callback.mockClear(); - subject.next({ - data: { - storyUpdate: { - story: { - id: storyID, - body: { - text: 'Hello world!', - }, - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 2, bar: 'abc', baz: [1, 2, 3]}, // updated - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren Tan', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }, - }); - subject.complete(); - - expect(complete).toBeCalled(); - expect(error).not.toBeCalled(); - expect(callback).toHaveBeenCalledTimes(1); - expect( - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - callback.mock.calls[0][0].data.node.flightComponent.readRoot(), - ).toEqual([ - { - key: null, - props: {foo: 2, bar: 'abc', baz: [1, 2, 3]}, - ref: null, - type: 'div', - }, - ]); - - // This verifies that data for client components included in the payload are - // also updated as a result of the mutation. 
- expect(innerCallback).toHaveBeenCalledTimes(1); - expect(innerCallback).toHaveBeenLastCalledWith( - expect.objectContaining({ - data: { - node: { - name: 'Lauren Tan', - }, - }, - }), - ); - }); - }); - - describe('when server errors are encountered', () => { - describe('and ReactFlightServerErrorHandler is specified', () => { - let reactFlightServerErrorHandler; - beforeEach(() => { - reactFlightServerErrorHandler = jest.fn( - (status: string, errors: Array) => { - const err = new Error(`${status}: ${errors[0].message}`); - err.stack = errors[0].stack; - throw err; - }, - ); - const multiActorEnvironment = new MultiActorEnvironment({ - createNetworkForActor: _actorID => RelayNetwork.create(fetch), - createStoreForActor: _actorID => store, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - reactFlightPayloadDeserializer, - reactFlightServerErrorHandler, - }); - environment = - environmentType === 'MultiActorEnvironment' - ? multiActorEnvironment.forActor( - getActorIdentifier('actor:1234'), - ) - : new RelayModernEnvironment({ - network: RelayNetwork.create(fetch), - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - store, - reactFlightPayloadDeserializer, - reactFlightServerErrorHandler, - }); - }); - it('calls ReactFlightServerErrorHandler', () => { - // precondition - FlightQuery - const snapshot = environment.lookup(queryOperation.fragment); - const callback = jest.fn<[Snapshot], void>(); - environment.subscribe(snapshot, callback); - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - expect(snapshot.data.node.flightComponent.readRoot()).toEqual([ - {key: null, props: {foo: 1}, ref: null, type: 'div'}, - ]); - - // precondition - InnerQuery - const innerSnapshot = environment.lookup( - innerQueryOperation.fragment, - ); - const innerCallback = jest.fn<[Snapshot], void>(); - environment.subscribe(innerSnapshot, innerCallback); - 
expect(innerSnapshot.data).toEqual({node: {name: 'Lauren'}}); - - environment.executeMutation({operation}).subscribe(callbacks); - callback.mockClear(); - subject.next({ - data: { - storyUpdate: { - story: { - id: storyID, - body: { - text: 'Hello world!', - }, - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }, - }, - }); - subject.complete(); - - expect(complete).not.toBeCalled(); - expect(error).toBeCalled(); - expect(callback).toHaveBeenCalledTimes(0); - expect(reactFlightServerErrorHandler).toHaveBeenCalledWith( - 'FAIL_JS_ERROR', - expect.arrayContaining([ - expect.objectContaining({ - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }), - ]), - ); - }); - }); - describe('and no ReactFlightServerErrorHandler is specified', () => { - it('warns', () => { - // precondition - FlightQuery - const snapshot = environment.lookup(queryOperation.fragment); - const callback = jest.fn<[Snapshot], void>(); - environment.subscribe(snapshot, callback); - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - expect(snapshot.data.node.flightComponent.readRoot()).toEqual([ - {key: null, props: {foo: 1}, ref: null, type: 'div'}, - ]); - - // precondition - InnerQuery - const innerSnapshot = environment.lookup( - innerQueryOperation.fragment, - ); - const innerCallback = jest.fn<[Snapshot], void>(); - environment.subscribe(innerSnapshot, innerCallback); - expect(innerSnapshot.data).toEqual({node: {name: 'Lauren'}}); - - environment.executeMutation({operation}).subscribe(callbacks); - callback.mockClear(); - expectToWarn( - `RelayResponseNormalizer: Received server errors for field \`flightComponent\`. 
- -Something threw an error on the server -Error - at :1:1`, - () => { - subject.next({ - data: { - storyUpdate: { - story: { - id: storyID, - body: { - text: 'Hello world!', - }, - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }, - }, - }); - }, - ); - subject.complete(); - - expect(complete).toBeCalled(); - expect(error).not.toBeCalled(); - expect(callback).toHaveBeenCalledTimes(1); - }); - }); - }); - - describe('when the row protocol is malformed', () => { - it('warns when the row protocol is null', () => { - // precondition - FlightQuery - const snapshot = environment.lookup(queryOperation.fragment); - const callback = jest.fn<[Snapshot], void>(); - environment.subscribe(snapshot, callback); - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - expect(snapshot.data.node.flightComponent.readRoot()).toEqual([ - {key: null, props: {foo: 1}, ref: null, type: 'div'}, - ]); - - // precondition - InnerQuery - const innerSnapshot = environment.lookup( - innerQueryOperation.fragment, - ); - const innerCallback = jest.fn<[Snapshot], void>(); - environment.subscribe(innerSnapshot, innerCallback); - expect(innerSnapshot.data).toEqual({node: {name: 'Lauren'}}); - - environment.executeMutation({operation}).subscribe(callbacks); - callback.mockClear(); - expectToWarn( - 'RelayResponseNormalizer: Expected `tree` not to be null. 
This typically indicates that a fatal server error prevented any Server Component rows from being written.', - () => { - subject.next({ - data: { - storyUpdate: { - story: { - id: storyID, - body: { - text: 'Hello world!', - }, - __typename: 'Story', - flightComponent: { - status: 'UNEXPECTED_ERROR', - tree: null, - queries: [], - errors: [], - fragments: [], - }, - }, - }, - }, - }); - }, - ); - subject.complete(); - - expect(complete).toBeCalled(); - expect(error).not.toBeCalled(); - expect(innerCallback).toHaveBeenCalledTimes(0); - expect(callback).toHaveBeenCalledTimes(1); - expect(callback.mock.calls[0][0].data).toEqual({ - node: {flightComponent: null}, - }); - expect(callback.mock.calls[0][0].isMissingData).toEqual(false); - - // Server Component is read out as null - const latestSnapshot = environment.lookup(queryOperation.fragment); - expect(latestSnapshot.isMissingData).toEqual(false); - expect(latestSnapshot.data).toEqual({ - node: { - flightComponent: null, - }, - }); - }); - }); - }); - }, -); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithMatch-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithMatch-test.js index 53c86ce51b7bb..047a27eaefdca 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithMatch-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteMutationWithMatch-test.js @@ -312,14 +312,13 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( actor: { name: 'actor-name', nameRenderer: { - __id: 'client:4:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:4:nameRenderer(supported:"34hjiS")', __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteMutationWithMatchTestMarkdownUserNameRenderer_name: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 
'MarkdownUserNameRenderer.react', }, }, @@ -616,14 +615,13 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( actor: { name: 'optimistic-actor-name', nameRenderer: { - __id: 'client:4:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:4:nameRenderer(supported:"34hjiS")', __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteMutationWithMatchTestMarkdownUserNameRenderer_name: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, @@ -654,9 +652,9 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( it('optimistically creates @match fields and loads resources', () => { operationLoader.load.mockImplementationOnce(() => { return new Promise(resolve => { - setImmediate(() => { + setTimeout(() => { resolve(markdownRendererNormalizationFragment); - }); + }, 0); }); }); environment @@ -676,14 +674,13 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( actor: { name: 'optimistic-actor-name', nameRenderer: { - __id: 'client:4:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:4:nameRenderer(supported:"34hjiS")', __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteMutationWithMatchTestMarkdownUserNameRenderer_name: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, @@ -765,14 +762,13 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( actor: { name: 'actor-name', nameRenderer: { - __id: 'client:4:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:4:nameRenderer(supported:"34hjiS")', __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteMutationWithMatchTestMarkdownUserNameRenderer_name: {}, }, __fragmentOwner: operation.request, - 
__isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteSubscriptionWithMatch-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteSubscriptionWithMatch-test.js index 128b0207d87f5..ac4d96afe2d5d 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteSubscriptionWithMatch-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteSubscriptionWithMatch-test.js @@ -291,17 +291,17 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( actor: { name: 'actor-name', nameRenderer: { - __id: 'client:4:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:4:nameRenderer(supported:"34hjiS")', __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteSubscriptionWithMatchTestMarkdownUserNameRenderer_name: - {}, + { + // TODO T96653810: Correctly detect reading from root of mutation/subscription + $isWithinUnmatchedTypeRefinement: true, // should be false + }, }, __fragmentOwner: operation.request, - // TODO T96653810: Correctly detect reading from root of mutation/subscription - __isWithinUnmatchedTypeRefinement: true, // should be false - __module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlight-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlight-test.js deleted file mode 100644 index feb0aa10b0e71..0000000000000 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlight-test.js +++ /dev/null @@ -1,776 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; -import type { - ReactFlightServerError, - ReactFlightServerTree, -} from '../../network/RelayNetworkTypes'; -import type {GraphQLResponse} from '../../network/RelayNetworkTypes'; -import type {RequestParameters} from 'relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from 'relay-runtime/util/RelayRuntimeTypes'; - -const { - MultiActorEnvironment, - getActorIdentifier, -} = require('../../multi-actor-environment'); -const RelayNetwork = require('../../network/RelayNetwork'); -const RelayObservable = require('../../network/RelayObservable'); -const {graphql} = require('../../query/GraphQLTag'); -const RelayModernEnvironment = require('../RelayModernEnvironment'); -const { - createOperationDescriptor, -} = require('../RelayModernOperationDescriptor'); -const RelayModernStore = require('../RelayModernStore'); -const RelayRecordSource = require('../RelayRecordSource'); -const {RelayFeatureFlags} = require('relay-runtime'); -const {disallowWarnings, expectToWarn} = require('relay-test-utils-internal'); - -disallowWarnings(); - -describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( - 'execute() with Flight field', - environmentType => { - let callbacks; - let complete; - let dataSource; - let environment; - let error; - let fetch; - let FlightQuery; - let innerOperation; - let InnerQuery; - let next; - let operation; - let operationLoader; - let reactFlightPayloadDeserializer; - let source; - let store; - - describe(environmentType, () => { - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query RelayModernEnvironmentExecuteWithFlightTestFlightQuery( - $id: ID! - $count: Int! - ) { - node(id: $id) { - ... 
on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - - InnerQuery = graphql` - query RelayModernEnvironmentExecuteWithFlightTestInnerQuery( - $id: ID! - ) { - node(id: $id) { - ... on User { - name - } - } - } - `; - - reactFlightPayloadDeserializer = jest.fn( - (payload: ReactFlightServerTree) => { - return { - readRoot() { - return payload; - }, - }; - }, - ); - complete = jest.fn<[], mixed>(); - error = jest.fn<[Error], mixed>(); - next = jest.fn<[GraphQLResponse], mixed>(); - callbacks = {complete, error, next}; - fetch = ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => { - return RelayObservable.create<$FlowFixMe>(sink => { - dataSource = sink; - }); - }; - operationLoader = { - load: jest.fn(() => Promise.resolve(InnerQuery)), - get: jest.fn(() => InnerQuery), - }; - source = RelayRecordSource.create(); - // DataChecker receives its operationLoader from the store, not the - // environment. So we have to pass it here as well. - store = new RelayModernStore(source, { - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - gcReleaseBufferSize: 0, - }); - const multiActorEnvironment = new MultiActorEnvironment({ - createNetworkForActor: _actorID => RelayNetwork.create(fetch), - createStoreForActor: _actorID => store, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - reactFlightPayloadDeserializer, - }); - environment = - environmentType === 'MultiActorEnvironment' - ? 
multiActorEnvironment.forActor(getActorIdentifier('actor:1234')) - : new RelayModernEnvironment({ - network: RelayNetwork.create(fetch), - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - store, - reactFlightPayloadDeserializer, - }); - - operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - innerOperation = createOperationDescriptor(InnerQuery, {id: '2'}); - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('loads the Flight field and normalizes/publishes the field payload', () => { - environment.retain(operation); - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(1); - - store.scheduleGC(); - jest.runAllTimers(); - - expect(environment.lookup(innerOperation.fragment).data).toEqual({ - node: { - name: 'Lauren', - }, - }); - expect( - environment - .lookup(operation.fragment) - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - .data.node.flightComponent.readRoot(), - ).toEqual([{key: null, props: {foo: 1}, ref: null, type: 'div'}]); - }); - - it('updates the Flight field on refetch', () => { - environment.retain(operation); - 
environment.execute({operation}).subscribe(callbacks); - const initialPayload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - const nextPayload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 2, bar: 'abc', baz: [1, 2, 3]}, // updated - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - dataSource.next(initialPayload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(1); - - store.scheduleGC(); // Invoke gc to verify that data is retained - jest.runAllTimers(); - - expect(environment.lookup(innerOperation.fragment).data).toEqual({ - node: { - name: 'Lauren', - }, - }); - - dataSource.next(nextPayload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(2); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(2); - expect( - environment - .lookup(operation.fragment) - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - 
.data.node.flightComponent.readRoot(), - ).toEqual([ - { - key: null, - props: {foo: 2, bar: 'abc', baz: [1, 2, 3]}, - ref: null, - type: 'div', - }, - ]); - }); - - describe('when server errors are encountered', () => { - describe('and ReactFlightServerErrorHandler is specified', () => { - let reactFlightServerErrorHandler; - beforeEach(() => { - reactFlightServerErrorHandler = jest.fn( - (status: string, errors: Array) => { - const err = new Error(`${status}: ${errors[0].message}`); - err.stack = errors[0].stack; - throw err; - }, - ); - const multiActorEnvironment = new MultiActorEnvironment({ - createNetworkForActor: _actorID => RelayNetwork.create(fetch), - createStoreForActor: _actorID => store, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - reactFlightPayloadDeserializer, - reactFlightServerErrorHandler, - }); - environment = - environmentType === 'MultiActorEnvironment' - ? multiActorEnvironment.forActor( - getActorIdentifier('actor:1234'), - ) - : new RelayModernEnvironment({ - network: RelayNetwork.create(fetch), - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - store, - reactFlightPayloadDeserializer, - reactFlightServerErrorHandler, - }); - }); - - it('calls ReactFlightServerErrorHandler', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(0); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(1); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(0); - expect(reactFlightServerErrorHandler).toHaveBeenCalledWith( - 
'FAIL_JS_ERROR', - expect.arrayContaining([ - expect.objectContaining({ - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }), - ]), - ); - }); - }); - - describe('no ReactFlightServerErrorHandler is specified', () => { - it('warns', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }, - }; - expectToWarn( - `RelayResponseNormalizer: Received server errors for field \`flightComponent\`. - -Something threw an error on the server -Error - at :1:1`, - () => { - dataSource.next(payload); - }, - ); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(1); - }); - }); - }); - - describe('when checking availability', () => { - it('returns available if all data exists in the environment', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - expect(environment.check(innerOperation)).toEqual({ - status: 'available', - 
fetchTime: null, - }); - }); - - it('returns missing if `tree` is null in the payload', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: null, - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - - expectToWarn( - 'RelayResponseNormalizer: Expected `tree` not to be null. This typically indicates that a fatal server error prevented any Server Component rows from being written.', - () => { - dataSource.next(payload); - }, - ); - jest.runAllTimers(); - - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - expect(environment.check(innerOperation)).toEqual({ - status: 'missing', - }); - }); - - it('returns missing if `queries` is null in the payload', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: null, - errors: [], - fragments: [], - }, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - expect(environment.check(innerOperation)).toEqual({ - status: 'missing', - }); - }); - - it('returns missing if the inner query is missing data', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - 
], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - node: { - id: '2', - // name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '3', - }, - }, - ], - errors: [], - fragments: [], - }, - }, - }, - }; - expectToWarn( - 'RelayResponseNormalizer: Payload did not contain a value for field `name: name`. Check that you are parsing with the same query that was used to fetch the payload.', - () => { - dataSource.next(payload); - }, - ); - jest.runAllTimers(); - - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - expect(environment.check(innerOperation)).toEqual({ - status: 'missing', - }); - }); - - it('returns missing if the response is undefined', () => { - environment.execute({operation}).subscribe(callbacks); - const payload = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: undefined, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(0); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(1); - expect(error).toHaveBeenLastCalledWith( - expect.objectContaining({ - message: expect.stringMatching( - /Payload did not contain a value for field/, - ), - }), - ); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(0); - - const snapshot = environment.lookup(operation.fragment); - expect(snapshot.data).toMatchInlineSnapshot(` - Object { - "node": undefined, - } - `); - expect(snapshot.isMissingData).toEqual(true); - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - }); - - it('returns available if the response is null', () => { - environment.execute({operation}).subscribe(callbacks); - const payload = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: null, - }, - }, - }; - dataSource.next(payload); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(1); - 
expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(0); - - const snapshot = environment.lookup(operation.fragment); - expect(snapshot.data).toMatchInlineSnapshot(` - Object { - "node": Object { - "flightComponent": null, - }, - } - `); - expect(snapshot.isMissingData).toEqual(false); - expect(environment.check(operation)).toEqual({ - status: 'available', - fetchTime: null, - }); - }); - }); - - describe('when the response is malformed', () => { - it('warns if the row protocol is null', () => { - environment.execute({operation}).subscribe(callbacks); - const payload: $FlowFixMe = { - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'UNEXPECTED_ERROR', - tree: null, - queries: [], - errors: [], - fragments: [], - }, - }, - }, - }; - expectToWarn( - 'RelayResponseNormalizer: Expected `tree` not to be null. This typically indicates that a fatal server error prevented any Server Component rows from being written.', - () => { - dataSource.next(payload); - }, - ); - jest.runAllTimers(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - expect(reactFlightPayloadDeserializer).toBeCalledTimes(0); - - // Server Component is read out as null - const snapshot = environment.lookup(operation.fragment); - expect(snapshot.isMissingData).toEqual(false); - expect(snapshot.data).toEqual({ - node: { - flightComponent: null, - }, - }); - }); - }); - }); - }, -); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlightAndClientFragment-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlightAndClientFragment-test.js deleted file mode 100644 index f5c4604030a39..0000000000000 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithFlightAndClientFragment-test.js +++ /dev/null @@ -1,255 +0,0 @@ -/** - * Copyright (c) Meta 
Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; -import type {GraphQLResponse} from '../../network/RelayNetworkTypes'; -import type {NormalizationRootNode} from '../../util/NormalizationNode'; -import type {RequestParameters} from 'relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from 'relay-runtime/util/RelayRuntimeTypes'; - -const { - MultiActorEnvironment, - getActorIdentifier, -} = require('../../multi-actor-environment'); -const RelayNetwork = require('../../network/RelayNetwork'); -const RelayObservable = require('../../network/RelayObservable'); -const {graphql} = require('../../query/GraphQLTag'); -const RelayModernEnvironment = require('../RelayModernEnvironment'); -const { - createOperationDescriptor, -} = require('../RelayModernOperationDescriptor'); -const {getSingularSelector} = require('../RelayModernSelector'); -const RelayModernStore = require('../RelayModernStore'); -const RelayRecordSource = require('../RelayRecordSource'); -const nullthrows = require('nullthrows'); -const {RelayFeatureFlags} = require('relay-runtime'); -const {disallowWarnings} = require('relay-test-utils-internal'); - -disallowWarnings(); - -describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( - 'execute() with Flight field and client fragment', - environmentType => { - let callbacks; - let ClientFragment; - let ClientNormalizationFragment; - let complete; - let dataSource; - let environment; - let error; - let fetch; - let FlightQuery; - let next; - let operation; - let operationLoader; - let reactFlightPayloadDeserializer; - let resolveFragment; - let source; - let store; - - describe(environmentType, () => { - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query 
RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery( - $id: ID! - $count: Int! - ) { - node(id: $id) { - ... on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - - ClientNormalizationFragment = require('./__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql'); - ClientFragment = graphql` - fragment RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment on Story { - name - body { - text - } - } - `; - // Query that indirectly executed as a result of selecting the - // `flightComponent` field. - graphql` - query RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery( - $id: ID! - ) { - node(id: $id) { - ...RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment - @relay_client_component - } - } - `; - - // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode - reactFlightPayloadDeserializer = jest.fn(payload => { - return { - readRoot() { - return payload; - }, - }; - }); - complete = jest.fn<[], mixed>(); - error = jest.fn<[Error], mixed>(); - next = jest.fn<[GraphQLResponse], mixed>(); - callbacks = {complete, error, next}; - fetch = ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => { - // $FlowFixMe[missing-local-annot] Error found while enabling LTI on this file - return RelayObservable.create(sink => { - dataSource = sink; - }); - }; - operationLoader = { - // $FlowFixMe[missing-local-annot] error found when enabling Flow LTI mode - load: jest.fn(moduleName => { - return new Promise(resolve => { - resolveFragment = resolve; - }); - }), - get: jest.fn<[mixed], ?NormalizationRootNode>(), - }; - source = RelayRecordSource.create(); - // DataChecker receives its operationLoader from the store, not the - // environment. So we have to pass it here as well. 
- store = new RelayModernStore(source, { - // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode - operationLoader, - gcReleaseBufferSize: 0, - }); - const multiActorEnvironment = new MultiActorEnvironment({ - createNetworkForActor: _actorID => RelayNetwork.create(fetch), - createStoreForActor: _actorID => store, - // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode - operationLoader, - reactFlightPayloadDeserializer, - }); - environment = - environmentType === 'MultiActorEnvironment' - ? multiActorEnvironment.forActor(getActorIdentifier('actor:1234')) - : new RelayModernEnvironment({ - network: RelayNetwork.create(fetch), - // $FlowFixMe[incompatible-call] error found when enabling Flow LTI mode - operationLoader, - store, - reactFlightPayloadDeserializer, - }); - operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('loads the Flight field and normalizes/publishes the field payload', () => { - environment.execute({operation}).subscribe(callbacks); - dataSource.next({ - data: { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [], - errors: [], - fragments: [ - { - module: { - __dr: 'RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql', - }, - __id: '3', - __typename: 'Story', - response: { - data: { - id: '3', - __typename: 'Story', - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }, - }, - variables: { - id: '3', - }, - }, - ], - }, - }, - }, - extensions: { - is_final: true, - }, - }); - dataSource.complete(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(0); - expect(error).toBeCalledTimes(0); - 
expect(reactFlightPayloadDeserializer).toBeCalledTimes(1); - - const querySnapshot = environment.lookup(operation.fragment); - expect( - // $FlowFixMe[incompatible-use] readRoot() to verify that it updated - querySnapshot.data.node?.flightComponent.readRoot(), - ).toEqual([{key: null, props: {foo: 1}, ref: null, type: 'div'}]); - - // This is the fragment ref we expect to be sent via the Server - // Component as a prop to the Client Component - const fragmentRef = { - __id: '3', - __fragments: {[ClientFragment.name]: {}}, - __fragmentOwner: operation.request, - }; - const selector = nullthrows( - getSingularSelector(ClientFragment, fragmentRef), - ); - const initialFragmentSnapshot = environment.lookup(selector); - // Expect isMissingData initially as we have yet to receive the fragment's - // SplitNormalization AST - expect(initialFragmentSnapshot.isMissingData).toBe(true); - - resolveFragment(ClientNormalizationFragment); - jest.runAllTimers(); - - const fragmentSnapshot = environment.lookup(selector); - expect(fragmentSnapshot.isMissingData).toBe(false); - expect(fragmentSnapshot.data).toEqual({ - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }); - }); - }); - }, -); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatch-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatch-test.js index 9c6b63da5a221..7688e29124d3c 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatch-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatch-test.js @@ -200,7 +200,7 @@ describe('execute() a query with @match', () => { expect(operationSnapshot.data).toEqual({ node: { nameRenderer: { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:1:nameRenderer(supported:"34hjiS")', __typename: 
'MarkdownUserNameRenderer', __fragmentPropName: 'name', __fragments: { @@ -208,7 +208,6 @@ describe('execute() a query with @match', () => { {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatchAdditionalArguments-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatchAdditionalArguments-test.js index 275f71b52cf17..204526d34271b 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatchAdditionalArguments-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithMatchAdditionalArguments-test.js @@ -200,7 +200,7 @@ describe('execute() a query with @match with additional arguments', () => { expect(operationSnapshot.data).toEqual({ node: { nameRendererForContext: { - __id: 'client:1:nameRendererForContext(context:"HEADER",supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:1:nameRendererForContext(context:"HEADER",supported:"34hjiS")', __fragmentPropName: 'name', @@ -210,7 +210,6 @@ describe('execute() a query with @match with additional arguments', () => { }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModule-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModule-test.js index 7f293d0fad211..c1de9b6081c0a 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModule-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModule-test.js @@ -208,7 +208,6 @@ describe('execute() a query with @module', () => { }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, 
__module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleOnConcreteField-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleOnConcreteField-test.js index 53afde0c012ae..3ef1f6be503e0 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleOnConcreteField-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleOnConcreteField-test.js @@ -160,7 +160,6 @@ describe('execute() a query with @module on a field with a nullable concrete typ }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'FeedbackAuthor.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleWithKey-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleWithKey-test.js index 6611020304c8f..bc1c0fd18da00 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleWithKey-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithModuleWithKey-test.js @@ -211,7 +211,6 @@ describe('execute() a query with @module', () => { }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithNestedMatch-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithNestedMatch-test.js index f36b93db55a60..35a915ec472be 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithNestedMatch-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithNestedMatch-test.js @@ -216,7 +216,7 @@ describe('execute() a query with nested @match', () => { expect(operationSnapshot.data).toEqual({ 
node: { outerRenderer: { - __id: 'client:1:nameRenderer(supported:["MarkdownUserNameRenderer"])', + __id: 'client:1:nameRenderer(supported:"2aTHRe")', __fragmentPropName: 'name', __fragments: { @@ -225,7 +225,6 @@ describe('execute() a query with nested @match', () => { }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, @@ -329,7 +328,6 @@ describe('execute() a query with nested @match', () => { user: { innerRenderer: { __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __fragments: { @@ -337,7 +335,7 @@ describe('execute() a query with nested @match', () => { {}, }, - __id: 'client:2:nameRenderer(supported:["PlainUserNameRenderer"])', + __id: 'client:2:nameRenderer(supported:"1AwQS7")', __module_component: 'PlainUserNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingModule-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingModule-test.js index f209ed7d003e0..5d7a951d338b2 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingModule-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingModule-test.js @@ -205,7 +205,6 @@ describe('execute() multiple queries with overlapping @module-s', () => { }, __fragmentOwner: userOperation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, @@ -249,7 +248,6 @@ describe('execute() multiple queries with overlapping @module-s', () => { }, __fragmentOwner: userOperation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'PlainUserNameRenderer.react', }, }, @@ -296,7 +294,6 @@ describe('execute() multiple queries with overlapping @module-s', () => { }, __fragmentOwner: userOperation.request, - 
__isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, }, @@ -357,7 +354,6 @@ describe('execute() multiple queries with overlapping @module-s', () => { }, __fragmentOwner: actorOperation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownActorNameRenderer.react', }, }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingStream-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingStream-test.js index d83f997908355..28fd36854d482 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingStream-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithOverlappingStream-test.js @@ -184,7 +184,6 @@ describe('execute() a query with multiple @stream selections on the same record' RelayModernEnvironmentExecuteWithOverlappingStreamTestDeferFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __id: '1', }); const deferSelector = nullthrows( @@ -246,7 +245,6 @@ describe('execute() a query with multiple @stream selections on the same record' RelayModernEnvironmentExecuteWithOverlappingStreamTestDeferFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __id: '1', }); @@ -272,7 +270,6 @@ describe('execute() a query with multiple @stream selections on the same record' RelayModernEnvironmentExecuteWithOverlappingStreamTestDeferFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __id: '1', }); @@ -384,7 +381,6 @@ describe('execute() a query with multiple @stream selections on the same record' RelayModernEnvironmentExecuteWithOverlappingStreamTestDeferFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __id: '1', }); @@ -449,7 +445,6 @@ describe('execute() a query with multiple 
@stream selections on the same record' RelayModernEnvironmentExecuteWithOverlappingStreamTestDeferFragment: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __id: '1', }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithPluralMatch-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithPluralMatch-test.js index 7fd63e193e77f..58185f98a199c 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithPluralMatch-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithPluralMatch-test.js @@ -196,7 +196,7 @@ describe('execute() a query with plural @match', () => { node: { nameRenderers: [ { - __id: 'client:1:nameRenderers(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"]):0', + __id: 'client:1:nameRenderers(supported:"34hjiS"):0', __fragmentPropName: 'name', @@ -206,7 +206,6 @@ describe('execute() a query with plural @match', () => { }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __module_component: 'MarkdownUserNameRenderer.react', }, ], diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithProvidedVariable-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithProvidedVariable-test.js index 3366829e8c97d..03468c2e5453a 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithProvidedVariable-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithProvidedVariable-test.js @@ -198,7 +198,6 @@ describe('query with fragments that use provided variables', () => { expect(snapshot.data).toEqual({ node: { __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1: {}, }, @@ -230,7 +229,6 @@ describe('query with fragments that use provided variables', () 
=> { expect(snapshot.data).toEqual({ node: { __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1: {}, RelayModernEnvironmentExecuteWithProvidedVariableTest_profile2: {}, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithRelayClientComponent-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithRelayClientComponent-test.js deleted file mode 100644 index d567178869f14..0000000000000 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithRelayClientComponent-test.js +++ /dev/null @@ -1,334 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; -import type {GraphQLResponse} from '../../network/RelayNetworkTypes'; -import type {NormalizationRootNode} from '../../util/NormalizationNode'; -import type {RequestParameters} from 'relay-runtime/util/RelayConcreteNode'; -import type { - CacheConfig, - Variables, -} from 'relay-runtime/util/RelayRuntimeTypes'; - -const RelayNetwork = require('../../network/RelayNetwork'); -const RelayObservable = require('../../network/RelayObservable'); -const {graphql} = require('../../query/GraphQLTag'); -const RelayModernEnvironment = require('../RelayModernEnvironment'); -const { - createOperationDescriptor, -} = require('../RelayModernOperationDescriptor'); -const {getSingularSelector} = require('../RelayModernSelector'); -const RelayModernStore = require('../RelayModernStore'); -const RelayRecordSource = require('../RelayRecordSource'); -const nullthrows = require('nullthrows'); -const {RelayFeatureFlags} = require('relay-runtime'); -const { - disallowWarnings, - expectWarningWillFire, -} = 
require('relay-test-utils-internal'); - -disallowWarnings(); - -describe('execute() with @relay_client_component', () => { - let callbacks; - let ClientFragment; - let complete; - let dataSource; - let environment; - let error; - let fetch; - let network; - let next; - let operation; - let operationLoader; - let Query; - let shouldProcessClientComponents; - let source; - let store; - - beforeEach(() => { - ClientFragment = graphql` - fragment RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment on Story { - name - body { - text - } - } - `; - Query = graphql` - query RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery( - $id: ID! - ) { - node(id: $id) { - ...RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment - @relay_client_component - } - } - `; - - complete = jest.fn<[], mixed>(); - error = jest.fn<[Error], mixed>(); - next = jest.fn<[GraphQLResponse], mixed>(); - callbacks = {complete, error, next}; - fetch = ( - _query: RequestParameters, - _variables: Variables, - _cacheConfig: CacheConfig, - ) => { - // $FlowFixMe[missing-local-annot] Error found while enabling LTI on this file - return RelayObservable.create(sink => { - dataSource = sink; - }); - }; - network = RelayNetwork.create(fetch); - source = RelayRecordSource.create(); - operationLoader = { - load: jest.fn<[mixed], Promise>(), - get: jest.fn<[mixed], ?NormalizationRootNode>(), - }; - operation = createOperationDescriptor(Query, {id: '1'}); - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - describe('when the query contains @relay_client_component spreads', () => { - describe('and client component processing is enabled', () => { - beforeEach(() => { - shouldProcessClientComponents = true; - store = new RelayModernStore(source, { - operationLoader, - shouldProcessClientComponents, - gcReleaseBufferSize: 0, - }); - environment = new RelayModernEnvironment({ - network, - operationLoader, - store, - 
shouldProcessClientComponents, - }); - }); - it('executes and reads back results', () => { - environment.execute({operation}).subscribe(callbacks); - dataSource.next({ - data: { - node: { - id: '1', - __typename: 'Story', - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }, - }, - extensions: { - is_final: true, - }, - }); - dataSource.complete(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(1); - expect(error).toBeCalledTimes(0); - - const querySnapshot = environment.lookup(operation.fragment); - expect(querySnapshot.data).toEqual({ - node: { - __id: '1', - __fragments: { - [ClientFragment.name]: expect.anything(), - }, - __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, - }, - }); - - // fragment data is present - const selector = nullthrows( - getSingularSelector( - ClientFragment, - (querySnapshot.data?.node: $FlowFixMe), - ), - ); - const snapshot = environment.lookup(selector); - expect(snapshot.data).toEqual({ - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }); - expect(snapshot.isMissingData).toBe(false); - - // available before a GC - expect(environment.check(operation)).toEqual({ - fetchTime: null, - status: 'available', - }); - - // available after GC if the query is retained - const retain = environment.retain(operation); - (environment.getStore(): $FlowFixMe).scheduleGC(); - jest.runAllTimers(); - expect(environment.check(operation)).toEqual({ - fetchTime: null, - status: 'available', - }); - - // missing after being freed plus a GC run - retain.dispose(); - (environment.getStore(): $FlowFixMe).scheduleGC(); - jest.runAllTimers(); - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - }); - - it('handles missing fragment data', () => { - environment.execute({operation}).subscribe(callbacks); - 
expectWarningWillFire( - 'RelayResponseNormalizer: Payload did not contain a value for field `name: name`. Check that you are parsing with the same query that was used to fetch the payload.', - ); - expectWarningWillFire( - 'RelayResponseNormalizer: Payload did not contain a value for field `body: body`. Check that you are parsing with the same query that was used to fetch the payload.', - ); - dataSource.next({ - data: { - node: { - id: '1', - __typename: 'Story', - }, - }, - extensions: { - is_final: true, - }, - }); - dataSource.complete(); - - expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(1); - expect(error).toBeCalledTimes(0); - - const querySnapshot = environment.lookup(operation.fragment); - expect(querySnapshot.data).toEqual({ - node: { - __id: '1', - __fragments: { - [ClientFragment.name]: expect.anything(), - }, - __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, - }, - }); - - // fragment data is missing - const selector = nullthrows( - getSingularSelector( - ClientFragment, - (querySnapshot.data?.node: $FlowFixMe), - ), - ); - const snapshot = environment.lookup(selector); - expect(snapshot.data).toEqual({ - name: undefined, - body: undefined, - }); - expect(snapshot.isMissingData).toBe(true); - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - }); - }); - - describe('and client component processing is disabled', () => { - beforeEach(() => { - shouldProcessClientComponents = false; - store = new RelayModernStore(source, { - operationLoader, - shouldProcessClientComponents, - gcReleaseBufferSize: 0, - }); - environment = new RelayModernEnvironment({ - network, - operationLoader, - store, - shouldProcessClientComponents, - }); - }); - it('executes and reads back results', () => { - environment.execute({operation}).subscribe(callbacks); - dataSource.next({ - data: { - node: { - id: '1', - __typename: 'Story', - }, - }, - }); - dataSource.complete(); - - 
expect(next).toBeCalledTimes(1); - expect(complete).toBeCalledTimes(1); - expect(error).toBeCalledTimes(0); - - const querySnapshot = environment.lookup(operation.fragment); - expect(querySnapshot.data).toEqual({ - node: { - __id: '1', - __fragments: { - [ClientFragment.name]: expect.anything(), - }, - __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, - }, - }); - - // fragment data is not present - const selector = nullthrows( - getSingularSelector( - ClientFragment, - (querySnapshot.data?.node: $FlowFixMe), - ), - ); - const snapshot = environment.lookup(selector); - expect(snapshot.data).toEqual({ - body: undefined, - name: undefined, - }); - expect(snapshot.isMissingData).toBe(true); - - // fragment data is missing (intentionally), because this is a server - // query, and we never need client fragment data. the operation should - // still be considered available before a GC - expect(environment.check(operation)).toEqual({ - fetchTime: null, - status: 'available', - }); - - // available after GC if the query is retained - const retain = environment.retain(operation); - (environment.getStore(): $FlowFixMe).scheduleGC(); - jest.runAllTimers(); - expect(environment.check(operation)).toEqual({ - fetchTime: null, - status: 'available', - }); - - // missing after being freed plus a GC run - retain.dispose(); - (environment.getStore(): $FlowFixMe).scheduleGC(); - jest.runAllTimers(); - expect(environment.check(operation)).toEqual({ - status: 'missing', - }); - }); - }); - }); -}); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithSiblingAndNestedModule-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithSiblingAndNestedModule-test.js index 780906795a5b5..aca972974e0e1 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithSiblingAndNestedModule-test.js +++ 
b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithSiblingAndNestedModule-test.js @@ -453,7 +453,6 @@ function runWithFeatureFlags(setFlags: (typeof RelayFeatureFlags) => void) { user: { innerRenderer: { __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __fragments: { RelayModernEnvironmentExecuteWithSiblingAndNestedModuleTestPlainUserNameRenderer_name: diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithUndeclaredUnusedArgument-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithUndeclaredUnusedArgument-test.js index 6270c6bb8eaf4..13db879a0a3ca 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithUndeclaredUnusedArgument-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-ExecuteWithUndeclaredUnusedArgument-test.js @@ -127,7 +127,6 @@ describe('query with undeclared, unused fragment argument', () => { expect(snapshot.data).toEqual({ node: { __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayModernEnvironmentExecuteWithUndeclaredUnusedArgumentTestProfile: @@ -147,7 +146,6 @@ describe('query with undeclared, unused fragment argument', () => { expect(fragmentSnapshot.data).toEqual({ __id: '4', __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayModernEnvironmentExecuteWithUndeclaredUnusedArgumentTestProfilePhotoWrapper: @@ -169,7 +167,6 @@ describe('query with undeclared, unused fragment argument', () => { expect(innerSnapshot.data).toEqual({ __id: '4', __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayModernEnvironmentExecuteWithUndeclaredUnusedArgumentTestProfilePhoto: diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Lookup-test.js 
b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Lookup-test.js index 13848f13a8f4c..9972a4a38bf89 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Lookup-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-Lookup-test.js @@ -77,7 +77,6 @@ describe('lookup()', () => { __id: '4', __fragments: {RelayModernEnvironmentLookupTestChildFragment: {}}, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -95,7 +94,6 @@ describe('lookup()', () => { __id: '4', __fragments: {RelayModernEnvironmentLookupTestChildFragment: {}}, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, }); // $FlowFixMe[incompatible-use] @@ -163,10 +161,10 @@ describe('lookup()', () => { __typename: 'Comment', id: 'comment:1', commentBody: { - __id: 'client:comment:1:commentBody(supported:["PlainCommentBody"])', + __id: 'client:comment:1:commentBody(supported:"2Rll6p")', __typename: 'PlainCommentBody', text: { - __id: 'client:comment:1:commentBody(supported:["PlainCommentBody"]):text', + __id: 'client:comment:1:commentBody(supported:"2Rll6p"):text', __typename: 'Text', text: 'A comment!', }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-NoInline-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-NoInline-test.js index 9c4b3bc48b24e..ff749b26c1534 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-NoInline-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-NoInline-test.js @@ -189,7 +189,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragment.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -207,7 +206,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [InnerFragment.name]: expect.anything(), }, __fragmentOwner: operation.request, - 
__isWithinUnmatchedTypeRefinement: false, profile_picture: { uri: 'https://profile.png', }, @@ -279,7 +277,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragment.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -296,10 +293,14 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( expect(selectorData.data).toEqual({ __id: '1', __fragments: { - [InnerFragment.name]: expect.anything(), + [InnerFragment.name]: { + $isWithinUnmatchedTypeRefinement: true, // fragment type didn't match + cond: true, + fileExtension: 'JPG', + preset: null, + }, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: true, // fragment type didn't match }); // Inner data should be missing bc the type didn't match @@ -424,7 +425,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentWithArgs.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, username: { __id: '2', @@ -432,7 +432,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentWithArgs.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -450,7 +449,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [InnerFragment.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, profile_picture: { uri: 'https://profile.png', }, @@ -469,7 +467,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [InnerFragment.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, profile_picture: { uri: 'https://profile.png', }, @@ -616,7 +613,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentNested.name]: expect.anything(), }, __fragmentOwner: 
operation.request, - __isWithinUnmatchedTypeRefinement: false, }, zuck: { __id: '2', @@ -624,7 +620,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentNested.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, joe: { __id: '3', @@ -632,7 +627,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentNested.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -761,7 +755,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( [NoInlineFragmentWithStream.name]: expect.anything(), }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -951,7 +944,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( }, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); @@ -1175,7 +1167,6 @@ describe.each(['RelayModernEnvironment', 'MultiActorEnvironment'])( }, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __module_component: 'MarkdownUserNameRenderer.react', }, diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-PartiallyNormalizedDataObservabilityWithBatchedUpdates-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-PartiallyNormalizedDataObservabilityWithBatchedUpdates-test.js index c15c94e099a59..7e2145b3ad059 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-PartiallyNormalizedDataObservabilityWithBatchedUpdates-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-PartiallyNormalizedDataObservabilityWithBatchedUpdates-test.js @@ -322,7 +322,7 @@ describe('execute() a query with nested @module fragments, where the inner @modu let callbacks; let callback; let observationSnapshot; - let promise; + let promise: Promise; let 
resolve; beforeEach(() => { @@ -340,8 +340,7 @@ describe('execute() a query with nested @module fragments, where the inner @modu promise = new Promise(_resolve => (resolve = _resolve)); operationLoader = { - // $FlowFixMe[incompatible-type-arg] Error found while enabling LTI on this file - load: () => promise, + load: jest.fn(() => promise), get: jest.fn(), }; source = RelayRecordSource.create(); diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-TypeRefinement-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-TypeRefinement-test.js index 15cb9002f25c1..29deb890686c7 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-TypeRefinement-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-TypeRefinement-test.js @@ -901,7 +901,6 @@ describe('missing data detection', () => { __id: 'abc', __fragmentOwner: operation.request, __fragments: {RelayModernEnvironmentTypeRefinementTest2Fragment: {}}, - __isWithinUnmatchedTypeRefinement: false, }); expect(fragmentSnapshot.isMissingData).toBe(true); const innerFragmentSnapshot = environment.lookup( @@ -947,7 +946,6 @@ describe('missing data detection', () => { __id: 'abc', __fragmentOwner: operation.request, __fragments: {RelayModernEnvironmentTypeRefinementTest2Fragment: {}}, - __isWithinUnmatchedTypeRefinement: false, }); expect(fragmentSnapshot.isMissingData).toBe(true); const innerFragmentSnapshot = environment.lookup( @@ -999,7 +997,6 @@ describe('missing data detection', () => { __id: 'abc', __fragmentOwner: operation.request, __fragments: {RelayModernEnvironmentTypeRefinementTest2Fragment: {}}, - __isWithinUnmatchedTypeRefinement: false, }); expect(fragmentSnapshot.isMissingData).toBe(true); const innerFragmentSnapshot = environment.lookup( @@ -1065,8 +1062,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: 
{RelayModernEnvironmentTypeRefinementTest4Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest4Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1110,8 +1110,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest4Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest4Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1156,8 +1159,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest4Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest4Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1202,8 +1208,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest4Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest4Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1269,8 +1278,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: 
operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest6Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest6Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1315,8 +1327,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest6Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest6Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1362,8 +1377,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest6Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest6Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1409,8 +1427,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest6Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest6Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1478,8 +1499,11 @@ describe('missing data detection', () => { lastName: 
undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest8Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest8Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1524,8 +1548,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest8Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest8Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1571,8 +1598,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest8Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest8Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1618,8 +1648,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest8Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest8Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1685,8 +1718,11 @@ describe('missing 
data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest10Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest10Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1729,8 +1765,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest10Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest10Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1798,8 +1837,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest12Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest12Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( @@ -1842,8 +1884,11 @@ describe('missing data detection', () => { lastName: undefined, __id: 'abc', __fragmentOwner: operation.request, - __fragments: {RelayModernEnvironmentTypeRefinementTest12Fragment: {}}, - __isWithinUnmatchedTypeRefinement: true, + __fragments: { + RelayModernEnvironmentTypeRefinementTest12Fragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, }); expect(fragmentSnapshot.isMissingData).toBe(false); // known to not impl Actor const innerFragmentSnapshot = environment.lookup( 
diff --git a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-WithOperationTracker-test.js b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-WithOperationTracker-test.js index d418899339ab0..048d2ed5e2f3b 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernEnvironment-WithOperationTracker-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernEnvironment-WithOperationTracker-test.js @@ -14,6 +14,7 @@ import type {NormalizationRootNode} from '../../util/NormalizationNode'; const {graphql} = require('../../query/GraphQLTag'); +const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); const { createOperationDescriptor, } = require('../RelayModernOperationDescriptor'); @@ -28,411 +29,110 @@ const {disallowWarnings} = require('relay-test-utils-internal'); disallowWarnings(); -describe('RelayModernEnvironment with RelayOperationTracker', () => { - let tracker; - let environment; - let QueryOperation1; - let QueryOperation2; - let MutationOperation; - let operationLoader: { - get: (reference: mixed) => ?NormalizationRootNode, - load: JestMockFn<$ReadOnlyArray, Promise>, - }; - - beforeEach(() => { - const Query1 = graphql` - query RelayModernEnvironmentWithOperationTrackerTest1Query($id: ID) - @relay_test_operation { - node(id: $id) { - ... 
on Feedback { - id - body { - text - } - comments { - edges { - node { - id - message { - text +describe.each([true, false])( + 'RelayModernEnvironment with RelayOperationTracker with ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION=%p', + looseAttribution => { + let tracker; + let environment; + let QueryOperation1; + let QueryOperation2; + let MutationOperation; + let operationLoader: { + get: (reference: mixed) => ?NormalizationRootNode, + load: JestMockFn<$ReadOnlyArray, Promise>, + }; + + beforeEach(() => { + RelayFeatureFlags.ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION = + looseAttribution; + const Query1 = graphql` + query RelayModernEnvironmentWithOperationTrackerTest1Query($id: ID) + @relay_test_operation { + node(id: $id) { + ... on Feedback { + id + body { + text + } + comments { + edges { + node { + id + message { + text + } } } } } } } - } - `; + `; - const Query2 = graphql` - query RelayModernEnvironmentWithOperationTrackerTest2Query($id: ID) - @relay_test_operation { - node(id: $id) { - id - } - } - `; - - const Mutation1 = graphql` - mutation RelayModernEnvironmentWithOperationTrackerTest1Mutation( - $input: CommentCreateInput - ) @relay_test_operation { - commentCreate(input: $input) { - comment { - id - message { - text - } - } - feedback { - id - body { - text - } - } - } - } - `; - - QueryOperation1 = createOperationDescriptor(Query1, {id: '1'}); - QueryOperation2 = createOperationDescriptor(Query2, {id: '2'}); - MutationOperation = createOperationDescriptor(Mutation1, {id: '1'}); - operationLoader = { - load: jest.fn(), - get: jest.fn(), - }; - tracker = new RelayOperationTracker(); - environment = createMockEnvironment({ - operationTracker: tracker, - operationLoader, - }); - }); - - it('should return an instance of tracker', () => { - expect(environment.getOperationTracker()).toBe(tracker); - }); - - it('should have operation tracker and operations should not be affected', () => { - invariant(tracker != null, 'Tracker should be defined'); - expect( - 
tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation2.request), - ).toBe(null); - }); - - it('sh©ould return a promise when there are pending operations that are affecting the owner', () => { - invariant(tracker != null, 'Tracker should be defined'); - environment - .execute({ - operation: QueryOperation1, - }) - .subscribe({}); - - // Note: Why do we need to `subscribe` here (and in other places)? - // We need to subscribe a fragment (with the owner) in order to simulate - // scenario when the fragment is updated - we need to update - // OperationTracker, and mark this owner as pending, or completed - if an - // operation that initiated the change is completed. - environment.subscribe( - environment.lookup(QueryOperation1.fragment), - jest.fn(), - ); - - const FEEDBACK_ID = 'my-feedback-id'; - - environment.mock.resolve( - QueryOperation1, - MockPayloadGenerator.generate(QueryOperation1, { - Feedback() { - return { - id: FEEDBACK_ID, - }; - }, - }), - ); - - environment - .executeMutation({ - operation: MutationOperation, - }) - .subscribe({}); - - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - - // This mutation is changing the same feedback object, so the owner (QueryOperation1) - // will be affected by this operation - environment.mock.nextValue( - MutationOperation, - MockPayloadGenerator.generate(MutationOperation, { - Feedback(context) { - return { - id: FEEDBACK_ID, - body: { - text: 'Updated text', - }, - }; - }, - }), - ); - - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request) - ?.promise, - ).toBeInstanceOf(Promise); - - // Complete the mutation - environment.mock.complete(MutationOperation.request.node); - - // There should be no pending operations affecting the owner, - // after the mutation is completed - expect( - 
tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - }); - - it('should not have pending operation affecting the owner, if owner does not have subscriptions', () => { - invariant(tracker != null, 'Tracker should be defined'); - environment - .execute({ - operation: QueryOperation1, - }) - .subscribe({}); - - const FEEDBACK_ID = 'my-feedback-id'; - - environment.mock.resolve( - QueryOperation1, - MockPayloadGenerator.generate(QueryOperation1, { - Feedback() { - return { - id: FEEDBACK_ID, - }; - }, - }), - ); - - environment - .executeMutation({ - operation: MutationOperation, - }) - .subscribe({}); - - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - - // This mutation will update the same data as the QueryOperation1 - // but since there are not subscriptions that have owner QueryOperation1 - // it should not be affected - environment.mock.nextValue( - MutationOperation, - MockPayloadGenerator.generate(MutationOperation, { - Feedback(context) { - return { - id: FEEDBACK_ID, - body: { - text: 'Updated text', - }, - }; - }, - }), - ); - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - }); - - it('should return a promise for operation affecting owner that resolves when operation completes', () => { - invariant(tracker != null, 'Tracker should be defined'); - environment - .execute({ - operation: QueryOperation1, - }) - .subscribe({}); - - environment.subscribe( - environment.lookup(QueryOperation1.fragment), - jest.fn(), - ); - - const FEEDBACK_ID = 'my-feedback-id'; - - environment.mock.resolve( - QueryOperation1, - MockPayloadGenerator.generate(QueryOperation1, { - Feedback() { - return { - id: FEEDBACK_ID, - }; - }, - }), - ); - - // Let's start mutation - environment - .executeMutation({ - operation: MutationOperation, - }) - .subscribe({}); - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); 
- environment.mock.nextValue( - MutationOperation, - MockPayloadGenerator.generate(MutationOperation, { - Feedback() { - return { - id: FEEDBACK_ID, - body: { - text: 'Updated text', - }, - }; - }, - }), - ); - const result = tracker.getPendingOperationsAffectingOwner( - QueryOperation1.request, - ); - - invariant(result != null, 'Expected to have promise for operation'); - const promiseCallback = jest.fn<[void], mixed>(); - result.promise.then(promiseCallback); - expect(promiseCallback).not.toBeCalled(); - environment.mock.complete(MutationOperation.request.node); - jest.runAllTimers(); - expect(promiseCallback).toBeCalled(); - }); - - it('pending queries that did not change the data should not affect the owner', () => { - invariant(tracker != null, 'Tracker should be defined'); - // Send the first query - environment - .execute({ - operation: QueryOperation1, - }) - .subscribe({}); - - environment.subscribe( - environment.lookup(QueryOperation1.fragment), - jest.fn(), - ); - - // Send the second query - environment - .execute({ - operation: QueryOperation2, - }) - .subscribe({}); - - environment.subscribe( - environment.lookup(QueryOperation2.fragment), - jest.fn(), - ); - - environment.mock.resolve( - QueryOperation1, - MockPayloadGenerator.generate(QueryOperation1, { - Feedback() { - return { - id: 'feedback-id-1', - }; - }, - }), - ); - - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - - environment.mock.nextValue( - QueryOperation2, - MockPayloadGenerator.generate(QueryOperation2, { - Node() { - return { - __typename: 'Feedback', - id: 'feedback-id-2', - }; - }, - }), - ); - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); - }); - - describe('with @match', () => { - it('should return a promise for affecting operations', () => { - //const {Query, Mutation, FeedbackFragment} = - const Query = graphql` - query RelayModernEnvironmentWithOperationTrackerTestQuery($id: 
ID) + const Query2 = graphql` + query RelayModernEnvironmentWithOperationTrackerTest2Query($id: ID) @relay_test_operation { node(id: $id) { - ...RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment - } - } - `; - - graphql` - fragment RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name on PlainUserNameRenderer { - plaintext - data { - text - } - } - `; - graphql` - fragment RelayModernEnvironmentWithOperationTrackerTestMarkdownUserNameRenderer_name on MarkdownUserNameRenderer { - markdown - data { - markup - } - } - `; - - const FeedbackFragment = graphql` - fragment RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment on Feedback { - id - body { - text - } - author { - __typename - nameRenderer @match { - ...RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name - @module(name: "PlainUserNameRenderer.react") - ...RelayModernEnvironmentWithOperationTrackerTestMarkdownUserNameRenderer_name - @module(name: "MarkdownUserNameRenderer.react") - } - plainNameRenderer: nameRenderer - @match( - key: "RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer" - ) { - ...RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name - @module(name: "PlainUserNameRenderer.react") - } + id } } `; - const Mutation = graphql` - mutation RelayModernEnvironmentWithOperationTrackerTestMutation( + const Mutation1 = graphql` + mutation RelayModernEnvironmentWithOperationTrackerTest1Mutation( $input: CommentCreateInput ) @relay_test_operation { commentCreate(input: $input) { + comment { + id + message { + text + } + } feedback { - ...RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment + id + lastName + body { + text + } } } } `; - QueryOperation1 = createOperationDescriptor(Query, {id: '1'}); - MutationOperation = createOperationDescriptor(Mutation, {id: '1'}); + QueryOperation1 = createOperationDescriptor(Query1, {id: '1'}); + QueryOperation2 = createOperationDescriptor(Query2, {id: 
'2'}); + MutationOperation = createOperationDescriptor(Mutation1, {id: '1'}); + operationLoader = { + load: jest.fn(), + get: jest.fn(), + }; + tracker = new RelayOperationTracker(); + environment = createMockEnvironment({ + operationTracker: tracker, + operationLoader, + }); + }); + afterEach(() => { + RelayFeatureFlags.ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION = false; + }); + + it('should return an instance of tracker', () => { + expect(environment.getOperationTracker()).toBe(tracker); + }); + + it('should have operation tracker and operations should not be affected', () => { + invariant(tracker != null, 'Tracker should be defined'); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation2.request), + ).toBe(null); + }); + + it('should return a promise when there are pending operations that are affecting the owner', () => { invariant(tracker != null, 'Tracker should be defined'); environment .execute({ @@ -440,84 +140,455 @@ describe('RelayModernEnvironment with RelayOperationTracker', () => { }) .subscribe({}); - const FEEDBACK_ID = 'my-feedback-id'; - + // Note: Why do we need to `subscribe` here (and in other places)? + // We need to subscribe a fragment (with the owner) in order to simulate + // scenario when the fragment is updated - we need to update + // OperationTracker, and mark this owner as pending, or completed - if an + // operation that initiated the change is completed. 
environment.subscribe( environment.lookup(QueryOperation1.fragment), jest.fn(), ); - environment.subscribe( - environment.lookup( - createReaderSelector( - FeedbackFragment, - FEEDBACK_ID, - QueryOperation1.request.variables, - QueryOperation1.request, - ), - ), - jest.fn(), + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return { + id: FEEDBACK_ID, + }; + }, + }), ); - operationLoader.load.mockImplementation(() => Promise.resolve()); - environment.mock.resolve(QueryOperation1.request.node, { - data: { - node: { - __typename: 'Feedback', - id: FEEDBACK_ID, - body: { - text: '', - }, - author: { - __typename: 'User', - nameRenderer: { - __typename: 'MarkdownUserNameRenderer', - markdown: 'mock value', - data: { - markup: 'mock value', - }, - __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: - '', - __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: - '', - }, - plainNameRenderer: { - __typename: 'PlainUserNameRenderer', - __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: - '', - __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: - '', + environment + .executeMutation({ + operation: MutationOperation, + }) + .subscribe({}); + + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + + // This mutation is changing the same feedback object, so the owner (QueryOperation1) + // will be affected by this operation + environment.mock.nextValue( + MutationOperation, + MockPayloadGenerator.generate(MutationOperation, { + Feedback(context) { + return { + id: FEEDBACK_ID, + body: { + text: 'Updated text', }, - id: '', - }, + }; }, - }, - }); - expect(operationLoader.load).toBeCalled(); - operationLoader.load.mockClear(); + }), + ); - // We still processing follow-up payloads for the initial query 
expect( tracker.getPendingOperationsAffectingOwner(QueryOperation1.request) ?.promise, ).toBeInstanceOf(Promise); - jest.runAllTimers(); - // All followup completed, operation tracker should be completed + // Complete the mutation + environment.mock.complete(MutationOperation.request.node); + + // There should be no pending operations affecting the owner, + // after the mutation is completed expect( tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), ).toBe(null); + }); + + it('should not have pending operation affecting the owner, if owner does not have subscriptions', () => { + invariant(tracker != null, 'Tracker should be defined'); + environment + .execute({ + operation: QueryOperation1, + }) + .subscribe({}); + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return { + id: FEEDBACK_ID, + }; + }, + }), + ); - // Send the mutation environment .executeMutation({ operation: MutationOperation, }) .subscribe({}); - environment.mock.nextValue(MutationOperation, { - data: { - commentCreate: { - feedback: { + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + + // This mutation will update the same data as the QueryOperation1 + // but since there are not subscriptions that have owner QueryOperation1 + // it should not be affected + environment.mock.nextValue( + MutationOperation, + MockPayloadGenerator.generate(MutationOperation, { + Feedback(context) { + return { + id: FEEDBACK_ID, + body: { + text: 'Updated text', + }, + }; + }, + }), + ); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + }); + + it('should return a promise for operation affecting owner that resolves when operation completes', () => { + invariant(tracker != null, 'Tracker should be defined'); + environment + .execute({ + operation: QueryOperation1, + }) + .subscribe({}); + + 
environment.subscribe( + environment.lookup(QueryOperation1.fragment), + jest.fn(), + ); + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return { + id: FEEDBACK_ID, + }; + }, + }), + ); + + // Let's start mutation + environment + .executeMutation({ + operation: MutationOperation, + }) + .subscribe({}); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + environment.mock.nextValue( + MutationOperation, + MockPayloadGenerator.generate(MutationOperation, { + Feedback() { + return { + id: FEEDBACK_ID, + body: { + text: 'Updated text', + }, + }; + }, + }), + ); + const result = tracker.getPendingOperationsAffectingOwner( + QueryOperation1.request, + ); + + invariant(result != null, 'Expected to have promise for operation'); + const promiseCallback = jest.fn<[void], mixed>(); + // $FlowFixMe[unused-promise] + result.promise.then(promiseCallback); + expect(promiseCallback).not.toBeCalled(); + environment.mock.complete(MutationOperation.request.node); + jest.runAllTimers(); + expect(promiseCallback).toBeCalled(); + }); + + it('pending queries that did not change the data should not affect the owner', () => { + invariant(tracker != null, 'Tracker should be defined'); + // Send the first query + environment.execute({operation: QueryOperation1}).subscribe({}); + + environment.subscribe( + environment.lookup(QueryOperation1.fragment), + jest.fn(), + ); + + // Send the second query + environment.execute({operation: QueryOperation2}).subscribe({}); + + environment.subscribe( + environment.lookup(QueryOperation2.fragment), + jest.fn(), + ); + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return { + id: 'feedback-id-1', + }; + }, + }), + ); + + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + + 
environment.mock.nextValue( + QueryOperation2, + MockPayloadGenerator.generate(QueryOperation2, { + Node() { + return { + __typename: 'Feedback', + id: 'feedback-id-2', + }; + }, + }), + ); + + const operations = tracker.getPendingOperationsAffectingOwner( + QueryOperation1.request, + ); + + expect(operations).toBe(null); + }); + + // If a store update changes a record, that will force us to reread any fragment that + // read that ID. However, if that re-read results in identical data, we will not notify + // the subscribers. + // + // With ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION disabled (default) we also + // won't mark the store update as affecing the fragment. + // + // With ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION enabled we _will_ mark the + // store update as affecing the fragment. If this behavior is sufficient, it + // will allow us to support OperationTracker with lazy subscriptions that + // don't read eagerly. + it('pending queries that changed a record that was read, but not any fields', () => { + invariant(tracker != null, 'Tracker should be defined'); + environment.execute({operation: QueryOperation1}).subscribe({}); + + const query1Subscription = jest.fn(); + + environment.subscribe( + environment.lookup(QueryOperation1.fragment), + query1Subscription, + ); + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return {id: FEEDBACK_ID}; + }, + }), + ); + + expect(query1Subscription).toHaveBeenCalledTimes(1); + + // Let's start mutation + environment.executeMutation({operation: MutationOperation}).subscribe({}); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + environment.mock.nextValue( + MutationOperation, + MockPayloadGenerator.generate(MutationOperation, { + Feedback() { + return { + id: FEEDBACK_ID, + // This field changed on this record but, Query1 does not actually + // read it. 
This should mean that Query1 will get re-read, but + // should not actually trigger an update. + lastName: 'CHANGED', + }; + }, + }), + ); + + // Becuase `lastName` was not read by Query1, the subscription should not have notified. + expect(query1Subscription).toHaveBeenCalledTimes(1); + + const result = tracker.getPendingOperationsAffectingOwner( + QueryOperation1.request, + ); + + if (looseAttribution) { + invariant( + result != null, + `Expected to have promise for operation due to overlap on ${FEEDBACK_ID}.`, + ); + } else { + expect(result).toBe(null); + } + }); + + it('pending queries that changed ROOT_ID, but not other records read by the subscribed fragment', () => { + invariant(tracker != null, 'Tracker should be defined'); + environment.execute({operation: QueryOperation1}).subscribe({}); + + const query1Subscription = jest.fn(); + + environment.subscribe( + environment.lookup(QueryOperation1.fragment), + query1Subscription, + ); + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.mock.resolve( + QueryOperation1, + MockPayloadGenerator.generate(QueryOperation1, { + Feedback() { + return {id: FEEDBACK_ID}; + }, + }), + ); + + expect(query1Subscription).toHaveBeenCalledTimes(1); + + // Let's start mutation + environment.executeMutation({operation: MutationOperation}).subscribe({}); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + + const payload = MockPayloadGenerator.generate(MutationOperation, { + Mutation() { + return {commentCreate: null}; + }, + }); + environment.mock.nextValue(MutationOperation, payload); + + expect(query1Subscription).toHaveBeenCalledTimes(1); + + const result = tracker.getPendingOperationsAffectingOwner( + QueryOperation1.request, + ); + + // Loose attribution should ignore changes to ROOT_ID, and without loose + // attribution there should be no changes to the read fragment data. 
+ expect(result).toBe(null); + }); + + describe('with @match', () => { + it('should return a promise for affecting operations', () => { + //const {Query, Mutation, FeedbackFragment} = + const Query = graphql` + query RelayModernEnvironmentWithOperationTrackerTestQuery($id: ID) + @relay_test_operation { + node(id: $id) { + ...RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment + } + } + `; + + graphql` + fragment RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } + } + `; + graphql` + fragment RelayModernEnvironmentWithOperationTrackerTestMarkdownUserNameRenderer_name on MarkdownUserNameRenderer { + markdown + data { + markup + } + } + `; + + const FeedbackFragment = graphql` + fragment RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment on Feedback { + id + body { + text + } + author { + __typename + nameRenderer @match { + ...RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name + @module(name: "PlainUserNameRenderer.react") + ...RelayModernEnvironmentWithOperationTrackerTestMarkdownUserNameRenderer_name + @module(name: "MarkdownUserNameRenderer.react") + } + plainNameRenderer: nameRenderer + @match( + key: "RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer" + ) { + ...RelayModernEnvironmentWithOperationTrackerTestPlainUserNameRenderer_name + @module(name: "PlainUserNameRenderer.react") + } + } + } + `; + + const Mutation = graphql` + mutation RelayModernEnvironmentWithOperationTrackerTestMutation( + $input: CommentCreateInput + ) @relay_test_operation { + commentCreate(input: $input) { + feedback { + ...RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment + } + } + } + `; + + QueryOperation1 = createOperationDescriptor(Query, {id: '1'}); + MutationOperation = createOperationDescriptor(Mutation, {id: '1'}); + + invariant(tracker != null, 'Tracker should be defined'); + environment + .execute({ + 
operation: QueryOperation1, + }) + .subscribe({}); + + const FEEDBACK_ID = 'my-feedback-id'; + + environment.subscribe( + environment.lookup(QueryOperation1.fragment), + jest.fn(), + ); + environment.subscribe( + environment.lookup( + createReaderSelector( + FeedbackFragment, + FEEDBACK_ID, + QueryOperation1.request.variables, + QueryOperation1.request, + ), + ), + jest.fn(), + ); + + operationLoader.load.mockImplementation(() => Promise.resolve()); + environment.mock.resolve(QueryOperation1.request.node, { + data: { + node: { + __typename: 'Feedback', id: FEEDBACK_ID, body: { text: '', @@ -525,7 +596,11 @@ describe('RelayModernEnvironment with RelayOperationTracker', () => { author: { __typename: 'User', nameRenderer: { - __typename: 'PlainUserNameRenderer', + __typename: 'MarkdownUserNameRenderer', + markdown: 'mock value', + data: { + markup: 'mock value', + }, __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: '', __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: @@ -533,29 +608,81 @@ describe('RelayModernEnvironment with RelayOperationTracker', () => { }, plainNameRenderer: { __typename: 'PlainUserNameRenderer', - __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer: + __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: '', - __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer: + __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: '', }, id: '', }, }, }, - }, + }); + expect(operationLoader.load).toBeCalled(); + operationLoader.load.mockClear(); + + // We still processing follow-up payloads for the initial query + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request) + ?.promise, + ).toBeInstanceOf(Promise); + jest.runAllTimers(); + + // All followup completed, operation tracker should be completed + expect( + 
tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); + + // Send the mutation + environment + .executeMutation({ + operation: MutationOperation, + }) + .subscribe({}); + + environment.mock.nextValue(MutationOperation, { + data: { + commentCreate: { + feedback: { + id: FEEDBACK_ID, + body: { + text: '', + }, + author: { + __typename: 'User', + nameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: + '', + __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment: + '', + }, + plainNameRenderer: { + __typename: 'PlainUserNameRenderer', + __module_component_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer: + '', + __module_operation_RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment_plainNameRenderer: + '', + }, + id: '', + }, + }, + }, + }, + }); + + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request) + ?.promise, + ).toBeInstanceOf(Promise); + + environment.mock.complete(MutationOperation); + expect(operationLoader.load).toBeCalled(); + jest.runAllTimers(); + expect( + tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), + ).toBe(null); }); - - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request) - ?.promise, - ).toBeInstanceOf(Promise); - - environment.mock.complete(MutationOperation); - expect(operationLoader.load).toBeCalled(); - jest.runAllTimers(); - expect( - tracker.getPendingOperationsAffectingOwner(QueryOperation1.request), - ).toBe(null); }); - }); -}); + }, +); diff --git a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolver-WithFragmentOwnership-test.js b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolver-WithFragmentOwnership-test.js index 53c9acc4d6b52..6a450e4a3c0c6 100644 --- 
a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolver-WithFragmentOwnership-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolver-WithFragmentOwnership-test.js @@ -226,7 +226,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -252,7 +251,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -280,7 +278,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -339,7 +336,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -363,7 +359,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -391,7 +386,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -413,7 +407,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -483,7 +476,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -517,7 +509,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, }); }); @@ -575,7 +566,6 @@ 
describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -603,7 +593,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -629,7 +618,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, { id: 'beast', @@ -640,7 +628,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -658,7 +645,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -690,7 +676,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -753,7 +738,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -779,7 +763,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -811,7 +794,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -837,7 +819,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -856,7 +837,6 @@ 
describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, { id: 'beast', @@ -867,7 +847,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -891,7 +870,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, { id: 'beast', @@ -905,7 +883,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -926,7 +903,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: zuckOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, { id: 'beast', @@ -937,7 +913,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: beastOperation.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -1020,7 +995,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); @@ -1058,7 +1032,6 @@ describe('RelayModernFragmentSpecResolver with fragment ownership', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, ], }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredField-test.js b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredField-test.js index 5b471b26fe51d..2a614e999fad8 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredField-test.js +++ 
b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredField-test.js @@ -26,7 +26,7 @@ describe('RelayModernFragmentSpecResolver', () => { let zuckOperation; let variables; let logger; - let requiredFieldLogger; + let relayFieldLogger; function setUserField(id, fieldName, value) { environment.applyUpdate({ @@ -39,8 +39,8 @@ describe('RelayModernFragmentSpecResolver', () => { beforeEach(() => { logger = jest.fn(); - requiredFieldLogger = jest.fn(); - environment = createMockEnvironment({log: logger, requiredFieldLogger}); + relayFieldLogger = jest.fn(); + environment = createMockEnvironment({log: logger, relayFieldLogger}); UserFragment = graphql` fragment RelayModernFragmentSpecResolverRequiredFieldTestUserFragment on User { id @@ -99,7 +99,7 @@ describe('RelayModernFragmentSpecResolver', () => { true /* rootIsQueryRenderer */, ); resolver.resolve(); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'alternate_name', kind: 'missing_field.log', owner: 'RelayModernFragmentSpecResolverRequiredFieldTestUserFragment', @@ -122,7 +122,7 @@ describe('RelayModernFragmentSpecResolver', () => { "Relay: Missing @required value at path 'name' in 'RelayModernFragmentSpecResolverRequiredFieldTestUserFragment'.", ); - expect(requiredFieldLogger).toHaveBeenCalledWith({ + expect(relayFieldLogger).toHaveBeenCalledWith({ fieldPath: 'name', kind: 'missing_field.throw', owner: 'RelayModernFragmentSpecResolverRequiredFieldTestUserFragment', diff --git a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredFieldNoLogger-test.js b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredFieldNoLogger-test.js index d530a69d0be2c..d6afca959519d 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredFieldNoLogger-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernFragmentSpecResolverRequiredFieldNoLogger-test.js 
@@ -88,7 +88,7 @@ describe('RelayModernFragmentSpecResolver', () => { true /* rootIsQueryRenderer */, ); expect(() => resolver.resolve()).toThrow( - 'Relay Environment Configuration Error (dev only): `@required(action: LOG)` requires that the Relay Environment be configured with a `requiredFieldLogger`', + 'Relay Environment Configuration Error (dev only): `@required(action: LOG)` requires that the Relay Environment be configured with a `relayFieldLogger`', ); }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernRecord-test.js b/packages/relay-runtime/store/__tests__/RelayModernRecord-test.js index ac5fcce61fbb5..7c06ca32f979d 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernRecord-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernRecord-test.js @@ -18,7 +18,7 @@ const RelayModernRecord = require('../RelayModernRecord'); const RelayStoreUtils = require('../RelayStoreUtils'); const RelayModernTestUtils = require('relay-test-utils-internal'); -const {ID_KEY, REF_KEY, REFS_KEY, TYPENAME_KEY} = RelayStoreUtils; +const {ERRORS_KEY, ID_KEY, REF_KEY, REFS_KEY, TYPENAME_KEY} = RelayStoreUtils; describe('RelayModernRecord', () => { beforeEach(() => { @@ -67,11 +67,26 @@ describe('RelayModernRecord', () => { }); }); + describe('fromObject()', () => { + it('returns the given JSON object', () => { + const object = {}; + const record = RelayModernRecord.fromObject(object); + expect(record).toBe(object); + }); + }); + describe('getLinkedRecordIDs()', () => { let record; beforeEach(() => { record = { + [ERRORS_KEY]: { + fieldWithError: [ + { + message: 'Something bad happened!', + }, + ], + }, [ID_KEY]: 4, name: 'Mark', enemies: null, @@ -81,6 +96,7 @@ describe('RelayModernRecord', () => { 'friends{"first":10}': { [REFS_KEY]: ['beast', 'greg', null], }, + fieldWithError: null, }; }); @@ -96,6 +112,12 @@ describe('RelayModernRecord', () => { ); }); + it('returns null when the link has an error', () => { + expect( + 
RelayModernRecord.getLinkedRecordIDs(record, 'fieldWithError'), + ).toBeNull(); + }); + it('returns the linked record IDs when they exist', () => { expect( RelayModernRecord.getLinkedRecordIDs(record, 'friends{"first":10}'), @@ -150,11 +172,45 @@ describe('RelayModernRecord', () => { }); }); + describe('getFields()', () => { + it('returns an array with all the keys', () => { + const fields = RelayModernRecord.getFields({ + [ERRORS_KEY]: { + fieldWithError: [ + { + message: 'Something bad happened!', + path: ['fieldWithError'], + }, + ], + }, + [ID_KEY]: '4', + [TYPENAME_KEY]: 'User', + name: 'Zuck', + pets: {[REFS_KEY]: ['beast']}, + fieldWithError: null, + }); + expect(fields).toEqual([ + ID_KEY, + TYPENAME_KEY, + 'name', + 'pets', + 'fieldWithError', + ]); + }); + }); + describe('getValue()', () => { let record; beforeEach(() => { record = { + [ERRORS_KEY]: { + fieldWithError: [ + { + message: 'Something bad happened!', + }, + ], + }, [ID_KEY]: 4, name: 'Mark', blockbusterMembership: null, @@ -168,6 +224,7 @@ describe('RelayModernRecord', () => { other: { customScalar: true, }, + fieldWithError: null, }; }); @@ -175,6 +232,10 @@ describe('RelayModernRecord', () => { expect(RelayModernRecord.getValue(record, 'name')).toBe('Mark'); }); + it('returns null when field has errors', () => { + expect(RelayModernRecord.getValue(record, 'fieldWithError')).toBeNull(); + }); + it('returns a (list) scalar value', () => { // Note that lists can be scalars too. 
The definition of scalar value is // "not a singular or plural link", and means that no query can traverse @@ -232,6 +293,128 @@ describe('RelayModernRecord', () => { }); }); + describe('setErrors()', () => { + it('warns if the field is undefined', () => { + jest.mock('warning'); + const record = RelayModernRecord.create('4', 'User'); + expect(() => + RelayModernRecord.setErrors(record, 'pet', [ + { + message: 'There was an error on the pet field!', + }, + { + message: 'There was another error on the pet field!', + }, + ]), + ).toWarn([ + 'RelayModernRecord: Invalid error update, `%s` should not be undefined.', + 'pet', + ]); + }); + + it('adds and removes errors', () => { + const record = RelayModernRecord.create('4', 'User'); + expect(RelayModernRecord.getErrors(record, 'pet')).toBeUndefined(); + const petErrors = [ + { + message: 'There was an error on the pet field!', + }, + { + message: 'There was another error on the pet field!', + }, + ]; + RelayModernRecord.setValue(record, 'pet', null); + RelayModernRecord.setErrors(record, 'pet', petErrors); + expect(RelayModernRecord.getErrors(record, 'pet')).toBe(petErrors); + const nameErrors = [ + { + message: 'There was an error on the name field!', + }, + ]; + RelayModernRecord.setValue(record, 'name', null); + RelayModernRecord.setErrors(record, 'name', nameErrors); + expect(RelayModernRecord.getErrors(record, 'name')).toBe(nameErrors); + expect(RelayModernRecord.getErrors(record, 'pet')).toBe(petErrors); + const newPetErrors = [ + { + message: 'There was a different error on the pet field!', + }, + ]; + RelayModernRecord.setErrors(record, 'pet', newPetErrors); + expect(RelayModernRecord.getErrors(record, 'pet')).toBe(newPetErrors); + expect(RelayModernRecord.getErrors(record, 'name')).toBe(nameErrors); + const noErrors = []; + RelayModernRecord.setErrors(record, 'pet', noErrors); + expect(RelayModernRecord.getErrors(record, 'pet')).toBeUndefined(); + expect(RelayModernRecord.getErrors(record, 
'name')).toBe(nameErrors); + RelayModernRecord.setErrors(record, 'name', noErrors); + expect(RelayModernRecord.getErrors(record, 'name')).toBeUndefined(); + expect(record).toEqual({ + [ID_KEY]: '4', + [TYPENAME_KEY]: 'User', + name: null, + pet: null, + }); + }); + }); + + describe('hasValue()', () => { + let record; + + beforeEach(() => { + record = { + [ERRORS_KEY]: { + fieldWithError: [ + { + message: 'Something bad happened!', + path: ['fieldWithError'], + }, + ], + }, + [ID_KEY]: '4', + [TYPENAME_KEY]: 'User', + fieldThatIsString: 'applesauce', + fieldThatIsNull: null, + fieldThatIsUndefined: undefined, + fieldWithError: null, + }; + }); + + it('has no special treatment for the id field', () => { + expect(RelayModernRecord.hasValue(record, ID_KEY)).toBe(true); + }); + + it('has no special treatment for the typename field', () => { + expect(RelayModernRecord.hasValue(record, TYPENAME_KEY)).toBe(true); + }); + + it('returns true when the value is a string', () => { + expect(RelayModernRecord.hasValue(record, 'fieldThatIsString')).toBe( + true, + ); + }); + + it('returns true when the value is null', () => { + expect(RelayModernRecord.hasValue(record, 'fieldThatIsNull')).toBe(true); + }); + + it('returns true when the value is explicitly undefined', () => { + expect(RelayModernRecord.hasValue(record, 'fieldThatIsUndefined')).toBe( + true, + ); + }); + + it('returns true when the value is an error', () => { + expect(RelayModernRecord.hasValue(record, 'fieldWithError')).toBe(true); + }); + + it('returns false when the value is missing', () => { + expect(RelayModernRecord.hasValue(record, 'fieldThatIsMissing')).toBe( + false, + ); + }); + }); + describe('update()', () => { it('returns the first record if there are no changes', () => { const prev = RelayModernRecord.create('4', 'User'); @@ -261,6 +444,70 @@ describe('RelayModernRecord', () => { }); }); + it('discards errors from first argument where applicable', () => { + const errors = [ + { + message: 'There was an 
error!', + path: ['name'], + }, + { + message: 'There was another error!', + path: ['name'], + }, + ]; + const prev = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(prev, 'name', null); + RelayModernRecord.setErrors(prev, 'name', errors); + const next = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(next, 'name', 'Alice'); + const updated = RelayModernRecord.update(prev, next); + expect(updated).not.toBe(prev); + expect(updated).not.toBe(next); + expect(RelayModernRecord.getErrors(updated, 'name')).toBeUndefined(); + }); + + it('preserves errors from the first argument where applicable', () => { + const prev = RelayModernRecord.create('4', 'User'); + const prevErrors = [ + { + message: 'There was an error on the "name" field!', + }, + { + message: 'There was another error on the "name" field!', + }, + ]; + RelayModernRecord.setValue(prev, 'name', null); + RelayModernRecord.setErrors(prev, 'name', prevErrors); + const next = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(next, 'age', 42); + const updated = RelayModernRecord.update(prev, next); + expect(RelayModernRecord.getValue(updated, 'name')).toBeNull(); + expect(RelayModernRecord.getErrors(updated, 'name')).toBe(prevErrors); + }); + + it('copies the errors from second argument where applicable', () => { + const errors = [ + { + message: 'There was an error!', + path: ['name'], + }, + { + message: 'There was another error!', + path: ['name'], + }, + ]; + const prev = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(prev, 'age', 42); + const next = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(next, 'name', null); + RelayModernRecord.setErrors(next, 'name', errors); + const updated = RelayModernRecord.update(prev, next); + expect(updated).not.toBe(prev); + expect(updated).not.toBe(next); + expect(RelayModernRecord.getValue(updated, 'name')).toBeNull(); + expect(RelayModernRecord.getErrors(updated, 
'name')).toBe(errors); + }); + it('warns if __id does not match', () => { jest.mock('warning'); const prev = RelayModernRecord.create('4', 'User'); @@ -326,6 +573,70 @@ describe('RelayModernRecord', () => { }); }); + it('discards errors from first argument where applicable', () => { + const errors = [ + { + message: 'There was an error!', + path: ['name'], + }, + { + message: 'There was another error!', + path: ['name'], + }, + ]; + const prev = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(prev, 'name', null); + RelayModernRecord.setErrors(prev, 'name', errors); + const next = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(next, 'name', 'Alice'); + const updated = RelayModernRecord.merge(prev, next); + expect(updated).not.toBe(prev); + expect(updated).not.toBe(next); + expect(RelayModernRecord.getErrors(updated, 'name')).toBeUndefined(); + }); + + it('preserves errors from the first argument where applicable', () => { + const prev = RelayModernRecord.create('4', 'User'); + const prevErrors = [ + { + message: 'There was an error on the "name" field!', + }, + { + message: 'There was another error on the "name" field!', + }, + ]; + RelayModernRecord.setValue(prev, 'name', null); + RelayModernRecord.setErrors(prev, 'name', prevErrors); + const next = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(next, 'age', 42); + const updated = RelayModernRecord.merge(prev, next); + expect(RelayModernRecord.getValue(updated, 'name')).toBeNull(); + expect(RelayModernRecord.getErrors(updated, 'name')).toBe(prevErrors); + }); + + it('copies the errors from second argument where applicable', () => { + const errors = [ + { + message: 'There was an error!', + path: ['name'], + }, + { + message: 'There was another error!', + path: ['name'], + }, + ]; + const prev = RelayModernRecord.create('4', 'User'); + RelayModernRecord.setValue(prev, 'age', 42); + const next = RelayModernRecord.create('4', 'User'); + 
RelayModernRecord.setValue(next, 'name', null); + RelayModernRecord.setErrors(next, 'name', errors); + const updated = RelayModernRecord.merge(prev, next); + expect(updated).not.toBe(prev); + expect(updated).not.toBe(next); + expect(RelayModernRecord.getValue(updated, 'name')).toBeNull(); + expect(RelayModernRecord.getErrors(updated, 'name')).toBe(errors); + }); + it('warns if __id does not match', () => { jest.mock('warning'); const prev = RelayModernRecord.create('4', 'User'); diff --git a/packages/relay-runtime/store/__tests__/RelayModernSelector-test.js b/packages/relay-runtime/store/__tests__/RelayModernSelector-test.js index 87ef5c09d6c27..e6dfe61fc4981 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernSelector-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernSelector-test.js @@ -132,7 +132,7 @@ describe('RelayModernSelector', () => { 'RelayModernSelector: Expected value for fragment `RelayModernSelectorTestUserFragment` to be an object, got ' + '`[{"__fragments":{"RelayModernSelectorTestUserFragment":{},"RelayModernSelectorTestUsersFragment":{}},"__id":"4","__fragmentOwner":' + JSON.stringify(operationDescriptor.request) + - ',"__isWithinUnmatchedTypeRefinement":false}]`.', + '}]`.', ); }); @@ -681,6 +681,10 @@ describe('RelayModernSelector', () => { it('returns false for equivalent selectors but with different owners', () => { const queryNode = UserQuery; owner = createOperationDescriptor(queryNode, operationVariables); + const newOwner = createOperationDescriptor(queryNode, { + ...operationVariables, + size: '16', + }); const selector = createReaderSelector( UserFragment, '4', @@ -691,11 +695,108 @@ describe('RelayModernSelector', () => { // even if the 2 selectors represent the same selection const differentOwner = { ...selector, - owner: {...owner.request}, + owner: newOwner.request, + }; + expect(areEqualSelectors(selector, differentOwner)).toBe(false); + }); + + it('returns false for equivalent selectors but with different 
isWithinUnmatchedTypeRefinement', () => { + const queryNode = UserQuery; + owner = createOperationDescriptor(queryNode, operationVariables); + const selector = createReaderSelector( + UserFragment, + '4', + variables, + owner.request, + ); + const differentOwner = { + ...selector, + isWithinUnmatchedTypeRefinement: true, }; expect(areEqualSelectors(selector, differentOwner)).toBe(false); }); + it('returns false for equivalent selectors but with different clientEdgeTraversalPath', () => { + const queryNode = UserQuery; + owner = createOperationDescriptor(queryNode, operationVariables); + /*$FlowFixMe[unclear-type]*/ + const fakeAstNode: Object = {}; + const clientEdgeTraversalInfoA = { + readerClientEdge: fakeAstNode, + clientEdgeDestinationID: 'a', + }; + const clientEdgeTraversalInfoB = { + readerClientEdge: fakeAstNode, + clientEdgeDestinationID: 'b', + }; + const clientEdgeTraversalInfoANewNode = { + /*$FlowFixMe[unclear-type]*/ + readerClientEdge: ({}: Object), + clientEdgeDestinationID: 'a', + }; + const baseSelector = createReaderSelector( + UserFragment, + '4', + variables, + owner.request, + false, + null, + ); + const selectors = [ + baseSelector, + { + ...baseSelector, + clientEdgeTraversalPath: [clientEdgeTraversalInfoA], + }, + { + ...baseSelector, + clientEdgeTraversalPath: [clientEdgeTraversalInfoB], + }, + { + ...baseSelector, + clientEdgeTraversalPath: [ + clientEdgeTraversalInfoANewNode, + clientEdgeTraversalInfoANewNode, + ], + }, + { + ...baseSelector, + clientEdgeTraversalPath: [clientEdgeTraversalInfoANewNode], + }, + { + ...baseSelector, + clientEdgeTraversalPath: [null], + }, + { + ...baseSelector, + clientEdgeTraversalPath: [null, clientEdgeTraversalInfoA], + }, + ]; + for (let i = 0; i < selectors.length - 1; i++) { + for (let j = i + 1; j < selectors.length; j++) { + expect(areEqualSelectors(selectors[i], selectors[j])).toBe(false); + } + } + }); + + it('returns true for equivalent selectors and equivalent owners', () => { + const 
queryNode = UserQuery; + owner = createOperationDescriptor(queryNode, operationVariables); + const selector = createReaderSelector( + UserFragment, + '4', + variables, + owner.request, + ); + // When the owner is different, areEqualSelectors should return false + // even if the 2 selectors represent the same selection + const differentOwner = { + ...selector, + owner: {...owner.request}, + }; + expect(areEqualSelectors(selector, differentOwner)).toBe(true); + }); + it('returns true for equivalent selectors with same owners', () => { const queryNode = UserQuery; owner = createOperationDescriptor(queryNode, operationVariables); @@ -744,6 +845,10 @@ describe('RelayModernSelector', () => { it('returns false for different selectors with owners', () => { const queryNode = UserQuery; owner = createOperationDescriptor(queryNode, operationVariables); + const newOwner = createOperationDescriptor(queryNode, { + ...operationVariables, + size: '16', + }); const selector = createReaderSelector( UserFragment, '4', @@ -764,7 +869,7 @@ describe('RelayModernSelector', () => { }; const differentOwner = { ...selector, - owner: {...owner.request}, + owner: newOwner.request, }; expect(areEqualSelectors(selector, differentID)).toBe(false); expect(areEqualSelectors(selector, differentNode)).toBe(false); diff --git a/packages/relay-runtime/store/__tests__/RelayModernStore-Subscriptions-test.js b/packages/relay-runtime/store/__tests__/RelayModernStore-Subscriptions-test.js index f65c95ba1e938..5181b717768b1 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernStore-Subscriptions-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernStore-Subscriptions-test.js @@ -54,6 +54,7 @@ function assertIsDeeplyFrozen(value: ?{...} | ?$ReadOnlyArray<{...}>): void { value.forEach(item => assertIsDeeplyFrozen(item)); } else if (typeof value === 'object' && value !== null) { for (const key in value) { + // $FlowFixMe[invalid-computed-prop] assertIsDeeplyFrozen(value[key]); } } @@ -63,6 
+64,7 @@ function cloneEventWithSets(event: LogEvent) { const nextEvent = {}; for (const key in event) { if (event.hasOwnProperty(key)) { + // $FlowFixMe[invalid-computed-prop] const val = event[key]; if (val instanceof Set) { // $FlowFixMe[prop-missing] @@ -621,7 +623,9 @@ function cloneEventWithSets(event: LogEvent) { if (!record) { throw new Error('Expected to find record with id client:1'); } - expect(record[INVALIDATED_AT_KEY]).toEqual(1); + expect( + RelayModernRecord.getValue(record, INVALIDATED_AT_KEY), + ).toEqual(1); expect(store.check(owner)).toEqual({status: 'stale'}); }); @@ -657,7 +661,9 @@ function cloneEventWithSets(event: LogEvent) { if (!record) { throw new Error('Expected to find record with id "4"'); } - expect(record[INVALIDATED_AT_KEY]).toEqual(1); + expect( + RelayModernRecord.getValue(record, INVALIDATED_AT_KEY), + ).toEqual(1); expect(store.check(owner)).toEqual({status: 'stale'}); }); }); diff --git a/packages/relay-runtime/store/__tests__/RelayModernStore-test.js b/packages/relay-runtime/store/__tests__/RelayModernStore-test.js index 00ca84b9b6fcb..5ad0350e13106 100644 --- a/packages/relay-runtime/store/__tests__/RelayModernStore-test.js +++ b/packages/relay-runtime/store/__tests__/RelayModernStore-test.js @@ -64,6 +64,7 @@ function assertIsDeeplyFrozen(value: ?{...} | ?$ReadOnlyArray<{...}>): void { value.forEach(item => assertIsDeeplyFrozen(item)); } else if (typeof value === 'object' && value !== null) { for (const key in value) { + // $FlowFixMe[invalid-computed-prop] assertIsDeeplyFrozen(value[key]); } } @@ -73,6 +74,7 @@ function cloneEventWithSets(event: LogEvent) { const nextEvent = {}; for (const key in event) { if (event.hasOwnProperty(key)) { + // $FlowFixMe[invalid-computed-prop] const val = event[key]; if (val instanceof Set) { // $FlowFixMe[prop-missing] @@ -284,6 +286,7 @@ function cloneEventWithSets(event: LogEvent) { }, seenRecords: new Set(Object.keys(data)), missingRequiredFields: null, + errorResponseFields: null, 
missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -335,10 +338,10 @@ function cloneEventWithSets(event: LogEvent) { __id: '4', __fragments: {RelayModernStoreTest4Fragment: {}}, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }, seenRecords: new Set(Object.keys(data)), missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -397,6 +400,7 @@ function cloneEventWithSets(event: LogEvent) { }, seenRecords: new Set(['client:2', '4']), missingRequiredFields: null, + errorResponseFields: null, missingLiveResolverFields: [], relayResolverErrors: [], missingClientEdges: null, @@ -934,7 +938,9 @@ function cloneEventWithSets(event: LogEvent) { if (!record) { throw new Error('Expected to find record with id client:1'); } - expect(record[INVALIDATED_AT_KEY]).toEqual(1); + expect( + RelayModernRecord.getValue(record, INVALIDATED_AT_KEY), + ).toEqual(1); expect(store.check(owner)).toEqual({status: 'stale'}); }); @@ -970,7 +976,9 @@ function cloneEventWithSets(event: LogEvent) { if (!record) { throw new Error('Expected to find record with id "4"'); } - expect(record[INVALIDATED_AT_KEY]).toEqual(1); + expect( + RelayModernRecord.getValue(record, INVALIDATED_AT_KEY), + ).toEqual(1); expect(store.check(owner)).toEqual({status: 'stale'}); }); }); diff --git a/packages/relay-runtime/store/__tests__/RelayOperationTracker-test.js b/packages/relay-runtime/store/__tests__/RelayOperationTracker-test.js index 7571834bfdaab..4515a77ce854a 100644 --- a/packages/relay-runtime/store/__tests__/RelayOperationTracker-test.js +++ b/packages/relay-runtime/store/__tests__/RelayOperationTracker-test.js @@ -176,6 +176,7 @@ describe('RelayOperationTracker', () => { tracker.getPendingOperationsAffectingOwner(QueryOperation1); invariant(result != null, 'Expected to find operations for owner.'); const callback = jest.fn<[void], mixed>(); + // 
$FlowFixMe[unused-promise] result.promise.then(callback); expect(callback).not.toBeCalled(); tracker.complete(MutationOperation1); @@ -192,6 +193,7 @@ describe('RelayOperationTracker', () => { tracker.getPendingOperationsAffectingOwner(QueryOperation1); invariant(result != null, 'Expected to find operations for owner.'); const callback = jest.fn<[void], mixed>(); + // $FlowFixMe[unused-promise] result.promise.then(callback); expect(callback).not.toBeCalled(); tracker.update(MutationOperation2, new Set([QueryOperation1])); @@ -222,6 +224,7 @@ describe('RelayOperationTracker', () => { tracker.getPendingOperationsAffectingOwner(QueryOperation1); invariant(result != null, 'Expected to find operations for owner.'); const callback = jest.fn<[void], mixed>(); + // $FlowFixMe[unused-promise] result.promise.then(callback); expect(callback).not.toBeCalled(); tracker.complete(MutationOperation1); diff --git a/packages/relay-runtime/store/__tests__/RelayPublishQueue-test.js b/packages/relay-runtime/store/__tests__/RelayPublishQueue-test.js index e41e5f4bc9658..01cea50611a5b 100644 --- a/packages/relay-runtime/store/__tests__/RelayPublishQueue-test.js +++ b/packages/relay-runtime/store/__tests__/RelayPublishQueue-test.js @@ -10,10 +10,16 @@ 'use strict'; +import type {PayloadData, PayloadError} from '../network/RelayNetworkTypes'; +import type {NormalizationOptions} from './RelayResponseNormalizer'; +import type { + NormalizationSelector, + RelayResponsePayload, +} from './RelayStoreTypes'; + const {graphql} = require('../../query/GraphQLTag'); const getRelayHandleKey = require('../../util/getRelayHandleKey'); const defaultGetDataID = require('../defaultGetDataID'); -const normalizeRelayPayload = require('../normalizeRelayPayload'); const { createOperationDescriptor, } = require('../RelayModernOperationDescriptor'); @@ -21,6 +27,7 @@ const RelayModernRecord = require('../RelayModernRecord'); const RelayModernStore = require('../RelayModernStore'); const RelayPublishQueue = 
require('../RelayPublishQueue'); const RelayRecordSource = require('../RelayRecordSource'); +const RelayResponseNormalizer = require('../RelayResponseNormalizer'); const { ID_KEY, REF_KEY, @@ -855,7 +862,6 @@ describe('RelayPublishQueue', () => { __id: '4', __fragments: {RelayPublishQueueTest1Fragment: {}}, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, name: 'Zuck', }, nodes: [{name: 'Zuck'}], @@ -1967,3 +1973,26 @@ describe('RelayPublishQueue', () => { }); }); }); + +function normalizeRelayPayload( + selector: NormalizationSelector, + payload: PayloadData, + errors: ?Array, + options: NormalizationOptions, +): RelayResponsePayload { + const source = RelayRecordSource.create(); + source.set( + selector.dataID, + RelayModernRecord.create(selector.dataID, ROOT_TYPE), + ); + const relayPayload = RelayResponseNormalizer.normalize( + source, + selector, + payload, + options, + ); + return { + ...relayPayload, + errors, + }; +} diff --git a/packages/relay-runtime/store/__tests__/RelayReader-AliasedFragments-test.js b/packages/relay-runtime/store/__tests__/RelayReader-AliasedFragments-test.js index ee5ba1a470299..277bf0a651b15 100644 --- a/packages/relay-runtime/store/__tests__/RelayReader-AliasedFragments-test.js +++ b/packages/relay-runtime/store/__tests__/RelayReader-AliasedFragments-test.js @@ -68,7 +68,140 @@ describe('Fragment Spreads', () => { RelayReaderAliasedFragmentsTest_user: {}, }, __fragmentOwner: operation.request, - __isWithinUnmatchedTypeRefinement: false, + }, + }, + }); + }); + + it('Reads an conditional aliased fragment as its own field (alias then skip)', () => { + const userTypeID = generateTypeID('User'); + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + }, + [userTypeID]: { + __id: userTypeID, + __typename: TYPE_SCHEMA_TYPE, + }, + }); + + graphql` + fragment 
RelayReaderAliasedFragmentsTestConditionalFragment on User { + name + } + `; + const FooQuery = graphql` + query RelayReaderAliasedFragmentsTestConditionalQuery( + $someCondition: Boolean! + ) { + me { + ...RelayReaderAliasedFragmentsTestConditionalFragment + @alias(as: "aliased_fragment") + @skip(if: $someCondition) + } + } + `; + const operationSkipped = createOperationDescriptor(FooQuery, { + someCondition: true, + }); + const {data: dataSkipped, isMissingData: isMissingDataSkipped} = read( + source, + operationSkipped.fragment, + ); + expect(isMissingDataSkipped).toBe(false); + expect(dataSkipped).toEqual({ + me: { + // aliased_fragment is not added here + }, + }); + + const operationNotSkipped = createOperationDescriptor(FooQuery, { + someCondition: false, + }); + const {data, isMissingData} = read(source, operationNotSkipped.fragment); + expect(isMissingData).toBe(false); + expect(data).toEqual({ + me: { + aliased_fragment: { + __id: '1', + __fragments: { + RelayReaderAliasedFragmentsTestConditionalFragment: {}, + }, + __fragmentOwner: operationNotSkipped.request, + }, + }, + }); + }); + + it('Reads an conditional aliased fragment as its own field (skip then alias)', () => { + const userTypeID = generateTypeID('User'); + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + }, + [userTypeID]: { + __id: userTypeID, + __typename: TYPE_SCHEMA_TYPE, + }, + }); + + graphql` + fragment RelayReaderAliasedFragmentsTestConditional2Fragment on User { + name + } + `; + const FooQuery = graphql` + query RelayReaderAliasedFragmentsTestConditional2Query( + $someCondition: Boolean! 
+ ) { + me { + ...RelayReaderAliasedFragmentsTestConditionalFragment + @skip(if: $someCondition) + @alias(as: "aliased_fragment") + } + } + `; + const operationSkipped = createOperationDescriptor(FooQuery, { + someCondition: true, + }); + const {data: dataSkipped, isMissingData: isMissingDataSkipped} = read( + source, + operationSkipped.fragment, + ); + expect(isMissingDataSkipped).toBe(false); + expect(dataSkipped).toEqual({ + me: { + // aliased_fragment is not added here + }, + }); + + const operationNotSkipped = createOperationDescriptor(FooQuery, { + someCondition: false, + }); + const {data, isMissingData} = read(source, operationNotSkipped.fragment); + expect(isMissingData).toBe(false); + expect(data).toEqual({ + me: { + aliased_fragment: { + __id: '1', + __fragments: { + RelayReaderAliasedFragmentsTestConditionalFragment: {}, + }, + __fragmentOwner: operationNotSkipped.request, }, }, }); @@ -245,6 +378,47 @@ describe('Inline Fragments', () => { }); }); + it('Reads an aliased inline fragment without a type condition as its own field', () => { + const userTypeID = generateTypeID('User'); + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + name: 'Chelsea', + }, + [userTypeID]: { + __id: userTypeID, + __typename: TYPE_SCHEMA_TYPE, + }, + }); + + const FooQuery = graphql` + query RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery { + me { + ... 
@alias(as: "aliased_fragment") { + name @required(action: NONE) + } + } + } + `; + const operation = createOperationDescriptor(FooQuery, {}); + const {data, isMissingData} = read(source, operation.fragment); + expect(isMissingData).toBe(false); + expect(data).toEqual({ + me: { + aliased_fragment: { + name: 'Chelsea', + }, + }, + }); + }); + it('Reads null if the fragment is on a concrete type that does not match the abstract parent selection.', () => { const userTypeID = generateTypeID('User'); const source = RelayRecordSource.create({ diff --git a/packages/relay-runtime/store/__tests__/RelayReader-CatchFields-test.js b/packages/relay-runtime/store/__tests__/RelayReader-CatchFields-test.js new file mode 100644 index 0000000000000..4e46d770d6829 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/RelayReader-CatchFields-test.js @@ -0,0 +1,369 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); +const { + createOperationDescriptor, +} = require('../RelayModernOperationDescriptor'); +const {read} = require('../RelayReader'); +const RelayRecordSource = require('../RelayRecordSource'); +const RelayReaderCatchFieldsTest0Query = require('./__mocks__/RelayReaderCatchFieldsTest0Query.graphql.js'); +const RelayReaderCatchFieldsTest1Query = require('./__mocks__/RelayReaderCatchFieldsTest1Query.graphql.js'); +const RelayReaderCatchFieldsTest2Query = require('./__mocks__/RelayReaderCatchFieldsTest2Query.graphql.js'); + +describe('RelayReader @catch', () => { + describe('when catch is enabled', () => { + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE = true; + }); + + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + const wasCatchEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE; + it('if scalar has @catch(to: NULL) - scalar value should be null, and nothing should throw or catch', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest0Query { + // me { + // lastName @catch(to: NULL) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest0Query, + {id: '1'}, + ); + const {data} = read(source, operation.fragment); + expect(data).toEqual({me: {lastName: null}}); + }); + + it('if scalar has catch to RESULT - scalar value should provide the error', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + 
__typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest1Query { + // me { + // lastName @catch(to: RESULT) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest1Query, + {id: '1'}, + ); + const {data, errorResponseFields} = read(source, operation.fragment); + expect(data).toEqual({ + me: { + lastName: { + ok: false, + errors: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + expect(errorResponseFields).toEqual([ + { + path: 'me.lastName', + to: 'RESULT', + error: { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + owner: 'RelayReaderCatchFieldsTest1Query', + }, + ]); + }); + + it('if scalar has catch to RESULT with nested required', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest1Query { + // me @catch { + // lastName @required(action: THROW) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest2Query, + {id: '1'}, + ); + const {data, errorResponseFields, missingRequiredFields} = read( + source, + operation.fragment, + ); + expect(data).toEqual({ + me: null, + }); + + expect(missingRequiredFields).toBeNull(); + expect(errorResponseFields).toEqual([ + { + owner: 'RelayReaderCatchFieldsTest2Query', + path: 'me.lastName', + error: { + message: + "Relay: Missing @required value at path 
'me.lastName' in 'RelayReaderCatchFieldsTest2Query'.", + }, + to: 'RESULT', + }, + ]); + }); + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE = + wasCatchEnabled; + }); + }); + describe('when catch is disabled', () => { + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE = false; + }); + + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + const wasCatchEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE; + it('if scalar has @catch(to: NULL) - scalar value should be null, and nothing should throw or catch', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest0Query { + // me { + // lastName @catch(to: NULL) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest0Query, + {id: '1'}, + ); + const {data} = read(source, operation.fragment); + expect(data).toEqual({me: {lastName: null}}); + }); + + it('if scalar has @catch(to: RESULT) - scalar value should provide the value as a CatchField object', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query 
RelayReaderCatchFieldsTest1Query { + // me { + // lastName @catch(to: RESULT) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest1Query, + {id: '1'}, + ); + const {data, errorResponseFields} = read(source, operation.fragment); + expect(data).toEqual({ + me: { + lastName: null, + }, + }); + + expect(errorResponseFields).toEqual([ + { + path: 'me.lastName', + error: { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + owner: 'RelayReaderCatchFieldsTest1Query', + }, + ]); + }); + + it('if scalar has catch to RESULT with nested required', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest1Query { + // me @catch { + // lastName @required(action: THROW) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest2Query, + {id: '1'}, + ); + const {data, errorResponseFields, missingRequiredFields} = read( + source, + operation.fragment, + ); + expect(data).toEqual({ + me: null, + }); + expect(missingRequiredFields).toEqual({ + action: 'THROW', + field: {owner: 'RelayReaderCatchFieldsTest2Query', path: 'me.lastName'}, + }); + expect(errorResponseFields).toBeNull(); + }); + + it('if scalar has catch to RESULT with nested required THROW - do nothing', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + }, + }); + + // Mocking the query below with RelayReaderCatchFieldsTest0Query + // const FooQuery = graphql` + // query RelayReaderCatchFieldsTest1Query { + // me @catch { + // lastName @required(action: 
THROW) + // } + // } + // `; + const operation = createOperationDescriptor( + RelayReaderCatchFieldsTest2Query, + {id: '1'}, + ); + const {data, errorResponseFields, missingRequiredFields} = read( + source, + operation.fragment, + ); + expect(data).toEqual({ + me: null, + }); + + expect(missingRequiredFields).toEqual({ + action: 'THROW', + field: {owner: 'RelayReaderCatchFieldsTest2Query', path: 'me.lastName'}, + }); + expect(errorResponseFields).toBeNull(); + }); + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE = + wasCatchEnabled; + }); + }); +}); +// RelayReaderCatchFieldsTest3Query diff --git a/packages/relay-runtime/store/__tests__/RelayReader-ClientEdges-test.js b/packages/relay-runtime/store/__tests__/RelayReader-ClientEdges-test.js index 35ed53c6c0d26..4add14b26d5cd 100644 --- a/packages/relay-runtime/store/__tests__/RelayReader-ClientEdges-test.js +++ b/packages/relay-runtime/store/__tests__/RelayReader-ClientEdges-test.js @@ -33,12 +33,10 @@ disallowWarnings(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); const BASIC_QUERY = graphql` @@ -156,7 +154,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.client_edge?.name).toEqual('Bob'); expect(Array.from(seenRecords).sort()).toEqual([ '1', @@ -190,7 +188,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; 
expect(me?.client_edge?.name).toEqual(undefined); expect(Array.from(seenRecords).sort()).toEqual([ '1', @@ -288,7 +286,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.the_alias?.name).toEqual('Bob'); expect(me?.client_edge).toBeUndefined(); expect(Array.from(seenRecords).sort()).toEqual([ @@ -323,7 +321,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.null_client_edge).toBe(null); expect(Array.from(seenRecords).sort()).toEqual([ '1', @@ -363,7 +361,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.client_edge?.author).toEqual(undefined); expect(Array.from(seenRecords).sort()).toEqual([ '1', @@ -411,7 +409,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.client_edge?.name).toEqual(undefined); expect(Array.from(seenRecords).sort()).toEqual(['1337']); expect(missingClientEdges?.length).toEqual(1); @@ -446,7 +444,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.client_edge?.name).toEqual(undefined); expect(Array.from(seenRecords).sort()).toEqual([ '1', @@ -498,7 
+496,7 @@ describe('RelayReader Client Edges behavior', () => { resolverCache, ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me?.client_edge?.author).toEqual(undefined); expect(Array.from(seenRecords).sort()).toEqual([ '1', diff --git a/packages/relay-runtime/store/__tests__/RelayReader-RelayErrorHandling-test.js b/packages/relay-runtime/store/__tests__/RelayReader-RelayErrorHandling-test.js new file mode 100644 index 0000000000000..314d5dbdd7a63 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/RelayReader-RelayErrorHandling-test.js @@ -0,0 +1,130 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @oncall relay + */ + +'use strict'; + +const {graphql} = require('../../query/GraphQLTag'); +const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); +const { + createOperationDescriptor, +} = require('../RelayModernOperationDescriptor'); +const {read} = require('../RelayReader'); +const RelayRecordSource = require('../RelayRecordSource'); + +describe('RelayReader error fields', () => { + describe('when field error handling is enabled', () => { + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + }); + + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + it('adds the errors to errorResponseFields', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + const FooQuery = graphql` + query 
RelayReaderRelayErrorHandlingTest1Query { + me { + lastName + } + } + `; + const operation = createOperationDescriptor(FooQuery, {id: '1'}); + const {data, errorResponseFields} = read(source, operation.fragment); + expect(data).toEqual({me: {lastName: null}}); + expect(errorResponseFields).toEqual([ + { + owner: 'RelayReaderRelayErrorHandlingTest1Query', + path: 'me.lastName', + error: { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + }, + ]); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); + + describe('when field error handling is disabled', () => { + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = false; + }); + + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + it('errorResponseFields is null', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + const FooQuery = graphql` + query RelayReaderRelayErrorHandlingTest2Query { + me { + lastName + } + } + `; + const operation = createOperationDescriptor(FooQuery, {id: '1'}); + const {data, errorResponseFields} = read(source, operation.fragment); + expect(data).toEqual({me: {lastName: null}}); + expect(errorResponseFields).toEqual(null); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); +}); diff --git a/packages/relay-runtime/store/__tests__/RelayReader-RequiredFields-test.js b/packages/relay-runtime/store/__tests__/RelayReader-RequiredFields-test.js index 10f30e9ca683e..c80a50eb90302 100644 --- a/packages/relay-runtime/store/__tests__/RelayReader-RequiredFields-test.js +++ 
b/packages/relay-runtime/store/__tests__/RelayReader-RequiredFields-test.js @@ -16,7 +16,15 @@ const { } = require('../RelayModernOperationDescriptor'); const {read} = require('../RelayReader'); const RelayRecordSource = require('../RelayRecordSource'); -const {createReaderSelector, getPluralSelector} = require('relay-runtime'); +const { + RelayFeatureFlags, + createReaderSelector, + getPluralSelector, +} = require('relay-runtime'); +const { + LiveResolverCache, +} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverCache'); +const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore'); describe('RelayReader @required', () => { it('bubbles @required(action: LOG) scalars up to LinkedField', () => { @@ -47,6 +55,76 @@ describe('RelayReader @required', () => { expect(data).toEqual({me: null}); }); + it('bubbles @required(action: LOG) up to aliased inline fragment without type condition', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + backgroundImage: {__ref: 'client:2'}, + }, + 'client:2': { + __id: 'client:2', + __typename: 'Image', + uri: null, + }, + }); + const FooQuery = graphql` + query RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery { + me { + ... 
@alias(as: "requiredFields") { + backgroundImage @required(action: LOG) { + uri @required(action: LOG) + } + } + } + } + `; + const operation = createOperationDescriptor(FooQuery, {id: '1'}); + const {data} = read(source, operation.fragment); + expect(data).toEqual({me: {requiredFields: null}}); + }); + + it('bubbles @required(action: LOG) up to aliased inline fragment _with_ type condition', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + backgroundImage: {__ref: 'client:2'}, + }, + 'client:2': { + __id: 'client:2', + __typename: 'Image', + uri: null, + }, + }); + const FooQuery = graphql` + query RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery { + me { + ... on User @alias(as: "requiredFields") { + backgroundImage @required(action: LOG) { + uri @required(action: LOG) + } + } + } + } + `; + const operation = createOperationDescriptor(FooQuery, {id: '1'}); + const {data} = read(source, operation.fragment); + expect(data).toEqual({me: {requiredFields: null}}); + }); + it('if two @required(action: THROW) errors cascade, report the more deeply nested one', () => { const source = RelayRecordSource.create({ 'client:root': { @@ -791,4 +869,153 @@ describe('RelayReader @required', () => { const data = pluralSelector.selectors.map(s => read(source, s).data); expect(data).toEqual([{username: 'Wendy'}, null]); }); + + describe('client edge with @required', () => { + beforeEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; + }); + afterEach(() => { + RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; + }); + test('throws when missing required field', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + }, + }); + const FooQuery = graphql` + query 
RelayReaderRequiredFieldsTest25Query { + me { + client_object(return_null: true) @required(action: THROW) { + description + } + } + } + `; + const store = new LiveResolverStore(source); + const operation = createOperationDescriptor(FooQuery, {}); + const resolverCache = new LiveResolverCache(() => source, store); + const {missingRequiredFields} = read( + source, + operation.fragment, + resolverCache, + ); + expect(missingRequiredFields).toEqual({ + action: 'THROW', + field: { + owner: 'RelayReaderRequiredFieldsTest25Query', + path: 'me.client_object', + }, + }); + }); + + test('does not throw when required field is present', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + birthdate: {__ref: 'client:2'}, + }, + 'client:2': { + month: 3, + day: 11, + }, + }); + const FooQuery = graphql` + query RelayReaderRequiredFieldsTest26Query { + me { + astrological_sign @required(action: THROW) { + name + } + } + } + `; + + const store = new LiveResolverStore(source); + const operation = createOperationDescriptor(FooQuery, {}); + const resolverCache = new LiveResolverCache(() => source, store); + const {data, missingRequiredFields} = read( + source, + operation.fragment, + resolverCache, + ); + expect(data).toEqual({me: {astrological_sign: {name: 'Pisces'}}}); + expect(missingRequiredFields).toBe(null); + }); + + test('does not throw when required plural field is present', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + }, + }); + const FooQuery = graphql` + query RelayReaderRequiredFieldsTest27Query { + all_astrological_signs @required(action: THROW) { + name + } + } + `; + + const store = new LiveResolverStore(source); + const operation = createOperationDescriptor(FooQuery, 
{}); + const resolverCache = new LiveResolverCache(() => source, store); + const {data, missingRequiredFields} = read( + source, + operation.fragment, + resolverCache, + ); + expect(data.all_astrological_signs.length).toBe(12); + expect(missingRequiredFields).toBe(null); + }); + + test('does not throw when @live required field is suspended', () => { + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + const FooQuery = graphql` + query RelayReaderRequiredFieldsTest28Query { + live_user_resolver_always_suspend + @waterfall + @required(action: THROW) { + name + } + } + `; + const store = new LiveResolverStore(source); + const operation = createOperationDescriptor(FooQuery, {}); + const resolverCache = new LiveResolverCache(() => source, store); + const snapshot = read(source, operation.fragment, resolverCache); + expect(snapshot.missingRequiredFields).toEqual(null); + expect(snapshot.missingLiveResolverFields).toEqual([ + { + path: 'RelayReaderRequiredFieldsTest28Query.live_user_resolver_always_suspend', + liveStateID: 'client:root:live_user_resolver_always_suspend', + }, + ]); + }); + }); }); diff --git a/packages/relay-runtime/store/__tests__/RelayReader-Resolver-test.js b/packages/relay-runtime/store/__tests__/RelayReader-Resolver-test.js index a9bd4ab3d696f..63d93aa6a17d2 100644 --- a/packages/relay-runtime/store/__tests__/RelayReader-Resolver-test.js +++ b/packages/relay-runtime/store/__tests__/RelayReader-Resolver-test.js @@ -15,17 +15,26 @@ import type {Snapshot} from '../RelayStoreTypes'; const { constant_dependent: UserConstantDependentResolver, } = require('./resolvers/UserConstantDependentResolver'); +const invariant = require('invariant'); const nullthrows = require('nullthrows'); const {RelayFeatureFlags} = require('relay-runtime'); const RelayNetwork = require('relay-runtime/network/RelayNetwork'); const {graphql} = require('relay-runtime/query/GraphQLTag'); +const { + 
LiveResolverCache, +} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverCache'); +const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore'); const RelayModernEnvironment = require('relay-runtime/store/RelayModernEnvironment'); const { createOperationDescriptor, } = require('relay-runtime/store/RelayModernOperationDescriptor'); +const RelayModernRecord = require('relay-runtime/store/RelayModernRecord'); const RelayModernStore = require('relay-runtime/store/RelayModernStore'); const {read} = require('relay-runtime/store/RelayReader'); const RelayRecordSource = require('relay-runtime/store/RelayRecordSource'); +const { + RELAY_RESOLVER_INVALIDATION_KEY, +} = require('relay-runtime/store/RelayStoreUtils'); const {RecordResolverCache} = require('relay-runtime/store/ResolverCache'); const { disallowConsoleErrors, @@ -41,9 +50,22 @@ beforeEach(() => { afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; + // The call count of the resolver used in this test + UserConstantDependentResolver._relayResolverTestCallCount = undefined; }); -describe('Relay Resolver', () => { +describe.each([ + { + name: 'RecordResolverCache', + ResolverCache: RecordResolverCache, + RelayStore: RelayModernStore, + }, + { + name: 'LiveResolverCache', + ResolverCache: LiveResolverCache, + RelayStore: LiveResolverStore, + }, +])('Relay Resolver with $name', ({ResolverCache, RelayStore}) => { it('returns the result of the resolver function', () => { const source = RelayRecordSource.create({ 'client:root': { @@ -58,7 +80,7 @@ describe('Relay Resolver', () => { name: 'Alice', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest1Query { @@ -73,7 +95,7 @@ describe('Relay Resolver', () => { const {data, seenRecords} = read(source, operation.fragment, resolverCache); // $FlowFixMe[unclear-type] - 
read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me.greeting).toEqual('Hello, Alice!'); // Resolver result expect(me.name).toEqual(undefined); // Fields needed by resolver's fragment don't end up in the result @@ -98,7 +120,7 @@ describe('Relay Resolver', () => { name: 'Alice', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTestCustomGreetingDynamicQuery( @@ -119,7 +141,7 @@ describe('Relay Resolver', () => { const {data} = read(source, operation.fragment, resolverCache); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me.dynamic_greeting).toEqual('Dynamic Greeting, Alice!'); expect(me.greetz).toEqual('Greetz, Alice!'); expect(me.willkommen).toEqual('Willkommen, Alice!'); @@ -135,12 +157,96 @@ describe('Relay Resolver', () => { ); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me: meWithNewVariables} = (dataWithNewVariables: any); + const {me: meWithNewVariables}: any = dataWithNewVariables; expect(meWithNewVariables.dynamic_greeting).toEqual( 'New Dynamic Greeting, Alice!', ); }); + describe('Relay resolver - Field Error Handling', () => { + it('propagates errors from the resolver up to the reader', () => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + const FooQuery = graphql` + query 
RelayReaderResolverTestFieldErrorQuery { + me { + lastName + } + } + `; + + const operation = createOperationDescriptor(FooQuery, {}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); + const {errorResponseFields} = store.lookup(operation.fragment); + expect(errorResponseFields).toEqual([ + { + error: {message: 'There was an error!', path: ['me', 'lastName']}, + owner: 'RelayReaderResolverTestFieldErrorQuery', + path: 'me.lastName', + }, + ]); + }); + + it("doesn't propagate errors from the resolver up to the reader when flag is disabled", () => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = false; + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + lastName: null, + __errors: { + lastName: [ + { + message: 'There was an error!', + path: ['me', 'lastName'], + }, + ], + }, + }, + }); + + const FooQuery = graphql` + query RelayReaderResolverTestFieldError1Query { + me { + lastName + } + } + `; + + const operation = createOperationDescriptor(FooQuery, {}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); + const {errorResponseFields} = store.lookup(operation.fragment); + expect(errorResponseFields).toEqual(null); + }); + }); + it('propagates @required errors from the resolver up to the reader', () => { const source = RelayRecordSource.create({ 'client:root': { @@ -164,7 +270,7 @@ describe('Relay Resolver', () => { `; const operation = createOperationDescriptor(FooQuery, {}); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const {missingRequiredFields} = store.lookup(operation.fragment); expect(missingRequiredFields).toEqual({ action: 'LOG', @@ -206,7 +312,7 @@ describe('Relay Resolver', () => { `; const operation = createOperationDescriptor(FooQuery, {}); - const store = new RelayModernStore(source, 
{gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const {isMissingData} = store.lookup(operation.fragment); expect(isMissingData).toBe(true); @@ -243,7 +349,7 @@ describe('Relay Resolver', () => { `; const operation = createOperationDescriptor(FooQuery, {}); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const {missingRequiredFields} = store.lookup(operation.fragment); expect(missingRequiredFields).toEqual({ action: 'LOG', @@ -288,7 +394,7 @@ describe('Relay Resolver', () => { name: 'Alice', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest11Query { @@ -303,7 +409,7 @@ describe('Relay Resolver', () => { const {data} = read(source, operation.fragment, resolverCache); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me.the_alias).toEqual('Hello, Alice!'); // Resolver result expect(me.greeting).toEqual(undefined); // Unaliased name }); @@ -323,7 +429,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -342,7 +448,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.greeting).toEqual('Hello, Alice!'); environment.commitUpdate(theStore => { const alice = 
nullthrows(theStore.get('1')); @@ -376,7 +482,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -400,7 +506,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.constant_dependent).toEqual(1); expect(resolverInternals._relayResolverTestCallCount).toBe(1); environment.commitUpdate(theStore => { @@ -411,11 +517,82 @@ describe('Relay Resolver', () => { subscription.dispose(); const newSnapshot = store.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me: newMe} = (newSnapshot.data: any); + const {me: newMe}: any = newSnapshot.data; expect(newMe.constant_dependent).toEqual(1); expect(resolverInternals._relayResolverTestCallCount).toBe(1); }); + it.each([true, false])( + 'marks the resolver cache as clean if the upstream has not changed with RelayFeatureFlags.MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD=%s', + markClean => { + RelayFeatureFlags.MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD = + markClean; + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + me: {__ref: '1'}, + }, + '1': { + __id: '1', + id: '1', + __typename: 'User', + name: 'Alice', + }, + }); + + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + + const FooQuery = graphql` + query 
RelayReaderResolverTestMarkCleanQuery { + me { + constant_dependent + } + } + `; + + const cb = jest.fn<[Snapshot], void>(); + const operation = createOperationDescriptor(FooQuery, {}); + const snapshot = store.lookup(operation.fragment); + const subscription = store.subscribe(snapshot, cb); + // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: + const {me}: any = snapshot.data; + expect(me.constant_dependent).toEqual(1); + environment.commitUpdate(theStore => { + const alice = nullthrows(theStore.get('1')); + alice.setValue('Alicia', 'name'); + }); + subscription.dispose(); + + // Rereading the resolver's fragment, only to find that no fields that we read have changed, + // should clear the RELAY_RESOLVER_INVALIDATION_KEY. + const resolverCacheRecord = environment + .getStore() + .getSource() + .get('client:1:constant_dependent'); + invariant( + resolverCacheRecord != null, + 'Expected a resolver cache record', + ); + + const isMaybeInvalid = RelayModernRecord.getValue( + resolverCacheRecord, + RELAY_RESOLVER_INVALIDATION_KEY, + ); + + if (markClean) { + expect(isMaybeInvalid).toBe(false); + } else { + // Without the feature flag enabled, T185969900 still reproduces. 
+ expect(isMaybeInvalid).toBe(true); + } + }, + ); + it('handles optimistic updates (applied after subscribing)', () => { const source = RelayRecordSource.create({ 'client:root': { @@ -431,7 +608,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -450,7 +627,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.greeting).toEqual('Hello, Alice!'); const checkUpdate = ( @@ -469,7 +646,7 @@ describe('Relay Resolver', () => { ); const newSnapshot = store.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me: newMe} = (newSnapshot.data: any); + const {me: newMe}: any = newSnapshot.data; expect(newMe.greeting).toEqual(expectedGreeting); }; @@ -500,7 +677,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -526,7 +703,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.greeting).toEqual('Hello, Alicia!'); const checkUpdate = ( @@ -545,7 +722,7 @@ 
describe('Relay Resolver', () => { ); const newSnapshot = store.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me: newMe} = (newSnapshot.data: any); + const {me: newMe}: any = newSnapshot.data; expect(newMe.greeting).toEqual(expectedGreeting); }; @@ -589,7 +766,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -608,7 +785,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.best_friend_greeting).toEqual('Hello, Bob!'); environment.commitUpdate(theStore => { const bob = nullthrows(theStore.get('2')); @@ -626,7 +803,7 @@ describe('Relay Resolver', () => { ); const newSnapshot = store.lookup(operation.fragment); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me: newMe} = (newSnapshot.data: any); + const {me: newMe}: any = newSnapshot.data; expect(newMe.best_friend_greeting).toEqual('Hello, Bilbo!'); subscription.dispose(); }); @@ -646,7 +823,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -665,7 +842,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // 
$FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.shouted_greeting).toEqual('HELLO, ALICE!'); environment.commitUpdate(theStore => { const alice = nullthrows(theStore.get('1')); @@ -719,7 +896,7 @@ describe('Relay Resolver', () => { }, }); - const store = new RelayModernStore(source, {gcReleaseBufferSize: 0}); + const store = new RelayStore(source, {gcReleaseBufferSize: 0}); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, @@ -738,7 +915,7 @@ describe('Relay Resolver', () => { const snapshot = store.lookup(operation.fragment); const subscription = store.subscribe(snapshot, cb); // $FlowFixMe[unclear-type] - lookup() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (snapshot.data: any); + const {me}: any = snapshot.data; expect(me.best_friend_shouted_greeting).toEqual('HELLO, BOB!'); environment.commitUpdate(updateStore => { const bob = nullthrows(updateStore.get('2')); @@ -814,14 +991,14 @@ describe('Relay Resolver', () => { } `; - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const operation = createOperationDescriptor(FooQuery, {id: '1'}); const {data} = read(source, operation.fragment, resolverCache); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {me} = (data: any); + const {me}: any = data; expect(me).toBe(null); // Resolver result }); @@ -847,7 +1024,7 @@ describe('Relay Resolver', () => { } `; - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const operation = createOperationDescriptor(FooQuery, {id: '1'}); @@ -913,7 +1090,7 @@ describe('Relay Resolver', () => { } `; - const resolverCache = new RecordResolverCache(() => 
source); + const resolverCache = new ResolverCache(() => source); const operation = createOperationDescriptor(FooQuery, {id: '1'}); @@ -972,7 +1149,7 @@ describe('Relay Resolver', () => { } `; - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const operation = createOperationDescriptor(FooQuery, {}); @@ -1007,7 +1184,7 @@ describe('Relay Resolver', () => { __id: '1', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest15Query { @@ -1026,7 +1203,7 @@ describe('Relay Resolver', () => { expect(isMissingData).toBe(false); // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - const {undefined_field} = (data: any); + const {undefined_field}: any = data; expect(undefined_field).toBe(undefined); // Resolver result }); @@ -1049,7 +1226,7 @@ describe('Relay Resolver', () => { uri: 'http://my-url-1.5', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest16Query($scale: Float!) 
{ @@ -1071,9 +1248,8 @@ describe('Relay Resolver', () => { expect(isMissingData).toBe(false); const { - me: {user_profile_picture_uri_with_scale}, - // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - } = (data: any); + me: {user_profile_picture_uri_with_scale}, // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: + }: any = data; expect(user_profile_picture_uri_with_scale).toBe('http://my-url-1.5'); // Resolver result }); @@ -1096,7 +1272,7 @@ describe('Relay Resolver', () => { uri: 'http://my-url-1.5', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest17Query { @@ -1116,9 +1292,8 @@ describe('Relay Resolver', () => { expect(isMissingData).toBe(false); const { - me: {user_profile_picture_uri_with_scale_and_default_value}, - // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - } = (data: any); + me: {user_profile_picture_uri_with_scale_and_default_value}, // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: + }: any = data; expect(user_profile_picture_uri_with_scale_and_default_value).toBe( 'http://my-url-1.5', ); // Resolver result @@ -1143,7 +1318,7 @@ describe('Relay Resolver', () => { uri: 'http://my-url-2', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest18Query { @@ -1166,9 +1341,8 @@ describe('Relay Resolver', () => { expect(isMissingData).toBe(false); const { - me: {profile_picture2}, - // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - } = (data: any); + me: {profile_picture2}, // 
$FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: + }: any = data; expect(profile_picture2).toBe('http://my-url-2'); // Resolver result }); @@ -1197,7 +1371,7 @@ describe('Relay Resolver', () => { uri: 'http://my-url-1.5', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); const FooQuery = graphql` query RelayReaderResolverTest19Query($scale: Float) { @@ -1225,9 +1399,8 @@ describe('Relay Resolver', () => { expect(isMissingData).toBe(false); const { - me: {profile_picture2, big_profile_picture}, - // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: - } = (data: any); + me: {profile_picture2, big_profile_picture}, // $FlowFixMe[unclear-type] - read() doesn't have the nice types of reading a fragment through the actual APIs: + }: any = data; expect(profile_picture2).toBe('http://my-url-2'); // Resolver result expect(big_profile_picture).toEqual({ uri: 'http://my-url-1.5', @@ -1253,7 +1426,9 @@ describe('Relay Resolver', () => { __id: '1', id: '1', __typename: 'User', - 'profile_picture(scale:1.5)': {__ref: '1:profile_picture(scale:1.5)'}, + 'profile_picture(scale:1.5)': { + __ref: '1:profile_picture(scale:1.5)', + }, 'profile_picture(scale:2)': {__ref: '1:profile_picture(scale:2)'}, }, '1:profile_picture(scale:1.5)': { @@ -1267,7 +1442,7 @@ describe('Relay Resolver', () => { uri: 'http://my-url-2', }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); let operation = createOperationDescriptor(Query, {scale: 1.5}); let readResult = read(source, operation.fragment, resolverCache); @@ -1306,7 +1481,9 @@ describe('Relay Resolver', () => { __id: '1', id: '1', __typename: 'User', - 'profile_picture(scale:1.5)': {__ref: '1:profile_picture(scale:1.5)'}, + 'profile_picture(scale:1.5)': { + __ref: 
'1:profile_picture(scale:1.5)', + }, 'profile_picture(scale:2)': {__ref: '1:profile_picture(scale:2)'}, }, '1:profile_picture(scale:1.5)': { @@ -1320,7 +1497,7 @@ describe('Relay Resolver', () => { // uri: 'http://my-url-2', this field now is missing }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); let operation = createOperationDescriptor(Query, {scale: 1.5}); let readResult = read(source, operation.fragment, resolverCache); @@ -1362,7 +1539,9 @@ describe('Relay Resolver', () => { __id: '1', id: '1', __typename: 'User', - 'profile_picture(scale:1.5)': {__ref: '1:profile_picture(scale:1.5)'}, + 'profile_picture(scale:1.5)': { + __ref: '1:profile_picture(scale:1.5)', + }, 'profile_picture(scale:2)': {__ref: '1:profile_picture(scale:2)'}, }, '1:profile_picture(scale:1.5)': { @@ -1377,7 +1556,7 @@ describe('Relay Resolver', () => { }, }); - const resolverCache = new RecordResolverCache(() => source); + const resolverCache = new ResolverCache(() => source); let operation = createOperationDescriptor(Query, { scale: 1.5, @@ -1410,7 +1589,10 @@ describe('Relay Resolver', () => { }); // Changing both arguments - operation = createOperationDescriptor(Query, {scale: 1.5, name: 'Clair'}); + operation = createOperationDescriptor(Query, { + scale: 1.5, + name: 'Clair', + }); readResult = read(source, operation.fragment, resolverCache); expect(readResult.isMissingData).toBe(false); expect(readResult.data).toEqual({ diff --git a/packages/relay-runtime/store/__tests__/RelayReader-test.js b/packages/relay-runtime/store/__tests__/RelayReader-test.js index 0741555302792..d42ddff5702fe 100644 --- a/packages/relay-runtime/store/__tests__/RelayReader-test.js +++ b/packages/relay-runtime/store/__tests__/RelayReader-test.js @@ -11,7 +11,6 @@ 'use strict'; const {getRequest, graphql} = require('../../query/GraphQLTag'); -const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); const { 
createOperationDescriptor, } = require('../RelayModernOperationDescriptor'); @@ -269,7 +268,6 @@ describe('RelayReader', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(data.__fragmentOwner).toBe(owner.request); expect(Array.from(seenRecords.values()).sort()).toEqual(['1']); @@ -316,7 +314,6 @@ describe('RelayReader', () => { }, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(Array.from(seenRecords.values()).sort()).toEqual(['1']); }); @@ -360,7 +357,6 @@ describe('RelayReader', () => { }, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, }); expect(Array.from(seenRecords.values()).sort()).toEqual(['1']); }); @@ -631,22 +627,19 @@ describe('RelayReader', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'PlainUserNameRenderer', - __module_component_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: - 'PlainUserNameRenderer.react', - __module_operation_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: - 'RelayReaderTestWhenMatchDirectiveIsPresentPlainUserNameRenderer_name$normalization.graphql', - plaintext: 'plain name', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'PlainUserNameRenderer', + __module_component_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: + 'PlainUserNameRenderer.react', + __module_operation_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: 
+ 'RelayReaderTestWhenMatchDirectiveIsPresentPlainUserNameRenderer_name$normalization.graphql', + plaintext: 'plain name', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -662,20 +655,19 @@ describe('RelayReader', () => { expect(data).toEqual({ id: '1', nameRenderer: { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:1:nameRenderer(supported:"34hjiS")', __fragments: { RelayReaderTestWhenMatchDirectiveIsPresentPlainUserNameRenderer_name: {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __module_component: 'PlainUserNameRenderer.react', }, }); expect(Array.from(seenRecords.values()).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', ]); expect(isMissingData).toBe(false); }); @@ -687,22 +679,19 @@ describe('RelayReader', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: - 'MarkdownUserNameRenderer.react', - __module_operation_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: - 'RelayReaderTestWhenMatchDirectiveIsPresentMarkdownUserNameRenderer_name$normalization.graphql', - markdown: 'markdown payload', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 
'MarkdownUserNameRenderer', + __module_component_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: + 'MarkdownUserNameRenderer.react', + __module_operation_RelayReaderTestWhenMatchDirectiveIsPresentBarFragment: + 'RelayReaderTestWhenMatchDirectiveIsPresentMarkdownUserNameRenderer_name$normalization.graphql', + markdown: 'markdown payload', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -718,20 +707,19 @@ describe('RelayReader', () => { expect(data).toEqual({ id: '1', nameRenderer: { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + __id: 'client:1:nameRenderer(supported:"34hjiS")', __fragments: { RelayReaderTestWhenMatchDirectiveIsPresentMarkdownUserNameRenderer_name: {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __module_component: 'MarkdownUserNameRenderer.react', }, }); expect(Array.from(seenRecords.values()).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', ]); expect(isMissingData).toBe(false); }); @@ -742,18 +730,15 @@ describe('RelayReader', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'CustomNameRenderer', - customField: 'custom value', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'CustomNameRenderer', + customField: 'custom value', + }, 
'client:root': { __id: 'client:root', __typename: '__Root', @@ -771,7 +756,7 @@ describe('RelayReader', () => { }); expect(Array.from(seenRecords.values()).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', ]); expect(isMissingData).toBe(false); }); @@ -782,8 +767,7 @@ describe('RelayReader', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - null, + 'nameRenderer(supported:"34hjiS")': null, }, 'client:root': { __id: 'client:root', @@ -911,7 +895,6 @@ describe('RelayReader', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __module_component: 'PlainUserNameRenderer.react', }, @@ -964,7 +947,6 @@ describe('RelayReader', () => { {}, }, __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __fragmentPropName: 'name', __module_component: 'MarkdownUserNameRenderer.react', }, @@ -1756,236 +1738,6 @@ describe('RelayReader', () => { ]); }); - describe('feature ENABLE_REACT_FLIGHT_COMPONENT_FIELD', () => { - let FlightQuery; - - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery( - $id: ID! - $count: Int! - ) { - node(id: $id) { - ... 
on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('should read data correctly when the ReactFlightClientResponse is valid and present in the store ', () => { - const records = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __id: 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - __typename: 'ReactFlightComponent', - executableDefinitions: [ - { - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - variables: { - id: '2', - }, - }, - ], - tree: { - readRoot() { - return { - $$typeof: Symbol.for('react.element'), - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }; - }, - }, - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - source = RelayRecordSource.create(records); - const {data, isMissingData, seenRecords} = read( - source, - operation.fragment, - ); - expect(isMissingData).toBe(false); - expect(data).toMatchInlineSnapshot(` - Object { - "node": Object { - "flightComponent": Object { - "readRoot": [Function], - }, - }, - } - `); - expect(Array.from(seenRecords.values()).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('should read data correctly when ReactFlightClientResponse is null in the store', () => { - const records = { - '1': { - __id: '1', - __typename: 
'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - null, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - source = RelayRecordSource.create(records); - const {data, isMissingData, seenRecords} = read( - source, - operation.fragment, - ); - expect(isMissingData).toBe(false); - expect(data).toMatchInlineSnapshot(` - Object { - "node": Object { - "flightComponent": null, - }, - } - `); - expect(Array.from(seenRecords.values()).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('should be missing data when ReactFlightClientResponse is undefined in the store', () => { - const records = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - undefined, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - source = RelayRecordSource.create(records); - const {data, isMissingData, seenRecords} = read( - source, - operation.fragment, - ); - expect(isMissingData).toBe(true); - expect(data).toMatchInlineSnapshot(` - Object { - "node": Object { - 
"flightComponent": undefined, - }, - } - `); - expect(Array.from(seenRecords.values()).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('should be missing data when the linked ReactFlightClientResponseRecord is missing', () => { - const records = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const operation = createOperationDescriptor(FlightQuery, { - count: 10, - id: '1', - }); - source = RelayRecordSource.create(records); - const {data, isMissingData, seenRecords} = read( - source, - operation.fragment, - ); - expect(isMissingData).toBe(true); - expect(data).toMatchInlineSnapshot(` - Object { - "node": Object { - "flightComponent": undefined, - }, - } - `); - expect(Array.from(seenRecords.values()).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - }); - describe('Actor Change', () => { const query = graphql` query RelayReaderTestActorChangeQuery { @@ -2029,7 +1781,6 @@ describe('RelayReader', () => { __viewer: 'viewer-id', __fragmentRef: { __fragmentOwner: owner.request, - __isWithinUnmatchedTypeRefinement: false, __fragments: { RelayReaderTestActorChangeFragment: {}, }, diff --git a/packages/relay-runtime/store/__tests__/RelayReferenceMarker-test.js b/packages/relay-runtime/store/__tests__/RelayReferenceMarker-test.js index 29c1cb794250f..9d6959c75af21 100644 --- a/packages/relay-runtime/store/__tests__/RelayReferenceMarker-test.js +++ b/packages/relay-runtime/store/__tests__/RelayReferenceMarker-test.js 
@@ -11,7 +11,6 @@ 'use strict'; -import type {RecordObjectMap} from '../RelayStoreTypes'; import type {DataID} from 'relay-runtime/util/RelayRuntimeTypes'; import RelayNetwork from '../../network/RelayNetwork'; @@ -27,12 +26,10 @@ import {ROOT_ID} from '../RelayStoreUtils'; beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); describe('RelayReferenceMarker', () => { @@ -239,7 +236,7 @@ describe('RelayReferenceMarker', () => { }); it('marks "handle" nodes with key and filters for queries', () => { - const data: RecordObjectMap = { + const data = { '1': { __id: '1', __typename: 'User', @@ -511,9 +508,11 @@ describe('RelayReferenceMarker', () => { `; loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -526,23 +525,20 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'PlainUserNameRenderer', - __module_component_RelayReferenceMarkerTest3Fragment: - 'PlainUserNameRenderer.react', - __module_operation_RelayReferenceMarkerTest3Fragment: - 'RelayReferenceMarkerTestPlainUserNameRenderer_name$normalization.graphql', - plaintext: 'plain name', - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { + __ref: 
'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'PlainUserNameRenderer', + __module_component_RelayReferenceMarkerTest3Fragment: + 'PlainUserNameRenderer.react', + __module_operation_RelayReferenceMarkerTest3Fragment: + 'RelayReferenceMarkerTestPlainUserNameRenderer_name$normalization.graphql', + plaintext: 'plain name', + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -566,7 +562,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', 'client:root', 'data', ]); @@ -579,23 +575,20 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_RelayReferenceMarkerTest3Fragment: - 'MarkdownUserNameRenderer.react', - __module_operation_RelayReferenceMarkerTest3Fragment: - 'RelayReferenceMarkerTestMarkdownUserNameRenderer_name$normalization.graphql', - markdown: 'markdown payload', - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + __module_component_RelayReferenceMarkerTest3Fragment: + 'MarkdownUserNameRenderer.react', + 
__module_operation_RelayReferenceMarkerTest3Fragment: + 'RelayReferenceMarkerTestMarkdownUserNameRenderer_name$normalization.graphql', + markdown: 'markdown payload', + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -619,7 +612,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', 'client:root', 'data', ]); @@ -634,18 +627,15 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - // NOTE: markdown/data fields are missing, data not processed. + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + // NOTE: markdown/data fields are missing, data not processed. 
+ }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -668,7 +658,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', 'client:root', ]); }); @@ -680,23 +670,20 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_RelayReferenceMarkerTest3Fragment: - 'MarkdownUserNameRenderer.react', - __module_operation_RelayReferenceMarkerTest3Fragment: - 'RelayReferenceMarkerTestMarkdownUserNameRenderer_name$normalization.graphql', - // NOTE: 'markdown' field missing - data: {__ref: 'data'}, + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + __module_component_RelayReferenceMarkerTest3Fragment: + 'MarkdownUserNameRenderer.react', + __module_operation_RelayReferenceMarkerTest3Fragment: + 'RelayReferenceMarkerTestMarkdownUserNameRenderer_name$normalization.graphql', + // NOTE: 'markdown' field missing + data: {__ref: 'data'}, + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -720,7 +707,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 
'client:1:nameRenderer(supported:"34hjiS")', 'client:root', 'data', ]); @@ -733,19 +720,16 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - markdown: 'markdown text', - // NOTE: 'data' field missing + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + markdown: 'markdown text', + // NOTE: 'data' field missing + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -764,7 +748,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', 'client:root', ]); }); @@ -775,18 +759,15 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'CustomNameRenderer', - customField: 'custom value', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 
'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'CustomNameRenderer', + customField: 'custom value', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -805,7 +786,7 @@ describe('RelayReferenceMarker', () => { ); expect(Array.from(references).sort()).toEqual([ '1', - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + 'client:1:nameRenderer(supported:"34hjiS")', 'client:root', ]); }); @@ -816,8 +797,7 @@ describe('RelayReferenceMarker', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - null, + 'nameRenderer(supported:"34hjiS")': null, }, 'client:root': { __id: 'client:root', @@ -888,9 +868,11 @@ describe('RelayReferenceMarker', () => { const references = new Set(); const loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -937,9 +919,11 @@ describe('RelayReferenceMarker', () => { const references = new Set(); const loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -985,9 +969,11 @@ describe('RelayReferenceMarker', () => { const references = new Set(); const loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -1077,9 +1063,11 @@ describe('RelayReferenceMarker', () => { `; 
loader = { get: jest.fn( + // $FlowFixMe[invalid-computed-prop] (moduleName: mixed) => nodes[String(moduleName).replace(/\$.*/, '')], ), load: jest.fn((moduleName: mixed) => + // $FlowFixMe[invalid-computed-prop] Promise.resolve(nodes[String(moduleName).replace(/\$.*/, '')]), ), }; @@ -1559,282 +1547,4 @@ describe('RelayReferenceMarker', () => { expect(Array.from(references).sort()).toEqual(['1', 'client:root']); }); }); - - describe('with feature ENABLE_REACT_FLIGHT_COMPONENT_FIELD', () => { - let FlightQuery; - let InnerQuery; - let operationLoader; - - const readRoot = () => { - return { - $$typeof: Symbol.for('react.element'), - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }; - }; - - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query RelayReferenceMarkerTestFlightQuery($id: ID!, $count: Int!) { - node(id: $id) { - ... on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - InnerQuery = graphql` - query RelayReferenceMarkerTestInnerQuery($id: ID!) { - node(id: $id) { - ... 
on User { - name - } - } - } - `; - operationLoader = { - get: jest.fn(() => InnerQuery), - load: jest.fn(() => Promise.resolve(InnerQuery)), - }; - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - it('marks references when Flight fields are fetched', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - '2': { - __id: '2', - __typename: 'User', - id: '2', - name: 'Lauren', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __id: 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - __typename: 'ReactFlightComponent', - executableDefinitions: [ - { - module: { - __dr: 'RelayFlightExampleQuery.graphql', - }, - variables: { - id: '2', - }, - }, - ], - tree: { - readRoot, - }, - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - 'node(id:"2")': { - __ref: '2', - }, - }, - }; - const recordSource = RelayRecordSource.create(data); - const references = new Set(); - mark( - recordSource, - createNormalizationSelector(FlightQuery.operation, 'client:root', { - count: 10, - id: '1', - }), - references, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - ); - expect(Array.from(references).sort()).toEqual([ - '1', - '2', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('marks references when the Flight field exists but has not been processed', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 
'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __id: 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - __typename: 'ReactFlightComponent', - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const recordSource = RelayRecordSource.create(data); - const references = new Set(); - mark( - recordSource, - createNormalizationSelector(FlightQuery.operation, 'client:root', { - count: 10, - id: '1', - }), - references, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - ); - expect(Array.from(references).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('marks references when the Flight field is null', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - null, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const recordSource = RelayRecordSource.create(data); - const references = new Set(); - mark( - recordSource, - createNormalizationSelector(FlightQuery.operation, 'client:root', { - count: 10, - id: '1', - }), - references, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this 
file - operationLoader, - ); - expect(Array.from(references).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('marks references when the Flight field is undefined', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - undefined, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const recordSource = RelayRecordSource.create(data); - const references = new Set(); - mark( - recordSource, - createNormalizationSelector(FlightQuery.operation, 'client:root', { - count: 10, - id: '1', - }), - references, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - ); - expect(Array.from(references).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - - it('marks references when the linked ReactFlightClientResponseRecord is missing', () => { - const data = { - '1': { - __id: '1', - __typename: 'Story', - 'flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})': - { - __ref: - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - }, - id: '1', - }, - 'client:root': { - __id: 'client:root', - __typename: '__Root', - 'node(id:"1")': { - __ref: '1', - }, - }, - }; - const recordSource = RelayRecordSource.create(data); - const references = new Set(); - mark( - recordSource, - createNormalizationSelector(FlightQuery.operation, 
'client:root', { - count: 10, - id: '1', - }), - references, - // $FlowFixMe[invalid-tuple-arity] Error found while enabling LTI on this file - operationLoader, - ); - expect(Array.from(references).sort()).toEqual([ - '1', - 'client:1:flight(component:"FlightComponent.server",props:{"condition":true,"count":10,"id":"1"})', - 'client:root', - ]); - }); - }); }); diff --git a/packages/relay-runtime/store/__tests__/RelayResponseNormalizer-test.js b/packages/relay-runtime/store/__tests__/RelayResponseNormalizer-test.js index 208146feea141..787c05d872ad1 100644 --- a/packages/relay-runtime/store/__tests__/RelayResponseNormalizer-test.js +++ b/packages/relay-runtime/store/__tests__/RelayResponseNormalizer-test.js @@ -10,7 +10,6 @@ */ 'use strict'; -import type {ReactFlightServerError} from '../../network/RelayNetworkTypes'; const { getActorIdentifier, @@ -438,21 +437,18 @@ describe('RelayResponseNormalizer', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_RelayResponseNormalizerTestFragment: - 'MarkdownUserNameRenderer.react', - __module_operation_RelayResponseNormalizerTestFragment: - 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + __module_component_RelayResponseNormalizerTestFragment: + 'MarkdownUserNameRenderer.react', + 
__module_operation_RelayResponseNormalizerTestFragment: + 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -464,8 +460,7 @@ describe('RelayResponseNormalizer', () => { args: null, operationReference: 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', - dataID: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + dataID: 'client:1:nameRenderer(supported:"34hjiS")', kind: 'ModuleImportPayload', data: { __typename: 'MarkdownUserNameRenderer', @@ -521,21 +516,18 @@ describe('RelayResponseNormalizer', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'MarkdownUserNameRenderer', - __module_component_RelayResponseNormalizerTestFragment: - 'MarkdownUserNameRenderer.react', - __module_operation_RelayResponseNormalizerTestFragment: - 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'MarkdownUserNameRenderer', + __module_component_RelayResponseNormalizerTestFragment: + 'MarkdownUserNameRenderer.react', + __module_operation_RelayResponseNormalizerTestFragment: + 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -547,8 +539,7 @@ describe('RelayResponseNormalizer', () => { 
args: null, operationReference: 'RelayResponseNormalizerTestMarkdownUserNameRenderer_name$normalization.graphql', - dataID: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', + dataID: 'client:1:nameRenderer(supported:"34hjiS")', kind: 'ModuleImportPayload', data: { __typename: 'MarkdownUserNameRenderer', @@ -596,18 +587,15 @@ describe('RelayResponseNormalizer', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __ref: - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - }, - }, - 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - { - __id: 'client:1:nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])', - __typename: 'CustomNameRenderer', - // note: 'customField' data not processed, there is no selection on this type + 'nameRenderer(supported:"34hjiS")': { + __ref: 'client:1:nameRenderer(supported:"34hjiS")', }, + }, + 'client:1:nameRenderer(supported:"34hjiS")': { + __id: 'client:1:nameRenderer(supported:"34hjiS")', + __typename: 'CustomNameRenderer', + // note: 'customField' data not processed, there is no selection on this type + }, 'client:root': { __id: 'client:root', __typename: '__Root', @@ -640,8 +628,7 @@ describe('RelayResponseNormalizer', () => { __id: '1', id: '1', __typename: 'User', - 'nameRenderer(supported:["PlainUserNameRenderer","MarkdownUserNameRenderer"])': - null, + 'nameRenderer(supported:"34hjiS")': null, }, 'client:root': { __id: 'client:root', @@ -3491,564 +3478,6 @@ describe('RelayResponseNormalizer', () => { }); }); - describe('feature ENABLE_REACT_FLIGHT_COMPONENT_FIELD', () => { - let FlightQuery; - let recordSource; - let ServerOrClientQuery; - const dummyReactFlightPayloadDeserializer = () => { - return { - readRoot() { - return { - $$typeof: Symbol.for('react.element'), - type: 'div', - key: null, - ref: 
null, - props: {foo: 1}, - }; - }, - }; - }; - - beforeEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = true; - - FlightQuery = graphql` - query RelayResponseNormalizerTestFlightQuery($id: ID!, $count: Int!) { - node(id: $id) { - ... on Story { - flightComponent(condition: true, count: $count, id: $id) - } - } - } - `; - graphql` - fragment RelayResponseNormalizerTest_clientFragment on Story { - name - body { - text - } - } - `; - ServerOrClientQuery = graphql` - query RelayResponseNormalizerTestServerOrClientQuery($id: ID!) { - node(id: $id) { - ...RelayResponseNormalizerTest_clientFragment - @relay_client_component - } - } - `; - recordSource = new RelayRecordSource(); - recordSource.set(ROOT_ID, RelayModernRecord.create(ROOT_ID, ROOT_TYPE)); - }); - afterEach(() => { - RelayFeatureFlags.ENABLE_REACT_FLIGHT_COMPONENT_FIELD = false; - }); - - describe('when successful', () => { - it('normalizes Flight fields', () => { - const payload: $FlowFixMe = { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [ - { - type: 'div', - key: null, - ref: null, - props: {foo: 1}, - }, - ], - queries: [ - { - id: 'b0dbe24703062b69e6b1d0c38c4f69d2', - module: {__dr: 'RelayFlightExampleQuery.graphql'}, - response: { - data: { - story: { - id: '2', - name: 'Lauren', - __typename: 'User', - }, - }, - extensions: [], - }, - variables: { - id: '2', - }, - }, - ], - errors: [], - fragments: [ - { - module: { - __dr: 'RelayResponseNormalizerTest_clientFragment$normalization.graphql', - }, - __id: '3', - __typename: 'Story', - response: { - data: { - node: { - id: '3', - __typename: 'Story', - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }, - }, - }, - variables: { - id: '3', - }, - }, - ], - }, - }, - }; - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - 
{ - ...defaultOptions, - reactFlightPayloadDeserializer: dummyReactFlightPayloadDeserializer, - }, - ); - expect(recordSource.toJSON()).toMatchInlineSnapshot(` - Object { - "1": Object { - "__id": "1", - "__typename": "Story", - "flight(component:\\"FlightComponent.server\\",props:{\\"condition\\":true,\\"count\\":10,\\"id\\":\\"1\\"})": Object { - "__ref": "client:1:flight(component:\\"FlightComponent.server\\",props:{\\"condition\\":true,\\"count\\":10,\\"id\\":\\"1\\"})", - }, - "id": "1", - }, - "client:1:flight(component:\\"FlightComponent.server\\",props:{\\"condition\\":true,\\"count\\":10,\\"id\\":\\"1\\"})": Object { - "__id": "client:1:flight(component:\\"FlightComponent.server\\",props:{\\"condition\\":true,\\"count\\":10,\\"id\\":\\"1\\"})", - "__typename": "ReactFlightComponent", - "executableDefinitions": Array [ - Object { - "module": Object { - "__dr": "RelayFlightExampleQuery.graphql", - }, - "variables": Object { - "id": "2", - }, - }, - Object { - "module": Object { - "__dr": "RelayResponseNormalizerTest_clientFragment$normalization.graphql", - }, - "variables": Object { - "id": "3", - }, - }, - ], - "tree": Object { - "readRoot": [Function], - }, - }, - "client:root": Object { - "__id": "client:root", - "__typename": "__Root", - "node(id:\\"1\\")": Object { - "__ref": "1", - }, - }, - } - `); - }); - - it('asserts that reactFlightPayloadDeserializer is defined as a function', () => { - const payload: $FlowFixMe = { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'SUCCESS', - tree: [], - queries: [], - errors: [], - fragments: [], - }, - }, - }; - - expect(() => { - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: - dummyReactFlightPayloadDeserializer, - }, - ); - }).not.toThrow(); - expect(() => { - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, 
ROOT_ID, { - count: 10, - id: '1', - }), - payload, - defaultOptions, - ); - }).toThrow(); - }); - }); - - describe('when server errors are encountered', () => { - describe('and ReactFlightServerErrorHandler is specified', () => { - const reactFlightServerErrorHandler = jest.fn< - [string, Array], - void, - >(); - it('calls ReactFlightServerErrorHandler', () => { - const payload: $FlowFixMe = { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }; - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: - dummyReactFlightPayloadDeserializer, - reactFlightServerErrorHandler, - }, - ); - expect(reactFlightServerErrorHandler).toHaveBeenCalledWith( - 'FAIL_JS_ERROR', - expect.arrayContaining([ - expect.objectContaining({ - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }), - ]), - ); - }); - }); - describe('and no ReactFlightServerErrorHandler is specified', () => { - it('warns', () => { - const payload: $FlowFixMe = { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'FAIL_JS_ERROR', - tree: [], - queries: [], - errors: [ - { - message: 'Something threw an error on the server', - stack: 'Error\n at :1:1', - }, - ], - fragments: [], - }, - }, - }; - expectToWarn( - 'RelayResponseNormalizer: Received server errors for field `flightComponent`.\n\n' + - 'Something threw an error on the server\n' + - 'Error\n at :1:1', - () => { - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: - dummyReactFlightPayloadDeserializer, - }, - ); - }, - ); - }); 
- }); - }); - - describe('when the response is malformed', () => { - it('normalizes when the response is null', () => { - const payload = { - node: { - id: '1', - __typename: 'Story', - flightComponent: null, - }, - }; - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: dummyReactFlightPayloadDeserializer, - }, - ); - expect(recordSource.toJSON()).toMatchInlineSnapshot(` - Object { - "1": Object { - "__id": "1", - "__typename": "Story", - "flight(component:\\"FlightComponent.server\\",props:{\\"condition\\":true,\\"count\\":10,\\"id\\":\\"1\\"})": null, - "id": "1", - }, - "client:root": Object { - "__id": "client:root", - "__typename": "__Root", - "node(id:\\"1\\")": Object { - "__ref": "1", - }, - }, - } - `); - }); - it('throws if the response is undefined', () => { - const payload = { - node: { - id: '1', - __typename: 'Story', - flightComponent: undefined, - }, - }; - expect(() => { - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: - dummyReactFlightPayloadDeserializer, - }, - ); - }).toThrow(/Payload did not contain a value for field/); - }); - - it('warns if the row protocol is null', () => { - const payload: $FlowFixMe = { - node: { - id: '1', - __typename: 'Story', - flightComponent: { - status: 'UNEXPECTED_ERROR', - tree: null, - queries: [], - errors: [], - fragments: [], - }, - }, - }; - expectToWarn( - 'RelayResponseNormalizer: Expected `tree` not to be null. 
This typically indicates that a fatal server error prevented any Server Component rows from being written.', - () => { - normalize( - recordSource, - createNormalizationSelector(FlightQuery.operation, ROOT_ID, { - count: 10, - id: '1', - }), - payload, - { - ...defaultOptions, - reactFlightPayloadDeserializer: - dummyReactFlightPayloadDeserializer, - }, - ); - }, - ); - }); - }); - - describe('when the query contains @relay_client_component spreads', () => { - let options; - describe('and client component processing is enabled', () => { - beforeEach(() => { - options = { - ...defaultOptions, - shouldProcessClientComponents: true, - }; - }); - it('normalizes', () => { - const payload = { - node: { - id: '1', - __typename: 'Story', - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }, - }; - normalize( - recordSource, - createNormalizationSelector( - ServerOrClientQuery.operation, - ROOT_ID, - { - id: '1', - }, - ), - payload, - options, - ); - expect(recordSource.toJSON()).toMatchInlineSnapshot(` - Object { - "1": Object { - "__id": "1", - "__typename": "Story", - "body": Object { - "__ref": "client:1:body", - }, - "id": "1", - "name": "React Server Components: The Musical", - }, - "client:1:body": Object { - "__id": "client:1:body", - "__typename": "Text", - "text": "Presenting a new musical from the director of Cats (2019)!", - }, - "client:root": Object { - "__id": "client:root", - "__typename": "__Root", - "node(id:\\"1\\")": Object { - "__ref": "1", - }, - }, - } - `); - }); - }); - - describe('and client component processing is disabled', () => { - beforeEach(() => { - options = { - ...defaultOptions, - shouldProcessClientComponents: false, - }; - }); - it('does not normalize', () => { - const payload = { - node: { - id: '1', - __typename: 'Story', - }, - }; - normalize( - recordSource, - createNormalizationSelector( - ServerOrClientQuery.operation, - ROOT_ID, - { - id: '1', - 
}, - ), - payload, - options, - ); - expect(recordSource.toJSON()).toMatchInlineSnapshot(` - Object { - "1": Object { - "__id": "1", - "__typename": "Story", - "id": "1", - }, - "client:root": Object { - "__id": "client:root", - "__typename": "__Root", - "node(id:\\"1\\")": Object { - "__ref": "1", - }, - }, - } - `); - }); - - it('does not normalize client fragment data even if present', () => { - const payload = { - node: { - id: '1', - __typename: 'Story', - name: 'React Server Components: The Musical', - body: { - text: 'Presenting a new musical from the director of Cats (2019)!', - }, - }, - }; - normalize( - recordSource, - createNormalizationSelector( - ServerOrClientQuery.operation, - ROOT_ID, - { - id: '1', - }, - ), - payload, - options, - ); - expect(recordSource.toJSON()).toMatchInlineSnapshot(` - Object { - "1": Object { - "__id": "1", - "__typename": "Story", - "id": "1", - }, - "client:root": Object { - "__id": "client:root", - "__typename": "__Root", - "node(id:\\"1\\")": Object { - "__ref": "1", - }, - }, - } - `); - }); - }); - }); - }); describe('"falsy" IDs in payload', () => { let recordSource; const Query = graphql` @@ -4275,7 +3704,7 @@ describe('RelayResponseNormalizer', () => { ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [ { @@ -4343,7 +3772,7 @@ describe('RelayResponseNormalizer', () => { {...defaultOptions, actorIdentifier: getActorIdentifier('actor-1234')}, ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [ { @@ -4412,7 +3841,7 @@ describe('RelayResponseNormalizer', () => { {...defaultOptions, actorIdentifier: getActorIdentifier('actor-1234')}, ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [ { @@ -4494,7 +3923,7 @@ describe('RelayResponseNormalizer', () => { }, ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [], 
incrementalPlaceholders: [], @@ -4538,7 +3967,7 @@ describe('RelayResponseNormalizer', () => { }, ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [], incrementalPlaceholders: [], @@ -4601,7 +4030,7 @@ describe('RelayResponseNormalizer', () => { {...defaultOptions, actorIdentifier: getActorIdentifier('actor-1234')}, ); expect(result).toEqual({ - errors: null, + errors: undefined, fieldPayloads: [], followupPayloads: [ { @@ -4712,4 +4141,441 @@ describe('RelayResponseNormalizer', () => { }); }); }); + + describe('when field error handling is disabled', () => { + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = false; + }); + + it('ignores field errors', () => { + const FooQuery = graphql` + query RelayResponseNormalizerTest36Query($id: ID!) { + node(id: $id) { + id + __typename + ... on User { + firstName + lastName + friends(first: 3) { + edges { + cursor + node { + firstName + lastName + } + } + } + } + } + } + `; + const payload = { + node: { + id: '1', + __typename: 'User', + firstName: 'Jerry', + lastName: 'Seinfeld', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + id: '2', + firstName: 'George', + lastName: 'Costanza', + }, + }, + { + cursor: 'cursor:3', + node: { + id: '3', + firstName: null, + lastName: 'Kramer', + }, + }, + { + cursor: 'cursor:4', + node: { + id: '4', + firstName: null, + lastName: 'Newman', + }, + }, + ], + }, + }, + }; + const errors = [ + { + message: "No one knows Kramer's first name until season six!", + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: 'There was another error!', + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: "No one knows Newman's first name!", + path: ['node', 'friends', 'edges', 2, 'node', 'firstName'], + }, + ]; + const recordSource = new RelayRecordSource(); + 
recordSource.set(ROOT_ID, RelayModernRecord.create(ROOT_ID, ROOT_TYPE)); + normalize( + recordSource, + createNormalizationSelector(FooQuery.operation, ROOT_ID, { + id: '1', + size: 32, + }), + payload, + defaultOptions, + errors, + ); + const friendsID = 'client:1:friends(first:3)'; + const edge0ID = `${friendsID}:edges:0`; + const edge1ID = `${friendsID}:edges:1`; + const edge2ID = `${friendsID}:edges:2`; + expect(recordSource.toJSON()).toEqual({ + '1': { + __id: '1', + id: '1', + __typename: 'User', + firstName: 'Jerry', + 'friends(first:3)': {__ref: friendsID}, + lastName: 'Seinfeld', + }, + '2': { + __id: '2', + __typename: 'User', + firstName: 'George', + id: '2', + lastName: 'Costanza', + }, + '3': { + __id: '3', + __typename: 'User', + firstName: null, + id: '3', + lastName: 'Kramer', + }, + '4': { + __id: '4', + __typename: 'User', + firstName: null, + id: '4', + lastName: 'Newman', + }, + [friendsID]: { + __id: friendsID, + __typename: 'FriendsConnection', + edges: { + __refs: [edge0ID, edge1ID, edge2ID], + }, + }, + [edge0ID]: { + __id: edge0ID, + __typename: 'FriendsEdge', + cursor: 'cursor:2', + node: {__ref: '2'}, + }, + [edge1ID]: { + __id: edge1ID, + __typename: 'FriendsEdge', + cursor: 'cursor:3', + node: {__ref: '3'}, + }, + [edge2ID]: { + __id: edge2ID, + __typename: 'FriendsEdge', + cursor: 'cursor:4', + node: {__ref: '4'}, + }, + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + }); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); + + describe('when field error handling is enabled', () => { + const wasFieldErrorHandlingEnabled = + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING; + + beforeAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = true; + }); + + it('normalizes queries with multiple field errors', () => { + const FooQuery = graphql` + query RelayResponseNormalizerTest37Query($id: ID!) 
{ + node(id: $id) { + id + __typename + ... on User { + firstName + lastName + friends(first: 3) { + edges { + cursor + node { + firstName + lastName + } + } + } + } + } + } + `; + const payload = { + node: { + id: '1', + __typename: 'User', + firstName: 'Jerry', + lastName: 'Seinfeld', + friends: { + edges: [ + { + cursor: 'cursor:2', + node: { + id: '2', + firstName: 'George', + lastName: 'Costanza', + }, + }, + { + cursor: 'cursor:3', + node: { + id: '3', + firstName: null, + lastName: 'Kramer', + }, + }, + { + cursor: 'cursor:4', + node: { + id: '4', + firstName: null, + lastName: 'Newman', + }, + }, + ], + }, + }, + }; + const errors = [ + { + message: "No one knows Kramer's first name until season six!", + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: 'There was another error!', + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: "No one knows Newman's first name!", + path: ['node', 'friends', 'edges', 2, 'node', 'firstName'], + }, + ]; + const recordSource = new RelayRecordSource(); + recordSource.set(ROOT_ID, RelayModernRecord.create(ROOT_ID, ROOT_TYPE)); + normalize( + recordSource, + createNormalizationSelector(FooQuery.operation, ROOT_ID, { + id: '1', + size: 32, + }), + payload, + defaultOptions, + errors, + ); + const friendsID = 'client:1:friends(first:3)'; + const edge0ID = `${friendsID}:edges:0`; + const edge1ID = `${friendsID}:edges:1`; + const edge2ID = `${friendsID}:edges:2`; + expect(recordSource.toJSON()).toEqual({ + '1': { + __id: '1', + id: '1', + __typename: 'User', + firstName: 'Jerry', + 'friends(first:3)': {__ref: friendsID}, + lastName: 'Seinfeld', + }, + '2': { + __id: '2', + __typename: 'User', + firstName: 'George', + id: '2', + lastName: 'Costanza', + }, + '3': { + __errors: { + firstName: [ + { + message: "No one knows Kramer's first name until season six!", + }, + { + message: 'There was another error!', + }, + ], + }, + __id: '3', + __typename: 'User', + firstName: 
null, + id: '3', + lastName: 'Kramer', + }, + '4': { + __errors: { + firstName: [ + { + message: "No one knows Newman's first name!", + }, + ], + }, + __id: '4', + __typename: 'User', + firstName: null, + id: '4', + lastName: 'Newman', + }, + [friendsID]: { + __id: friendsID, + __typename: 'FriendsConnection', + edges: { + __refs: [edge0ID, edge1ID, edge2ID], + }, + }, + [edge0ID]: { + __id: edge0ID, + __typename: 'FriendsEdge', + cursor: 'cursor:2', + node: {__ref: '2'}, + }, + [edge1ID]: { + __id: edge1ID, + __typename: 'FriendsEdge', + cursor: 'cursor:3', + node: {__ref: '3'}, + }, + [edge2ID]: { + __id: edge2ID, + __typename: 'FriendsEdge', + cursor: 'cursor:4', + node: {__ref: '4'}, + }, + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + }); + }); + + it('normalizes queries with field errors that bubbled up', () => { + const FooQuery = graphql` + query RelayResponseNormalizerTest38Query($id: ID!) { + node(id: $id) { + id + __typename + ... 
on User { + firstName + lastName + friends(first: 3) { + edges { + cursor + node { + firstName + lastName + } + } + } + } + } + } + `; + const payload = { + node: { + id: '1', + __typename: 'User', + firstName: 'Jerry', + lastName: 'Seinfeld', + friends: null, + }, + }; + const errors = [ + { + message: "No one knows Kramer's first name until season six!", + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: 'There was another error!', + path: ['node', 'friends', 'edges', 1, 'node', 'firstName'], + }, + { + message: "No one knows Newman's first name!", + path: ['node', 'friends', 'edges', 2, 'node', 'firstName'], + }, + ]; + const recordSource = new RelayRecordSource(); + recordSource.set(ROOT_ID, RelayModernRecord.create(ROOT_ID, ROOT_TYPE)); + normalize( + recordSource, + createNormalizationSelector(FooQuery.operation, ROOT_ID, { + id: '1', + size: 32, + }), + payload, + defaultOptions, + errors, + ); + expect(recordSource.toJSON()).toEqual({ + '1': { + __id: '1', + __typename: 'User', + __errors: { + 'friends(first:3)': [ + { + message: "No one knows Kramer's first name until season six!", + path: ['edges', 1, 'node', 'firstName'], + }, + { + message: 'There was another error!', + path: ['edges', 1, 'node', 'firstName'], + }, + { + message: "No one knows Newman's first name!", + path: ['edges', 2, 'node', 'firstName'], + }, + ], + }, + firstName: 'Jerry', + 'friends(first:3)': null, + id: '1', + lastName: 'Seinfeld', + }, + 'client:root': { + __id: 'client:root', + __typename: '__Root', + 'node(id:"1")': {__ref: '1'}, + }, + }); + }); + + afterAll(() => { + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING = + wasFieldErrorHandlingEnabled; + }); + }); }); diff --git a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js 
b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js new file mode 100644 index 0000000000000..cfbba51690459 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js @@ -0,0 +1,143 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType } from "./RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql"; +export type ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$variables = {| + id: string, +|}; +export type ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data = {| + +node: ?{| + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType, + |}, +|}; +export type ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend = {| + response: ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data, + variables: ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } 
+]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "6b4377983658f804a7b0a8e3be8b892c", + "id": null, + "metadata": {}, + "name": "ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend", + "operationKind": "query", + "text": "query ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend\n id\n }\n}\n\nfragment 
RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend on User {\n name\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "1ea17c6315e8ba285db304130201310d"; +} + +module.exports = ((node/*: any*/)/*: Query< + ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$variables, + ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest1Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest1Query.graphql.js index 9e0c50b26f1d2..7e60ade5f716e 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest1Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest1Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<09e3064c92490719ad5471d37be08cbd>> * @flow * @lightSyntaxTransform * @nogrep @@ -28,13 +28,13 @@ import {house as astrologicalSignHouseResolverType} from "../resolvers/Astrologi // A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignHouseResolverType: ( rootKey: AstrologicalSignHouseResolver$key, -) => mixed); +) => ?number); import {name as astrologicalSignNameResolverType} from "../resolvers/AstrologicalSignNameResolver.js"; // Type assertion validating that `astrologicalSignNameResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {opposite as astrologicalSignOppositeResolverType} from "../resolvers/AstrologicalSignOppositeResolver.js"; // Type assertion validating that `astrologicalSignOppositeResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -56,15 +56,15 @@ export type ClientEdgeToClientObjectTest1Query$data = {| +me: ?{| +astrological_sign: ?{| +__id: string, - +house: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignHouseResolverType>, - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +house: ?number, + +name: ?string, +opposite: ?{| +__id: string, - +house: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignHouseResolverType>, - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +house: ?number, + +name: ?string, +opposite: ?{| +__id: string, - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, |}, |}, |}, @@ -85,43 +85,19 @@ var v0 = { "storageKey": null }, v1 = { - "alias": null, "args": null, - "fragment": { - "args": null, - "kind": "FragmentSpread", - "name": "AstrologicalSignNameResolver" - }, - "kind": "RelayResolver", - "name": "name", - "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, - "path": "me.name" + "kind": "FragmentSpread", + "name": "AstrologicalSignNameResolver" }, v2 = { - "alias": null, "args": null, - "fragment": { - "args": null, - "kind": "FragmentSpread", - "name": "AstrologicalSignHouseResolver" - }, - "kind": "RelayResolver", - "name": "house", - "resolverModule": require('./../resolvers/AstrologicalSignHouseResolver').house, - "path": "me.house" + "kind": "FragmentSpread", + "name": "AstrologicalSignHouseResolver" }, v3 = { - "alias": null, "args": null, - "fragment": { - "args": null, - 
"kind": "FragmentSpread", - "name": "AstrologicalSignOppositeResolver" - }, - "kind": "RelayResolver", - "name": "opposite", - "resolverModule": require('./../resolvers/AstrologicalSignOppositeResolver').opposite, - "path": "me.opposite" + "kind": "FragmentSpread", + "name": "AstrologicalSignOppositeResolver" }, v4 = { "alias": null, @@ -146,7 +122,7 @@ v5 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -158,7 +134,7 @@ v6 = { "fragment": (v5/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, v7 = { "name": "house", @@ -166,7 +142,7 @@ v7 = { "fragment": (v5/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, v8 = { "name": "opposite", @@ -196,6 +172,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -218,12 +195,37 @@ return { "plural": false, "selections": [ (v0/*: any*/), - (v1/*: any*/), - (v2/*: any*/), + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.name" + }, + { + "alias": null, + "args": null, + "fragment": (v2/*: any*/), + "kind": "RelayResolver", + "name": "house", + "resolverModule": require('./../resolvers/AstrologicalSignHouseResolver').house, + "path": "me.astrological_sign.house" + }, { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", - "backingField": (v3/*: any*/), + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": (v3/*: any*/), + "kind": "RelayResolver", + "name": "opposite", + "resolverModule": require('./../resolvers/AstrologicalSignOppositeResolver').opposite, + "path": "me.astrological_sign.opposite" + 
}, "linkedField": { "alias": null, "args": null, @@ -233,12 +235,37 @@ return { "plural": false, "selections": [ (v0/*: any*/), - (v1/*: any*/), - (v2/*: any*/), + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.opposite.name" + }, + { + "alias": null, + "args": null, + "fragment": (v2/*: any*/), + "kind": "RelayResolver", + "name": "house", + "resolverModule": require('./../resolvers/AstrologicalSignHouseResolver').house, + "path": "me.astrological_sign.opposite.house" + }, { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", - "backingField": (v3/*: any*/), + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": (v3/*: any*/), + "kind": "RelayResolver", + "name": "opposite", + "resolverModule": require('./../resolvers/AstrologicalSignOppositeResolver').opposite, + "path": "me.astrological_sign.opposite.opposite" + }, "linkedField": { "alias": null, "args": null, @@ -248,7 +275,15 @@ return { "plural": false, "selections": [ (v0/*: any*/), - (v1/*: any*/) + { + "alias": null, + "args": null, + "fragment": (v1/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.opposite.opposite.name" + } ], "storageKey": null } diff --git a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest2Query.graphql.js index 01096a767e350..89650598a0a5a 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest2Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated 
SignedSource<<9dbe872665053dd2d1154ff50a74046d>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -26,19 +26,19 @@ import {name as astrologicalSignNameResolverType} from "../resolvers/Astrologica // A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {all_astrological_signs as queryAllAstrologicalSignsResolverType} from "../resolvers/QueryAllAstrologicalSignsResolver.js"; // Type assertion validating that `queryAllAstrologicalSignsResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (queryAllAstrologicalSignsResolverType: ( rootKey: QueryAllAstrologicalSignsResolver$key, -) => ?$ReadOnlyArray ?$ReadOnlyArray<{| +id: DataID, |}>); export type ClientEdgeToClientObjectTest2Query$variables = {||}; export type ClientEdgeToClientObjectTest2Query$data = {| - +all_astrological_signs: ?$ReadOnlyArray((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +all_astrological_signs: ?$ReadOnlyArray<{| + +name: ?string, |}>, |}; export type ClientEdgeToClientObjectTest2Query = {| @@ -67,6 +67,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -99,7 +100,7 @@ return { "kind": "RelayResolver", "name": "name", "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, - "path": "name" + "path": "all_astrological_signs.name" } ], "storageKey": null @@ -177,7 +178,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -185,7 +186,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest3Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest3Query.graphql.js index 4f63c89b46640..dbbdec9c00e88 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest3Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/ClientEdgeToClientObjectTest3Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3d9a1ffe0ea64b857ab56f2e2d080096>> + * @generated SignedSource<<34e42a80712b3fa3b413aece3c6e1ca4>> * @flow * @lightSyntaxTransform * @nogrep @@ -26,7 +26,7 @@ import {name as astrologicalSignNameResolverType} from "../resolvers/Astrologica // A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {astrological_sign as userAstrologicalSignResolverType} from "../resolvers/UserAstrologicalSignResolver.js"; // Type assertion validating that `userAstrologicalSignResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -40,7 +40,7 @@ export type ClientEdgeToClientObjectTest3Query$data = {| +me: ?{| +astrological_sign: ?{| +__id: string, - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, +notes: ?string, |}, |}, @@ -93,6 +93,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -126,7 +127,7 @@ return { "kind": "RelayResolver", "name": "name", "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, - "path": "me.name" + "path": "me.astrological_sign.name" }, (v1/*: any*/) ], @@ -223,7 +224,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -231,7 +232,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v1/*: any*/), (v2/*: any*/) diff --git a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Fragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Fragment.graphql.js index 9b9aecd08a2e1..489280231051b 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Fragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3f24cdc9cd3096cdcf46130f08a39290>> + * @generated SignedSource<<465e6f93772371c4d817178f3d463ad6>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": 
"nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Query.graphql.js index b4697d21aa957..5402279a15563 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -115,10 +115,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -156,7 +153,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestFlightQuery.graphql.js deleted file mode 100644 index 9604c71722b58..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type DataCheckerTestFlightQuery$variables = {| - count: number, - id: string, -|}; -export type DataCheckerTestFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type DataCheckerTestFlightQuery = {| - response: DataCheckerTestFlightQuery$data, - variables: DataCheckerTestFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "DataCheckerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - 
(v0/*: any*/) - ], - "kind": "Operation", - "name": "DataCheckerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "8057911ba030096bf50db84986d9103b", - "id": null, - "metadata": {}, - "name": "DataCheckerTestFlightQuery", - "operationKind": "query", - "text": "query DataCheckerTestFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "5deb0c7fbfc1629f14967d717067775d"; -} - -module.exports = ((node/*: any*/)/*: Query< - DataCheckerTestFlightQuery$variables, - DataCheckerTestFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestInnerQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestInnerQuery.graphql.js deleted file mode 100644 index e156f304826e4..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/DataCheckerTestInnerQuery.graphql.js +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<631849a7ba0985c7c0ceb300fa04eb95>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type DataCheckerTestInnerQuery$variables = {| - id: string, -|}; -export type DataCheckerTestInnerQuery$data = {| - +node: ?{| - +name?: ?string, - |}, -|}; -export type DataCheckerTestInnerQuery = {| - response: DataCheckerTestInnerQuery$data, - variables: DataCheckerTestInnerQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v2 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "DataCheckerTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "DataCheckerTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v2/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - 
"cacheID": "e140c5016c9604edb00067693511c4ba", - "id": null, - "metadata": {}, - "name": "DataCheckerTestInnerQuery", - "operationKind": "query", - "text": "query DataCheckerTestInnerQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n name\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "137eac9e94abaaa5f18959dc2f8cfc23"; -} - -module.exports = ((node/*: any*/)/*: Query< - DataCheckerTestInnerQuery$variables, - DataCheckerTestInnerQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge.graphql.js index d7da8c36426f0..92eceb5d67b88 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<1c3802a11b3c43edbd60a1310b4ae15f>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest1Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge.graphql.js index 
d99fd12252f62..317ce738a7cd1 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3907c91ef6989ecfcc0f23c05c06be6b>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -45,7 +45,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest2Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge.graphql.js index 9dd7a8f9d6a68..1a2fa3eeda1e7 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8a73771292d1141c30dbe0de66b22144>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -44,7 +44,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": 
"RefetchableClientEdgeQuery_RelayReaderClientEdgesTest3Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge.graphql.js index 4c96289085b99..5685371aae18b 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<6ff8e6f67c61293509ba016782dc028b>> * @flow * @lightSyntaxTransform * @nogrep @@ -55,7 +55,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge.graphql.js index 62833ab725080..addf4c1bdbdec 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * 
- * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest4Query_me__client_edge__another_client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge.graphql.js index 8d33da47ecf20..1190427b81c66 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8d81f4a70ab5e158ffad5a1a1cb9bf20>> + * @generated SignedSource<<2e579079be8fbf9250e99cd159c62a8c>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest5Query_me__client_extension_linked_field__client_edge", diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias.graphql.js index 6cfb7a5c23057..cbec88e78dac4 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<370f2f8e5400766a979a7c663fa2ce6a>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest6Query_me__the_alias", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge.graphql.js index a0aff1512a9cd..43e38ec1a0070 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<5394b7a4bfe5d4c8b429f3b05cde6fda>> + * @generated SignedSource<<3ba1d057907a8ad61aca61235523a1b3>> * @flow * @lightSyntaxTransform * 
@nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderClientEdgesTest7Query_me__null_client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js new file mode 100644 index 0000000000000..2195a5c8b2f1e --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql.js @@ -0,0 +1,81 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ReaderFragment, RefetchableFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType: FragmentType; +type ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$variables = any; +export type RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data = {| + +id: string, + +name: ?string, + +$fragmentType: RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType, +|}; +export type RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$key = { + +$data?: RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data, + +$fragmentSpreads: RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "refetch": { + "connection": null, + "fragmentPathInResult": [ + "node" + ], + "operation": require('./ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql'), + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } + } + }, + "name": "RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "1ea17c6315e8ba285db304130201310d"; +} + +module.exports = ((node/*: any*/)/*: RefetchableFragment< + RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$fragmentType, + RefetchableClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$data, + ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend$variables, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge.graphql.js index 95697bddf5753..fcb1dfa92c092 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RefetchableClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<768501c48375b9cd265e1ca1f28a6be0>> + * 
@generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_RelayReaderResolverTest24Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery.graphql.js deleted file mode 100644 index 05fc0fd50c34d..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery.graphql.js +++ /dev/null @@ -1,178 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery = {| - response: RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$data, - variables: RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v3 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - { - "kind": "Literal", - "name": "id", - "value": "x" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery", - "selections": [ - { - "alias": null, - "args": (v2/*: any*/), - "concreteType": null, - "kind": 
"LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v3/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery", - "selections": [ - { - "alias": null, - "args": (v2/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v3/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "3b418060ab265ecc5c7746282402c5c2", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... 
on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: \"x\"})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "a8576cffa1a503ae3169c301dbe17dc9"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$variables, - RelayModernEnvironmentExecuteMutationWithFlightTest_FlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery.graphql.js deleted file mode 100644 index 6f745188decf1..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery.graphql.js +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$variables = {| - id: string, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$data = {| - +node: ?{| - +name?: ?string, - |}, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery = {| - response: RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$data, - variables: RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v2 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", 
- "name": "__typename", - "storageKey": null - }, - (v2/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "f62b1d65d77831be7e666ea04fbcf8e9", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n name\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "09a947eb9531ea2baf54ad751801b7f8"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$variables, - RelayModernEnvironmentExecuteMutationWithFlightTest_InnerQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation.graphql.js deleted file mode 100644 index e2361edf6a903..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation.graphql.js +++ /dev/null @@ -1,192 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Mutation } from 'relay-runtime'; -export type StoryUpdateInput = {| - body?: ?InputText, -|}; -export type InputText = {| - ranges?: ?$ReadOnlyArray, - text?: ?string, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$variables = {| - count: number, - input: StoryUpdateInput, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$data = {| - +storyUpdate: ?{| - +story: ?{| - +body: ?{| - +text: ?string, - |}, - +flightComponent: ?any, - +id: string, - |}, - |}, -|}; -export type RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation = {| - response: RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$data, - variables: RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "input" -}, -v2 = [ - { - "alias": null, - "args": [ - { - "kind": "Variable", - "name": "input", - "variableName": "input" - } - ], - "concreteType": "StoryUpdateResponsePayload", - "kind": "LinkedField", - "name": "storyUpdate", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "concreteType": "Story", - "kind": "LinkedField", - "name": "story", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": 
"ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - }, - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - { - "kind": "Literal", - "name": "id", - "value": "x" - } - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "storageKey": null - } - ], - "storageKey": null - } -]; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation", - "selections": (v2/*: any*/), - "type": "Mutation", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation", - "selections": (v2/*: any*/) - }, - "params": { - "cacheID": "bc77fdcb1d93e6598f93a1f482af7052", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation", - "operationKind": "mutation", - "text": "mutation RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation(\n $input: StoryUpdateInput!\n $count: Int!\n) {\n storyUpdate(input: $input) {\n story {\n id\n body {\n text\n }\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: \"x\"})\n }\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "3f3551ba1af651d056e6777df90ef1d4"; -} - -module.exports = ((node/*: any*/)/*: Mutation< - RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$variables, - 
RelayModernEnvironmentExecuteMutationWithFlightTest_UpdateStoryMutation$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentFragment.graphql.js index 2d62c4d76204d..2cfbc7b4ed7be 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8145e012403cbe89a1719d24deba15af>> * @flow * @lightSyntaxTransform * @nogrep @@ -77,10 +77,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -117,7 +114,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "storageKey": null diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentQuery.graphql.js index 695db8ec6e706..68c5c953d36f4 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCommentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<294c647ee395bc40905542cf5c12d28b>> + * 
@generated SignedSource<<971fe46e359244d6b6f6b67ef992985e>> * @flow * @lightSyntaxTransform * @nogrep @@ -134,10 +134,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -175,7 +172,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, (v2/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCreateCommentMutation.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCreateCommentMutation.graphql.js index 7bd846c76e936..b1ec77c498530 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCreateCommentMutation.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteMutationWithMatchTestCreateCommentMutation.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9e204ea0dbc08221d9a8b62eeeb16a70>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -81,10 +81,7 @@ v3 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], v4 = { @@ -172,7 +169,7 @@ return { (v4/*: any*/), (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "storageKey": null @@ -231,7 +228,7 @@ return { (v4/*: any*/), (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, (v7/*: any*/) ], diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentCreateSubscription.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentCreateSubscription.graphql.js index e7003752bfef5..024d644dc8063 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentCreateSubscription.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentCreateSubscription.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<67c7122ff0ce40058c03251d7258e9ad>> + * @generated SignedSource<<71298e8ba42d5946b77669231f1dbd1a>> * @flow * @lightSyntaxTransform * @nogrep @@ -75,10 +75,7 @@ v3 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], v4 = { @@ -166,7 +163,7 @@ return { (v4/*: any*/), (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "storageKey": null @@ -225,7 +222,7 @@ return { (v4/*: any*/), (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, (v7/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentFragment.graphql.js index bfef698a491f6..18c7a1c10a325 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentFragment.graphql.js +++ 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9a2a8da93a2831b0276225229d4ed1a5>> + * @generated SignedSource<<9b956e78be808809ccfe725ba1b108fc>> * @flow * @lightSyntaxTransform * @nogrep @@ -77,10 +77,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -117,7 +114,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "storageKey": null diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentQuery.graphql.js index 27cc47e780e8d..be1655b720e85 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteSubscriptionWithMatchTestCommentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<37d87a29127c66506f4f587bbf3ce34b>> * @flow * @lightSyntaxTransform * @nogrep @@ -134,10 +134,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -175,7 +172,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, (v2/*: any*/) ], diff 
--git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery.graphql.js deleted file mode 100644 index 4d8185a24eb00..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery = {| - response: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$data, - variables: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": 
"component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "a8934f5931bb2ca91455943e0eba59be", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... 
on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "af935db133f1abf1a578d02f3bb73db0"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$variables, - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery.graphql.js deleted file mode 100644 index 05f2f32ed32c2..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery.graphql.js +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<87c42af271f7245ab44a8a837d3829a9>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightClientDependency RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType } from "./RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment.graphql"; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$variables = {| - id: string, -|}; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$data = {| - +node: ?{| - +$fragmentSpreads: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType, - |}, -|}; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery = {| - response: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$data, - variables: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -]; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment" - } - ], - "storageKey": null - } - ], - "type": "Query", - 
"abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "args": null, - "fragment": require('./RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql'), - "kind": "ClientComponent" - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "16a3c110f5321eb844d02c0421a2e253", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment @relay_client_component_server(module_id: \"RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql\")\n id\n }\n}\n\nfragment RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment on Story {\n name\n body {\n text\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "337f43907f43a7d61eed932a0bf7f0d2"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$variables, - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTestInnerQuery$data, ->*/); diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql.js deleted file mode 100644 index 4f519cf5a0c23..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization.graphql.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<2a4ecd388a80e42c9f408c071fdedd24>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$normalization", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ] -}; - -if (__DEV__) { - (node/*: any*/).hash = "ea85bfcba4ce6cf4e091669c406f440d"; -} - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment.graphql.js 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment.graphql.js deleted file mode 100644 index af2143cd89d19..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment.graphql.js +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { FragmentType } from "relay-runtime"; -declare export opaque type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType: FragmentType; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$data = {| - +body: ?{| - +text: ?string, - |}, - +name: ?string, - +$fragmentType: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType, -|}; -export type RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$key = { - +$data?: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$data, - +$fragmentSpreads: RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType, - ... 
-}; -*/ - -var node/*: ReaderFragment*/ = { - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; - -if (__DEV__) { - (node/*: any*/).hash = "ea85bfcba4ce6cf4e091669c406f440d"; -} - -module.exports = ((node/*: any*/)/*: Fragment< - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$fragmentType, - RelayModernEnvironmentExecuteWithFlightAndClientFragmentTest_clientFragment$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestFlightQuery.graphql.js deleted file mode 100644 index ad13c18d0c300..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<865a3d04a44bce22cbc1815bfad2f209>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayModernEnvironmentExecuteWithFlightTestFlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayModernEnvironmentExecuteWithFlightTestFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayModernEnvironmentExecuteWithFlightTestFlightQuery = {| - response: RelayModernEnvironmentExecuteWithFlightTestFlightQuery$data, - variables: RelayModernEnvironmentExecuteWithFlightTestFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithFlightTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": 
false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteWithFlightTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "6253e6b9124de07a9d1fa7f9a8274790", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithFlightTestFlightQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteWithFlightTestFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... 
on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "dd0fa7d8034b2f3bba51ca36fbec30b0"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteWithFlightTestFlightQuery$variables, - RelayModernEnvironmentExecuteWithFlightTestFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestInnerQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestInnerQuery.graphql.js deleted file mode 100644 index 3c4e6c81be9df..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithFlightTestInnerQuery.graphql.js +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<0872b54bdd20677a900cb703b45c000d>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayModernEnvironmentExecuteWithFlightTestInnerQuery$variables = {| - id: string, -|}; -export type RelayModernEnvironmentExecuteWithFlightTestInnerQuery$data = {| - +node: ?{| - +name?: ?string, - |}, -|}; -export type RelayModernEnvironmentExecuteWithFlightTestInnerQuery = {| - response: RelayModernEnvironmentExecuteWithFlightTestInnerQuery$data, - variables: RelayModernEnvironmentExecuteWithFlightTestInnerQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v2 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithFlightTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteWithFlightTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - 
"storageKey": null - }, - (v2/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "5bcfc11827ad90de437d3ab08d3382b1", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithFlightTestInnerQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteWithFlightTestInnerQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n name\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "b2fdcec723be1551d06257004e593265"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteWithFlightTestInnerQuery$variables, - RelayModernEnvironmentExecuteWithFlightTestInnerQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchAdditionalArgumentsTestUserQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchAdditionalArgumentsTestUserQuery.graphql.js index c5810e1317402..5e13e3e4a9378 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchAdditionalArgumentsTestUserQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchAdditionalArgumentsTestUserQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<2d2a28f571edf837da821165bfdcf52c>> * @flow * @lightSyntaxTransform * @nogrep @@ -64,10 +64,7 @@ v2 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], v3 = { @@ -134,7 +131,7 @@ return { (v3/*: any*/), (v4/*: any*/) ], - "storageKey": "nameRendererForContext(context:\"HEADER\",supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": 
"nameRendererForContext(context:\"HEADER\",supported:\"34hjiS\")" } ], "type": "User", @@ -177,7 +174,7 @@ return { (v3/*: any*/), (v4/*: any*/) ], - "storageKey": "nameRendererForContext(context:\"HEADER\",supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRendererForContext(context:\"HEADER\",supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchTestUserQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchTestUserQuery.graphql.js index 084dafb4d3d07..7f7ce38ec7a17 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchTestUserQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithMatchTestUserQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8e085c9ac99f9d314340b1ce705014f6>> * @flow * @lightSyntaxTransform * @nogrep @@ -81,10 +81,7 @@ v3 = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -122,7 +119,7 @@ v3 = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name$normalization.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name$normalization.graphql.js index ca02efaa558d3..9d8627ead8803 100644 --- 
a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name$normalization.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name$normalization.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<91df6f6dbf9fc313ebed5459e6696e37>> + * @generated SignedSource<<2d52cbc03adc3b93cb6d157bf0ddddb2>> * @flow * @lightSyntaxTransform * @nogrep @@ -91,9 +91,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -117,7 +115,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v1/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name.graphql.js index 8758136520cf9..283a9b7cb9794 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestMarkdownUserNameRenderer_name.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<0d2dce9d1800623115e25385d93c91d4>> * @flow * @lightSyntaxTransform * @nogrep @@ -97,9 +97,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -122,7 +120,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": 
"nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "storageKey": null diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestUserQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestUserQuery.graphql.js index 20394ab18e1fd..24f4c8cbb5240 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestUserQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithNestedMatchTestUserQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8b4c425eb635d0959a7f528f7ed305c5>> + * @generated SignedSource<<0d6ec96dbc33423bb574c16adb4e3be9>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,9 +59,7 @@ v2 = [ { "kind": "Literal", "name": "supported", - "value": [ - "MarkdownUserNameRenderer" - ] + "value": "2aTHRe" } ], v3 = { @@ -113,7 +111,7 @@ return { "selections": [ (v3/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"2aTHRe\")" } ], "type": "User", @@ -155,7 +153,7 @@ return { (v4/*: any*/), (v3/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"2aTHRe\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithPluralMatchTestUserQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithPluralMatchTestUserQuery.graphql.js index 3d4edbe388020..77fbccae0d831 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithPluralMatchTestUserQuery.graphql.js +++ 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithPluralMatchTestUserQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<2876a51e0e7653e5855aacebb73555c7>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ v2 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], v3 = { @@ -129,7 +126,7 @@ return { (v3/*: any*/), (v4/*: any*/) ], - "storageKey": "nameRenderers(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderers(supported:\"34hjiS\")" } ], "type": "User", @@ -172,7 +169,7 @@ return { (v3/*: any*/), (v4/*: any*/) ], - "storageKey": "nameRenderers(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderers(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery.graphql.js index 51d2c8413d02b..8fd77d65ae7f1 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<859dd5aabd816b54e16e217f0b1004c3>> + * @generated SignedSource<<6713b0a8e08ed95789878b165ffac985>> * @flow * @lightSyntaxTransform * @nogrep @@ -33,21 +33,19 @@ export type RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFra response: RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery$data, variables: 
RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_pictureScalerelayprovider": require('./../RelayProvider_pictureScale.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_pictureScalerelayprovider: {| +get: () => number, |}, +__relay_internal__pv__RelayProvider_returnsTruerelayprovider: {| +get: () => boolean, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), - "__relay_internal__pv__RelayProvider_pictureScalerelayprovider": require('./../RelayProvider_pictureScale.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -229,7 +227,10 @@ return { "name": "RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery", "operationKind": "query", "text": "query RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgManyFragmentsQuery(\n $id: ID!\n $__relay_internal__pv__RelayProvider_returnsTruerelayprovider: Boolean!\n $__relay_internal__pv__RelayProvider_pictureScalerelayprovider: Float!\n) {\n node(id: $id) {\n __typename\n ...RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1\n ...RelayModernEnvironmentExecuteWithProvidedVariableTest_profile2\n ...RelayModernEnvironmentExecuteWithProvidedVariableTest_profile3\n id\n }\n}\n\nfragment RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1 on User {\n id\n name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n username @skip(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n profilePicture {\n uri\n }\n}\n\nfragment RelayModernEnvironmentExecuteWithProvidedVariableTest_profile2 on 
User {\n name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n alternate_name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n}\n\nfragment RelayModernEnvironmentExecuteWithProvidedVariableTest_profile3 on User {\n profile_picture(scale: $__relay_internal__pv__RelayProvider_pictureScalerelayprovider) {\n uri\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_pictureScalerelayprovider": require('./../RelayProvider_pictureScale.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery.graphql.js index e3bdd40e70445..f426f2dc21593 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<202e4fac8401882f52063c14c4da0afc>> * @flow * @lightSyntaxTransform * @nogrep @@ -31,17 +31,15 @@ export type RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleF response: RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery$data, variables: RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": 
require('./../RelayProvider_returnsTrue.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_returnsTruerelayprovider: {| +get: () => boolean, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -184,7 +182,9 @@ return { "name": "RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery", "operationKind": "query", "text": "query RelayModernEnvironmentExecuteWithProvidedVariableTest_UserArgSingleFragmentQuery(\n $id: ID!\n $__relay_internal__pv__RelayProvider_returnsTruerelayprovider: Boolean!\n) {\n node(id: $id) {\n __typename\n ...RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1\n id\n }\n}\n\nfragment RelayModernEnvironmentExecuteWithProvidedVariableTest_profile1 on User {\n id\n name @include(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n username @skip(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n profilePicture {\n uri\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery.graphql.js deleted file mode 100644 index 84b9e9b93596e..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery.graphql.js +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<7eb01d6195f1c4b41ba3bc4131c4d516>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightClientDependency RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType } from "./RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment.graphql"; -export type RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$variables = {| - id: string, -|}; -export type RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$data = {| - +node: ?{| - +$fragmentSpreads: RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType, - |}, -|}; -export type RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery = {| - response: RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$data, - variables: RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -]; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment" 
- } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "args": null, - "fragment": require('./RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql'), - "kind": "ClientComponent" - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "07ae87a9f65ccf4bcb38dbf960d82515", - "id": null, - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery", - "operationKind": "query", - "text": "query RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment @relay_client_component_server(module_id: \"RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql\")\n id\n }\n}\n\nfragment RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment on Story {\n name\n body {\n text\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "7bf230bc939d43ce10e55e7a2cc0af8e"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$variables, - RelayModernEnvironmentExecuteWithRelayClientComponentTestQuery$data, ->*/); diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql.js deleted file mode 100644 index 53182decfc1fd..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization.graphql.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<6ce75d78dbb2fa9b746fe370d9a6dfb6>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$normalization", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ] -}; - -if (__DEV__) { - (node/*: any*/).hash = "18679cc241c9b27229ab29e32aad5597"; -} - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment.graphql.js 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment.graphql.js deleted file mode 100644 index 1dd08738a58c6..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment.graphql.js +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<299d46278318ffb9f9ae62c8ede05c33>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { FragmentType } from "relay-runtime"; -declare export opaque type RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType: FragmentType; -export type RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$data = {| - +body: ?{| - +text: ?string, - |}, - +name: ?string, - +$fragmentType: RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType, -|}; -export type RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$key = { - +$data?: RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$data, - +$fragmentSpreads: RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType, - ... 
-}; -*/ - -var node/*: ReaderFragment*/ = { - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; - -if (__DEV__) { - (node/*: any*/).hash = "18679cc241c9b27229ab29e32aad5597"; -} - -module.exports = ((node/*: any*/)/*: Fragment< - RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$fragmentType, - RelayModernEnvironmentExecuteWithRelayClientComponentTest_clientFragment$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithSiblingAndNestedModuleTestUserQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithSiblingAndNestedModuleTestUserQuery.graphql.js index 7d0e59cbe1225..3caec2c49d57c 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithSiblingAndNestedModuleTestUserQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentExecuteWithSiblingAndNestedModuleTestUserQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -66,9 +66,7 @@ v2 = [ { "kind": "Literal", "name": "supported", - "value": [ - "MarkdownUserNameRenderer" - ] + "value": "2aTHRe" } ], v3 = { @@ -89,9 +87,7 @@ v4 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], v5 = { @@ -143,7 
+139,7 @@ return { "selections": [ (v3/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"2aTHRe\")" }, { "alias": "outerRendererB", @@ -155,7 +151,7 @@ return { "selections": [ (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "type": "User", @@ -197,7 +193,7 @@ return { (v6/*: any*/), (v3/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"2aTHRe\")" }, { "alias": "outerRendererB", @@ -210,7 +206,7 @@ return { (v6/*: any*/), (v5/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentLookupTestQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentLookupTestQuery.graphql.js index f28105e77262d..07624e041f200 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentLookupTestQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentLookupTestQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6468af404e3077989cac236c8be0838f>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -123,9 +123,7 @@ v4 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainCommentBody" - ] + "value": "2Rll6p" } ], "concreteType": null, @@ -164,7 +162,7 @@ v4 = [ }, (v3/*: any*/) ], - "storageKey": "commentBody(supported:[\"PlainCommentBody\"])" + "storageKey": "commentBody(supported:\"2Rll6p\")" } ], "type": "Comment", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTest1Mutation.graphql.js 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTest1Mutation.graphql.js index 30008cbf3f67a..7898187b4fd28 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTest1Mutation.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTest1Mutation.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0d585e4fcf0408c51ad2551e13a03a93>> + * @generated SignedSource<<4ed5f99cf70e4f1f44b9cc579229be03>> * @flow * @lightSyntaxTransform * @nogrep @@ -44,6 +44,7 @@ export type RelayModernEnvironmentWithOperationTrackerTest1Mutation$data = {| +text: ?string, |}, +id: string, + +lastName: ?string, |}, |}, |}; @@ -123,6 +124,13 @@ v3 = [ "plural": false, "selections": [ (v1/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null + }, { "alias": null, "args": null, @@ -176,7 +184,7 @@ return { "selections": (v3/*: any*/) }, "params": { - "cacheID": "33e7acb2141a8b4cc30d8eb08feb28b2", + "cacheID": "bc860715cdbb5c16bfec84cccf10eb67", "id": null, "metadata": { "relayTestingSelectionTypeInfo": { @@ -203,18 +211,19 @@ return { }, "commentCreate.feedback.body": (v5/*: any*/), "commentCreate.feedback.body.text": (v6/*: any*/), - "commentCreate.feedback.id": (v4/*: any*/) + "commentCreate.feedback.id": (v4/*: any*/), + "commentCreate.feedback.lastName": (v6/*: any*/) } }, "name": "RelayModernEnvironmentWithOperationTrackerTest1Mutation", "operationKind": "mutation", - "text": "mutation RelayModernEnvironmentWithOperationTrackerTest1Mutation(\n $input: CommentCreateInput\n) {\n commentCreate(input: $input) {\n comment {\n id\n message {\n text\n }\n }\n feedback {\n id\n body {\n text\n }\n }\n }\n}\n" + "text": "mutation RelayModernEnvironmentWithOperationTrackerTest1Mutation(\n $input: CommentCreateInput\n) {\n commentCreate(input: $input) {\n 
comment {\n id\n message {\n text\n }\n }\n feedback {\n id\n lastName\n body {\n text\n }\n }\n }\n}\n" } }; })(); if (__DEV__) { - (node/*: any*/).hash = "191ed594a345f64de3ccd4b8bc51e924"; + (node/*: any*/).hash = "6a10ff9c1fc045181ae2f8edcaf0e88a"; } module.exports = ((node/*: any*/)/*: Mutation< diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment.graphql.js index 975571380a1dc..e8f349f0c5350 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestFeedbackFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<64502aba61fde822f8bd4288691ff4bf>> * @flow * @lightSyntaxTransform * @nogrep @@ -104,10 +104,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -144,7 +141,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -152,9 +149,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -177,7 +172,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "storageKey": null diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestMutation.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestMutation.graphql.js index c0488ae0ee9bc..1adcfc888d464 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestMutation.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestMutation.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<1225f379460bdfccffc2728a23932428>> * @flow * @lightSyntaxTransform * @nogrep @@ -213,10 +213,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -254,7 +251,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -262,9 +259,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -288,7 +283,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v2/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestQuery.graphql.js index c0cc3da0b52a5..badc8f111ad23 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestQuery.graphql.js +++ 
b/packages/relay-runtime/store/__tests__/__generated__/RelayModernEnvironmentWithOperationTrackerTestQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7b4c058a465de1a10f9a9ed50272696d>> + * @generated SignedSource<<0bd821834d2425699b7cf0ece8c733ff>> * @flow * @lightSyntaxTransform * @nogrep @@ -186,10 +186,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -227,7 +224,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" }, { "alias": "plainNameRenderer", @@ -235,9 +232,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], "concreteType": null, @@ -261,7 +256,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v3/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTest2Query.graphql.js index 7f0852307a46f..fb6c78ebb8113 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTest2Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1cfb508e2d0094231d72d3244e935bf8>> + * @generated SignedSource<<6f49d4bee95cf370371492f2bb49b315>> * @flow * @lightSyntaxTransform * @nogrep @@ -22,7 +22,7 @@ import type { RelayReaderAliasedFragmentsTest_user$fragmentType } from "./RelayR export type RelayReaderAliasedFragmentsTest2Query$variables = {||}; export type 
RelayReaderAliasedFragmentsTest2Query$data = {| +me: ?{| - +aliased_fragment: ?{| + +aliased_fragment: {| +$fragmentSpreads: RelayReaderAliasedFragmentsTest_user$fragmentType, |}, |}, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery.graphql.js new file mode 100644 index 0000000000000..395ab372d5a77 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery.graphql.js @@ -0,0 +1,127 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<592fb071da6a8c9365872feb31ecd07e>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$variables = {||}; +export type RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$data = {| + +me: ?{| + +aliased_fragment: ?{| + +name: string, + |}, + |}, +|}; +export type RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery = {| + response: RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$data, + variables: RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": 
null, + "name": "RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "RequiredField", + "field": (v0/*: any*/), + "action": "NONE", + "path": "me.aliased_fragment.name" + } + ], + "type": null, + "abstractKey": null + }, + "kind": "AliasedInlineFragmentSpread", + "name": "aliased_fragment" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "4cc950c6ab6ecce0af2e58e9ebbe882a", + "id": null, + "metadata": {}, + "name": "RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery", + "operationKind": "query", + "text": "query RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery {\n me {\n name\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "2e11b67dd1bbc6f668f878ea40e358e7"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$variables, + RelayReaderAliasedFragmentsTestAliasedInlineFragmentWithoutTypeConditionQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Fragment.graphql.js 
b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Fragment.graphql.js new file mode 100644 index 0000000000000..2a18652cbf21c --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<5d5b904f8dd5ea05c5da736e5c9344eb>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayReaderAliasedFragmentsTestConditional2Fragment$fragmentType: FragmentType; +export type RelayReaderAliasedFragmentsTestConditional2Fragment$data = {| + +name: ?string, + +$fragmentType: RelayReaderAliasedFragmentsTestConditional2Fragment$fragmentType, +|}; +export type RelayReaderAliasedFragmentsTestConditional2Fragment$key = { + +$data?: RelayReaderAliasedFragmentsTestConditional2Fragment$data, + +$fragmentSpreads: RelayReaderAliasedFragmentsTestConditional2Fragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderAliasedFragmentsTestConditional2Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "da47c7d96012f84b35aa45d5fb33b715"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayReaderAliasedFragmentsTestConditional2Fragment$fragmentType, + RelayReaderAliasedFragmentsTestConditional2Fragment$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Query.graphql.js new file mode 100644 index 0000000000000..186bb76860bf4 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditional2Query.graphql.js @@ -0,0 +1,144 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<6b55c2cbb0376b54d4c943aa34563477>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType } from "./RelayReaderAliasedFragmentsTestConditionalFragment.graphql"; +export type RelayReaderAliasedFragmentsTestConditional2Query$variables = {| + someCondition: boolean, +|}; +export type RelayReaderAliasedFragmentsTestConditional2Query$data = {| + +me: ?{| + +aliased_fragment?: ?{| + +$fragmentSpreads: RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType, + |}, + |}, +|}; +export type RelayReaderAliasedFragmentsTestConditional2Query = {| + response: RelayReaderAliasedFragmentsTestConditional2Query$data, + variables: RelayReaderAliasedFragmentsTestConditional2Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "someCondition" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderAliasedFragmentsTestConditional2Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "condition": "someCondition", + "kind": "Condition", + "passingValue": false, + "selections": [ + { + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "RelayReaderAliasedFragmentsTestConditionalFragment" + }, + "kind": "AliasedFragmentSpread", + "name": "aliased_fragment", + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": 
"RelayReaderAliasedFragmentsTestConditional2Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "condition": "someCondition", + "kind": "Condition", + "passingValue": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "e8cfb317c8a012b383b7012a00d99efd", + "id": null, + "metadata": {}, + "name": "RelayReaderAliasedFragmentsTestConditional2Query", + "operationKind": "query", + "text": "query RelayReaderAliasedFragmentsTestConditional2Query(\n $someCondition: Boolean!\n) {\n me {\n ...RelayReaderAliasedFragmentsTestConditionalFragment @skip(if: $someCondition)\n id\n }\n}\n\nfragment RelayReaderAliasedFragmentsTestConditionalFragment on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "74cc4a7c44594fd9bd8ba6862c83a664"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderAliasedFragmentsTestConditional2Query$variables, + RelayReaderAliasedFragmentsTestConditional2Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalFragment.graphql.js new file mode 100644 index 0000000000000..277b2e94c31ef --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalFragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<43f61d60d326a9969d11c793eb4e018d>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType: FragmentType; +export type RelayReaderAliasedFragmentsTestConditionalFragment$data = {| + +name: ?string, + +$fragmentType: RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType, +|}; +export type RelayReaderAliasedFragmentsTestConditionalFragment$key = { + +$data?: RelayReaderAliasedFragmentsTestConditionalFragment$data, + +$fragmentSpreads: RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderAliasedFragmentsTestConditionalFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "12db2e04f1efe5abcf500fbf98c4748f"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType, + RelayReaderAliasedFragmentsTestConditionalFragment$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalQuery.graphql.js new file mode 100644 index 0000000000000..d81467d1edee7 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderAliasedFragmentsTestConditionalQuery.graphql.js @@ -0,0 +1,144 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType } from "./RelayReaderAliasedFragmentsTestConditionalFragment.graphql"; +export type RelayReaderAliasedFragmentsTestConditionalQuery$variables = {| + someCondition: boolean, +|}; +export type RelayReaderAliasedFragmentsTestConditionalQuery$data = {| + +me: ?{| + +aliased_fragment?: ?{| + +$fragmentSpreads: RelayReaderAliasedFragmentsTestConditionalFragment$fragmentType, + |}, + |}, +|}; +export type RelayReaderAliasedFragmentsTestConditionalQuery = {| + response: RelayReaderAliasedFragmentsTestConditionalQuery$data, + variables: RelayReaderAliasedFragmentsTestConditionalQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "someCondition" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderAliasedFragmentsTestConditionalQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "condition": "someCondition", + "kind": "Condition", + "passingValue": false, + "selections": [ + { + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "RelayReaderAliasedFragmentsTestConditionalFragment" + }, + "kind": "AliasedFragmentSpread", + "name": "aliased_fragment", + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + 
"argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayReaderAliasedFragmentsTestConditionalQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "condition": "someCondition", + "kind": "Condition", + "passingValue": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "f5ba044f54f62ff8f59612bd2e688f9b", + "id": null, + "metadata": {}, + "name": "RelayReaderAliasedFragmentsTestConditionalQuery", + "operationKind": "query", + "text": "query RelayReaderAliasedFragmentsTestConditionalQuery(\n $someCondition: Boolean!\n) {\n me {\n ...RelayReaderAliasedFragmentsTestConditionalFragment @skip(if: $someCondition)\n id\n }\n}\n\nfragment RelayReaderAliasedFragmentsTestConditionalFragment on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "8a983bb43381d44133495544bf14d58a"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderAliasedFragmentsTestConditionalQuery$variables, + RelayReaderAliasedFragmentsTestConditionalQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTest4Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTest4Query.graphql.js index d9f2b28c66f7e..1d8c95750d9e3 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTest4Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<25ae7fc4adf755df6b05db0fd9b189b2>> + * @generated SignedSource<<75f61164d73fa4c9cafabd8f73cae583>> * 
@flow * @lightSyntaxTransform * @nogrep @@ -118,7 +118,7 @@ return { "kind": "RelayResolver", "name": "another_client_edge", "resolverModule": require('./../resolvers/UserAnotherClientEdgeResolver').another_client_edge, - "path": "me.another_client_edge" + "path": "me.client_edge.another_client_edge" }, "linkedField": { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTestMissingClientEdgeDataQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTestMissingClientEdgeDataQuery.graphql.js index 2e226fc6f9b9b..f20b08573a4cb 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTestMissingClientEdgeDataQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderClientEdgesTestMissingClientEdgeDataQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<48ae794e5dde6a9465bbf5892679cd31>> + * @generated SignedSource<<30aff183aa754716b7491388977376dd>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {reads_client_edge as userReadsClientEdgeResolverType} from "../resolvers // A type error here indicates that the type signature of the resolver module is incorrect. 
(userReadsClientEdgeResolverType: ( rootKey: UserReadsClientEdgeResolver$key, -) => mixed); +) => ?string); export type RelayReaderClientEdgesTestMissingClientEdgeDataQuery$variables = {||}; export type RelayReaderClientEdgesTestMissingClientEdgeDataQuery$data = {| +me: ?{| - +reads_client_edge: ?$Call<((...empty[]) => R) => R, typeof userReadsClientEdgeResolverType>, + +reads_client_edge: ?string, |}, |}; export type RelayReaderClientEdgesTestMissingClientEdgeDataQuery = {| @@ -119,7 +119,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest1Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest1Query.graphql.js new file mode 100644 index 0000000000000..334e99ea265f6 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest1Query.graphql.js @@ -0,0 +1,109 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<8109949e4e7d1f86610c1aad68b02782>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderRelayErrorHandlingTest1Query$variables = {||}; +export type RelayReaderRelayErrorHandlingTest1Query$data = {| + +me: ?{| + +lastName: ?string, + |}, +|}; +export type RelayReaderRelayErrorHandlingTest1Query = {| + response: RelayReaderRelayErrorHandlingTest1Query$data, + variables: RelayReaderRelayErrorHandlingTest1Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderRelayErrorHandlingTest1Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRelayErrorHandlingTest1Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "e3098f4a550368c2e87bbe6b06628604", + "id": null, + "metadata": {}, + "name": "RelayReaderRelayErrorHandlingTest1Query", + "operationKind": "query", + "text": "query RelayReaderRelayErrorHandlingTest1Query {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = 
"7be565b5d7792f0144f4cc4e34853a6f"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRelayErrorHandlingTest1Query$variables, + RelayReaderRelayErrorHandlingTest1Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest2Query.graphql.js new file mode 100644 index 0000000000000..c0f1caf9ef02c --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRelayErrorHandlingTest2Query.graphql.js @@ -0,0 +1,109 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderRelayErrorHandlingTest2Query$variables = {||}; +export type RelayReaderRelayErrorHandlingTest2Query$data = {| + +me: ?{| + +lastName: ?string, + |}, +|}; +export type RelayReaderRelayErrorHandlingTest2Query = {| + response: RelayReaderRelayErrorHandlingTest2Query$data, + variables: RelayReaderRelayErrorHandlingTest2Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderRelayErrorHandlingTest2Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + 
"operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRelayErrorHandlingTest2Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "6b8c010a0d91f4f7baa63cff39a7134a", + "id": null, + "metadata": {}, + "name": "RelayReaderRelayErrorHandlingTest2Query", + "operationKind": "query", + "text": "query RelayReaderRelayErrorHandlingTest2Query {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "89a4174b333dd303677d0e197c236178"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRelayErrorHandlingTest2Query$variables, + RelayReaderRelayErrorHandlingTest2Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest25Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest25Query.graphql.js new file mode 100644 index 0000000000000..c137856aed3a1 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest25Query.graphql.js @@ -0,0 +1,176 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<86902294f3e87ef8874799bbffd9a469>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import {client_object as userClientObjectResolverType} from "../resolvers/UserClientEdgeClientObjectResolver.js"; +// Type assertion validating that `userClientObjectResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userClientObjectResolverType: ( + args: {| + return_null: boolean, + |}, +) => ?User__client_object$normalization); +import type { User__client_object$normalization } from "./../resolvers/__generated__/User__client_object$normalization.graphql"; +export type RelayReaderRequiredFieldsTest25Query$variables = {||}; +export type RelayReaderRequiredFieldsTest25Query$data = {| + +me: ?{| + +client_object: {| + +description: ?string, + |}, + |}, +|}; +export type RelayReaderRequiredFieldsTest25Query = {| + response: RelayReaderRequiredFieldsTest25Query$data, + variables: RelayReaderRequiredFieldsTest25Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "return_null", + "value": true + } +], +v1 = { + "alias": null, + "args": (v0/*: any*/), + "concreteType": "ClientObject", + "kind": "LinkedField", + "name": "client_object", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ], + "storageKey": "client_object(return_null:true)" +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayReaderRequiredFieldsTest25Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + 
"selections": [ + { + "kind": "RequiredField", + "field": { + "kind": "ClientEdgeToClientObject", + "concreteType": "ClientObject", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": (v0/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "name": "client_object", + "resolverModule": require('./../resolvers/UserClientEdgeClientObjectResolver').client_object, + "path": "me.client_object", + "normalizationInfo": { + "kind": "OutputType", + "concreteType": "ClientObject", + "plural": false, + "normalizationNode": require('./../resolvers/__generated__/User__client_object$normalization.graphql') + } + }, + "linkedField": (v1/*: any*/) + }, + "action": "THROW", + "path": "me.client_object" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTest25Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "client_object", + "args": (v0/*: any*/), + "fragment": null, + "kind": "RelayResolver", + "storageKey": "client_object(return_null:true)", + "isOutputType": true + }, + "linkedField": (v1/*: any*/) + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "4847d92a3b6003ac088ad440f688d84e", + "id": null, + "metadata": {}, + "name": "RelayReaderRequiredFieldsTest25Query", + "operationKind": "query", + "text": "query RelayReaderRequiredFieldsTest25Query {\n me {\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "779eb09bcd57cff82e91efb6a9e12664"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRequiredFieldsTest25Query$variables, + 
RelayReaderRequiredFieldsTest25Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest26Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest26Query.graphql.js new file mode 100644 index 0000000000000..b8b3e10a68541 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest26Query.graphql.js @@ -0,0 +1,252 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<7aab50412e7bc7c879acb50c40814aa9>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { AstrologicalSignNameResolver$key } from "./../resolvers/__generated__/AstrologicalSignNameResolver.graphql"; +import type { UserAstrologicalSignResolver$key } from "./../resolvers/__generated__/UserAstrologicalSignResolver.graphql"; +import {name as astrologicalSignNameResolverType} from "../resolvers/AstrologicalSignNameResolver.js"; +// Type assertion validating that `astrologicalSignNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(astrologicalSignNameResolverType: ( + rootKey: AstrologicalSignNameResolver$key, +) => ?string); +import {astrological_sign as userAstrologicalSignResolverType} from "../resolvers/UserAstrologicalSignResolver.js"; +// Type assertion validating that `userAstrologicalSignResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(userAstrologicalSignResolverType: ( + rootKey: UserAstrologicalSignResolver$key, +) => ?{| + +id: DataID, +|}); +export type RelayReaderRequiredFieldsTest26Query$variables = {||}; +export type RelayReaderRequiredFieldsTest26Query$data = {| + +me: ?{| + +astrological_sign: {| + +name: ?string, + |}, + |}, +|}; +export type RelayReaderRequiredFieldsTest26Query = {| + response: RelayReaderRequiredFieldsTest26Query$data, + variables: RelayReaderRequiredFieldsTest26Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayReaderRequiredFieldsTest26Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "RequiredField", + "field": { + "kind": "ClientEdgeToClientObject", + "concreteType": "AstrologicalSign", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "UserAstrologicalSignResolver" + }, + "kind": "RelayResolver", + "name": "astrological_sign", + "resolverModule": require('./../resolvers/UserAstrologicalSignResolver').astrological_sign, + "path": "me.astrological_sign" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "AstrologicalSign", + "kind": "LinkedField", + "name": "astrological_sign", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "AstrologicalSignNameResolver" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.name" + } + ], + "storageKey": null 
+ } + }, + "action": "THROW", + "path": "me.astrological_sign" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTest26Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "astrological_sign", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Date", + "kind": "LinkedField", + "name": "birthdate", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "month", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "day", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "AstrologicalSign", + "kind": "LinkedField", + "name": "astrological_sign", + "plural": false, + "selections": [ + { + "name": "name", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "self", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "AstrologicalSign", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "AstrologicalSign", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + (v0/*: any*/) + ], + "storageKey": null + } + }, + (v0/*: any*/) + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "9823e24d5d3e64302fecd87b35bba3af", + "id": null, + 
"metadata": {}, + "name": "RelayReaderRequiredFieldsTest26Query", + "operationKind": "query", + "text": "query RelayReaderRequiredFieldsTest26Query {\n me {\n ...UserAstrologicalSignResolver\n id\n }\n}\n\nfragment UserAstrologicalSignResolver on User {\n birthdate {\n month\n day\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "5945ab813a6acb50eef24fffbe6bdc50"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRequiredFieldsTest26Query$variables, + RelayReaderRequiredFieldsTest26Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest27Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest27Query.graphql.js new file mode 100644 index 0000000000000..3cbcfecf1485f --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest27Query.graphql.js @@ -0,0 +1,221 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { DataID } from "relay-runtime"; +import type { AstrologicalSignNameResolver$key } from "./../resolvers/__generated__/AstrologicalSignNameResolver.graphql"; +import type { QueryAllAstrologicalSignsResolver$key } from "./../resolvers/__generated__/QueryAllAstrologicalSignsResolver.graphql"; +import {name as astrologicalSignNameResolverType} from "../resolvers/AstrologicalSignNameResolver.js"; +// Type assertion validating that `astrologicalSignNameResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(astrologicalSignNameResolverType: ( + rootKey: AstrologicalSignNameResolver$key, +) => ?string); +import {all_astrological_signs as queryAllAstrologicalSignsResolverType} from "../resolvers/QueryAllAstrologicalSignsResolver.js"; +// Type assertion validating that `queryAllAstrologicalSignsResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryAllAstrologicalSignsResolverType: ( + rootKey: QueryAllAstrologicalSignsResolver$key, +) => ?$ReadOnlyArray<{| + +id: DataID, +|}>); +export type RelayReaderRequiredFieldsTest27Query$variables = {||}; +export type RelayReaderRequiredFieldsTest27Query$data = {| + +all_astrological_signs: $ReadOnlyArray<{| + +name: ?string, + |}>, +|}; +export type RelayReaderRequiredFieldsTest27Query = {| + response: RelayReaderRequiredFieldsTest27Query$data, + variables: RelayReaderRequiredFieldsTest27Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayReaderRequiredFieldsTest27Query", + "selections": [ + { + "kind": "RequiredField", + "field": { + "kind": "ClientEdgeToClientObject", + "concreteType": "AstrologicalSign", + "modelResolvers": null, + "backingField": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "QueryAllAstrologicalSignsResolver" + }, + "kind": "RelayResolver", + "name": "all_astrological_signs", + "resolverModule": require('./../resolvers/QueryAllAstrologicalSignsResolver').all_astrological_signs, + "path": "all_astrological_signs" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "AstrologicalSign", + "kind": "LinkedField", + "name": "all_astrological_signs", + "plural": true, + 
"selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "AstrologicalSignNameResolver" + }, + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../resolvers/AstrologicalSignNameResolver').name, + "path": "all_astrological_signs.name" + } + ], + "storageKey": null + } + }, + "action": "THROW", + "path": "all_astrological_signs" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTest27Query", + "selections": [ + { + "kind": "ClientEdgeToClientObject", + "backingField": { + "name": "all_astrological_signs", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v0/*: any*/) + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "AstrologicalSign", + "kind": "LinkedField", + "name": "all_astrological_signs", + "plural": true, + "selections": [ + { + "name": "name", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "self", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + (v0/*: any*/) + ], + "type": "AstrologicalSign", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "AstrologicalSign", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + (v0/*: any*/) + ], + "storageKey": null + } + } + ] + }, + "params": { + 
"cacheID": "c37ac49844094f5ea8eac001dfc21ffc", + "id": null, + "metadata": {}, + "name": "RelayReaderRequiredFieldsTest27Query", + "operationKind": "query", + "text": "query RelayReaderRequiredFieldsTest27Query {\n ...QueryAllAstrologicalSignsResolver\n}\n\nfragment QueryAllAstrologicalSignsResolver on Query {\n me {\n __typename\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "2251dd1ef8dfb9d0586df501107b45be"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRequiredFieldsTest27Query$variables, + RelayReaderRequiredFieldsTest27Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest28Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest28Query.graphql.js new file mode 100644 index 0000000000000..478238d218272 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTest28Query.graphql.js @@ -0,0 +1,122 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<2982ed5188239b12ccf6a67c6f56d80c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState, DataID } from "relay-runtime"; +import {live_user_resolver_always_suspend as queryLiveUserResolverAlwaysSuspendResolverType} from "../resolvers/LiveUserAlwaysSuspendResolver.js"; +// Type assertion validating that `queryLiveUserResolverAlwaysSuspendResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryLiveUserResolverAlwaysSuspendResolverType: () => LiveState); +export type RelayReaderRequiredFieldsTest28Query$variables = {||}; +export type RelayReaderRequiredFieldsTest28Query$data = {| + +live_user_resolver_always_suspend: {| + +name: ?string, + |}, +|}; +export type RelayReaderRequiredFieldsTest28Query = {| + response: RelayReaderRequiredFieldsTest28Query$data, + variables: RelayReaderRequiredFieldsTest28Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": { + "hasClientEdges": true + }, + "name": "RelayReaderRequiredFieldsTest28Query", + "selections": [ + { + "kind": "RequiredField", + "field": { + "kind": "ClientEdgeToServerObject", + "operation": require('./ClientEdgeQuery_RelayReaderRequiredFieldsTest28Query_live_user_resolver_always_suspend.graphql'), + "backingField": { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "live_user_resolver_always_suspend", + "resolverModule": require('./../resolvers/LiveUserAlwaysSuspendResolver').live_user_resolver_always_suspend, + "path": "live_user_resolver_always_suspend" + }, + "linkedField": { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "live_user_resolver_always_suspend", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "storageKey": null + } + }, + "action": "THROW", + "path": "live_user_resolver_always_suspend" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTest28Query", + "selections": [ + { + "name": "live_user_resolver_always_suspend", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": false + } + ] + }, + "params": { + "cacheID": 
"fb8240592c2eefb489c064bb56f44668", + "id": null, + "metadata": {}, + "name": "RelayReaderRequiredFieldsTest28Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "1ea17c6315e8ba285db304130201310d"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + RelayReaderRequiredFieldsTest28Query$variables, + RelayReaderRequiredFieldsTest28Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery.graphql.js new file mode 100644 index 0000000000000..8b72bd1e70165 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery.graphql.js @@ -0,0 +1,156 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$variables = {||}; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$data = {| + +me: ?{| + +requiredFields: ?{| + +backgroundImage: {| + +uri: string, + |}, + |}, + |}, +|}; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery = {| + response: RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$data, + variables: RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "concreteType": "Image", + "kind": "LinkedField", + "name": "backgroundImage", + "plural": false, + "selections": [ + { + "kind": "RequiredField", + "field": (v0/*: any*/), + "action": "LOG", + "path": "me.requiredFields.backgroundImage.uri" + } + ], + "storageKey": null + }, + "action": "LOG", + "path": "me.requiredFields.backgroundImage" + } + ], + "type": "User", + "abstractKey": null + }, + "kind": "AliasedInlineFragmentSpread", + "name": "requiredFields" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + 
"kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Image", + "kind": "LinkedField", + "name": "backgroundImage", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "0ffed2e7ad10cb518c2ce832925ff3d8", + "id": null, + "metadata": {}, + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery", + "operationKind": "query", + "text": "query RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery {\n me {\n backgroundImage {\n uri\n }\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "4511503ad7e5191998ea5cb15995a0cc"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$variables, + RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithTypeQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery.graphql.js new file mode 100644 index 0000000000000..f11f23e2133a8 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery.graphql.js @@ -0,0 +1,156 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<7d3164fc843ce9965e74a982ba290e5f>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$variables = {||}; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$data = {| + +me: ?{| + +requiredFields: ?{| + +backgroundImage: {| + +uri: string, + |}, + |}, + |}, +|}; +export type RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery = {| + response: RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$data, + variables: RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "concreteType": "Image", + "kind": "LinkedField", + "name": "backgroundImage", + "plural": false, + "selections": [ + { + "kind": "RequiredField", + "field": (v0/*: any*/), + "action": "LOG", + "path": "me.requiredFields.backgroundImage.uri" + } + ], + "storageKey": null + }, + "action": "LOG", + "path": "me.requiredFields.backgroundImage" + } + ], + "type": null, + 
"abstractKey": null + }, + "kind": "AliasedInlineFragmentSpread", + "name": "requiredFields" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "Image", + "kind": "LinkedField", + "name": "backgroundImage", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "9a3c80d36cc49662a1e808d9a1d03cd4", + "id": null, + "metadata": {}, + "name": "RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery", + "operationKind": "query", + "text": "query RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery {\n me {\n backgroundImage {\n uri\n }\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "8ffc3f3576c04809264bbc5cf439e34d"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$variables, + RelayReaderRequiredFieldsTestBubbleToAliasedInlineFragmentWithoutTypeQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest10Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest10Query.graphql.js index 9a439d5071f57..8c3a226e76b31 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest10Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest10Query.graphql.js @@ -6,7 +6,7 
@@ * * @oncall relay * - * @generated SignedSource<<7228ecabbb1754ecaf019c29b7598f10>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. (userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest10Query$variables = {||}; export type RelayReaderResolverTest10Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTest10Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest11Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest11Query.graphql.js index 2bee5f4941b30..c77510a423c87 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest11Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest11Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8465822cd6ae5a738909ed11c8f12f7d>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest11Query$variables = {||}; export type RelayReaderResolverTest11Query$data = {| +me: ?{| - +the_alias: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +the_alias: ?string, |}, |}; export type RelayReaderResolverTest11Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest12Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest12Query.graphql.js index 65fc3e44d5831..f6947a74fc0e9 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest12Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest12Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<13b521fbf8e48f80d041322ccb45e364>> + * @generated SignedSource<<869fcdd6d7df80060874622ffc7e5dd3>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {always_throws as userAlwaysThrowsResolverType} from "../resolvers/UserAl // A type error here indicates that the type signature of the resolver module is incorrect. 
(userAlwaysThrowsResolverType: ( rootKey: UserAlwaysThrowsResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest12Query$variables = {||}; export type RelayReaderResolverTest12Query$data = {| +me: ?{| - +always_throws: ?$Call<((...empty[]) => R) => R, typeof userAlwaysThrowsResolverType>, + +always_throws: ?string, |}, |}; export type RelayReaderResolverTest12Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest13Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest13Query.graphql.js index 67dd7c3edef06..0c719029e4cb7 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest13Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest13Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<7914ff4f288dd75e679e2a02227172ea>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {always_throws_transitively as userAlwaysThrowsTransitivelyResolverType} // A type error here indicates that the type signature of the resolver module is incorrect. 
(userAlwaysThrowsTransitivelyResolverType: ( rootKey: UserAlwaysThrowsTransitivelyResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest13Query$variables = {||}; export type RelayReaderResolverTest13Query$data = {| +me: ?{| - +always_throws_transitively: ?$Call<((...empty[]) => R) => R, typeof userAlwaysThrowsTransitivelyResolverType>, + +always_throws_transitively: ?string, |}, |}; export type RelayReaderResolverTest13Query = {| @@ -111,7 +111,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", @@ -119,7 +119,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest14Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest14Query.graphql.js index f31a171e347e6..69068bb465c63 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest14Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest14Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1cb32f6ea5b2d1b32c45d946001c7db5>> + * @generated SignedSource<<676a859c5112d5c227b5eabf2289ac72>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,10 +24,10 @@ import {throw_before_read as queryThrowBeforeReadResolverType} from "../resolver // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryThrowBeforeReadResolverType: ( rootKey: ThrowBeforeReadResolver$key, -) => mixed); +) => ?mixed); export type RelayReaderResolverTest14Query$variables = {||}; export type RelayReaderResolverTest14Query$data = {| - +throw_before_read: ?$Call<((...empty[]) => R) => R, typeof queryThrowBeforeReadResolverType>, + +throw_before_read: ?ReturnType, |}; export type RelayReaderResolverTest14Query = {| response: RelayReaderResolverTest14Query$data, @@ -107,7 +107,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest15Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest15Query.graphql.js index 6a1a8e31ee7dc..214742285b8ff 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest15Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest15Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,10 +24,10 @@ import {undefined_field as queryUndefinedFieldResolverType} from "../resolvers/U // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryUndefinedFieldResolverType: ( rootKey: UndefinedFieldResolver$key, -) => mixed); +) => ?mixed); export type RelayReaderResolverTest15Query$variables = {||}; export type RelayReaderResolverTest15Query$data = {| - +undefined_field: ?$Call<((...empty[]) => R) => R, typeof queryUndefinedFieldResolverType>, + +undefined_field: ?ReturnType, |}; export type RelayReaderResolverTest15Query = {| response: RelayReaderResolverTest15Query$data, @@ -107,7 +107,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest16Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest16Query.graphql.js index 990758903465c..a3b8e15311a6b 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest16Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest16Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<250e4ece52c71a593e9f193980c0bc45>> + * @generated SignedSource<<66ea8d74a2400604765a13c448a7fd07>> * @flow * @lightSyntaxTransform * @nogrep @@ -27,13 +27,13 @@ import {user_profile_picture_uri_with_scale as userUserProfilePictureUriWithScal args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest16Query$variables = {| scale: number, |}; export type RelayReaderResolverTest16Query$data = {| +me: ?{| - +user_profile_picture_uri_with_scale: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleResolverType>, + +user_profile_picture_uri_with_scale: ?string, |}, |}; export type RelayReaderResolverTest16Query = {| @@ -74,7 +74,7 @@ return { "selections": [ { "alias": null, - "args": null, + "args": [], "fragment": { "args": (v1/*: any*/), "kind": "FragmentSpread", @@ -136,7 +136,7 @@ return { }, "kind": "RelayResolver", 
"storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest17Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest17Query.graphql.js index f1f0edb1adf22..bb476ce465d23 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest17Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest17Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<5133942a5dca67f9143ad4e504070b3f>> * @flow * @lightSyntaxTransform * @nogrep @@ -27,11 +27,11 @@ import {user_profile_picture_uri_with_scale_and_default_value as userUserProfile args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest17Query$variables = {||}; export type RelayReaderResolverTest17Query$data = {| +me: ?{| - +user_profile_picture_uri_with_scale_and_default_value: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleAndDefaultValueResolverType>, + +user_profile_picture_uri_with_scale_and_default_value: ?string, |}, |}; export type RelayReaderResolverTest17Query = {| @@ -57,7 +57,7 @@ var node/*: ConcreteRequest*/ = { "selections": [ { "alias": null, - "args": null, + "args": [], "fragment": { "args": null, "kind": "FragmentSpread", @@ -125,7 +125,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest18Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest18Query.graphql.js index a07598f00f292..8904451f3f51c 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest18Query.graphql.js +++ 
b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest18Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -27,11 +27,11 @@ import {user_profile_picture_uri_with_scale_and_default_value as userUserProfile args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest18Query$variables = {||}; export type RelayReaderResolverTest18Query$data = {| +me: ?{| - +profile_picture2: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleAndDefaultValueResolverType>, + +profile_picture2: ?string, |}, |}; export type RelayReaderResolverTest18Query = {| @@ -65,7 +65,7 @@ return { "selections": [ { "alias": "profile_picture2", - "args": null, + "args": [], "fragment": { "args": (v0/*: any*/), "kind": "FragmentSpread", @@ -127,7 +127,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest19Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest19Query.graphql.js index 9cfcf84fecddb..933df4574bdea 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest19Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest19Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -27,7 +27,7 @@ import {user_profile_picture_uri_with_scale_and_default_value as userUserProfile args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest19Query$variables = {| scale?: ?number, |}; @@ -36,7 +36,7 @@ export type RelayReaderResolverTest19Query$data = {| +big_profile_picture: ?{| +uri: ?string, |}, - 
+profile_picture2: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleAndDefaultValueResolverType>, + +profile_picture2: ?string, |}, |}; export type RelayReaderResolverTest19Query = {| @@ -102,7 +102,7 @@ return { "selections": [ { "alias": "profile_picture2", - "args": null, + "args": [], "fragment": { "args": (v1/*: any*/), "kind": "FragmentSpread", @@ -157,7 +157,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v3/*: any*/), { diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest1Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest1Query.graphql.js index 4440d41dc3199..b2b1fd190a8d1 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest1Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest1Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7c6d8f64453af714478bc9ad66dacc8c>> + * @generated SignedSource<<3b146cb3a2dfe538f90cbdcbcd3f5a8c>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest1Query$variables = {||}; export type RelayReaderResolverTest1Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTest1Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest20Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest20Query.graphql.js index 200cb997d1fbb..f7454997dbf7e 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest20Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest20Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1bcc156d24711fc019f0e40f190a5035>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -27,13 +27,13 @@ import {user_profile_picture_uri_with_scale as userUserProfilePictureUriWithScal args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest20Query$variables = {| scale: number, |}; export type RelayReaderResolverTest20Query$data = {| +me: ?{| - +profile_picture: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleResolverType>, + +profile_picture: ?string, |}, |}; export type RelayReaderResolverTest20Query = {| @@ -74,7 +74,7 @@ return { "selections": [ { "alias": "profile_picture", - "args": null, + "args": [], "fragment": { "args": (v1/*: any*/), "kind": "FragmentSpread", @@ -136,7 +136,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest21Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest21Query.graphql.js index 2ea07755fdde5..30dfd741bea01 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest21Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest21Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<049d2e4bc707c12d548429f4c85a3c5b>> + * @generated SignedSource<<7ae24580e79e0a8c0d65514a5c9dd18b>> * @flow * @lightSyntaxTransform * @nogrep @@ -27,13 +27,13 @@ import {user_profile_picture_uri_with_scale as userUserProfilePictureUriWithScal args: {| scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest21Query$variables = {| scale: number, |}; export type RelayReaderResolverTest21Query$data = {| +me: ?{| - +profile_picture: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleResolverType>, + +profile_picture: ?string, |}, |}; export type RelayReaderResolverTest21Query = {| @@ -74,7 +74,7 @@ return { "selections": [ { "alias": "profile_picture", - "args": null, + "args": [], "fragment": { "args": (v1/*: any*/), "kind": "FragmentSpread", @@ -136,7 +136,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest22Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest22Query.graphql.js index bb7deffadf6d3..8a71e96361661 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest22Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest22Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated 
SignedSource<<67b327de224974c36ac0582580c2eae4>> * @flow * @lightSyntaxTransform * @nogrep @@ -28,14 +28,14 @@ import {user_profile_picture_uri_with_scale_and_additional_argument as userUserP name: ?string, scale: ?number, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTest22Query$variables = {| name?: ?string, scale: number, |}; export type RelayReaderResolverTest22Query$data = {| +me: ?{| - +profile_picture: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleAndAdditionalArgumentResolverType>, + +profile_picture: ?string, |}, |}; export type RelayReaderResolverTest22Query = {| @@ -154,7 +154,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest2Query.graphql.js index bbff1ed0a429d..7432c91d32bb8 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest2Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<99fda8499ec290ac6be8aff8703f737d>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {constant_dependent as userConstantDependentResolverType} from "../resolv // A type error here indicates that the type signature of the resolver module is incorrect. 
(userConstantDependentResolverType: ( rootKey: UserConstantDependentResolver$key, -) => mixed); +) => ?number); export type RelayReaderResolverTest2Query$variables = {||}; export type RelayReaderResolverTest2Query$data = {| +me: ?{| - +constant_dependent: ?$Call<((...empty[]) => R) => R, typeof userConstantDependentResolverType>, + +constant_dependent: ?number, |}, |}; export type RelayReaderResolverTest2Query = {| @@ -111,7 +111,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", @@ -119,7 +119,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest3Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest3Query.graphql.js index 57150b5846264..5a6046f098a88 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest3Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest3Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<84ea7454194b7f00c9465226d7d03823>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest3Query$variables = {||}; export type RelayReaderResolverTest3Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTest3Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest4Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest4Query.graphql.js index f5f1450f9f1cf..f3f0c04ae2d2b 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest4Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3ef7c024ce57b5050c220a7be771f6a8>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {best_friend_greeting as userBestFriendGreetingResolverType} from "../res // A type error here indicates that the type signature of the resolver module is incorrect. 
(userBestFriendGreetingResolverType: ( rootKey: UserBestFriendGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest4Query$variables = {||}; export type RelayReaderResolverTest4Query$data = {| +me: ?{| - +best_friend_greeting: ?$Call<((...empty[]) => R) => R, typeof userBestFriendGreetingResolverType>, + +best_friend_greeting: ?string, |}, |}; export type RelayReaderResolverTest4Query = {| @@ -160,7 +160,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest5Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest5Query.graphql.js index 3d2e6c4383096..358c96c06bf31 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest5Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest5Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1731250b70734f9fc3bc978ba4db538e>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {shouted_greeting as userShoutedGreetingResolverType} from "../resolvers/ // A type error here indicates that the type signature of the resolver module is incorrect. 
(userShoutedGreetingResolverType: ( rootKey: UserShoutedGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest5Query$variables = {||}; export type RelayReaderResolverTest5Query$data = {| +me: ?{| - +shouted_greeting: ?$Call<((...empty[]) => R) => R, typeof userShoutedGreetingResolverType>, + +shouted_greeting: ?string, |}, |}; export type RelayReaderResolverTest5Query = {| @@ -111,7 +111,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", @@ -119,7 +119,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest6Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest6Query.graphql.js index c6c7fa70220af..5d416fc99b048 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest6Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest6Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<236a7c891b7e807f3d92505482e0e7e6>> + * @generated SignedSource<<4d6078a278ef99dc56d408649f1d9d0a>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {best_friend_shouted_greeting as userBestFriendShoutedGreetingResolverTyp // A type error here indicates that the type signature of the resolver module is incorrect. 
(userBestFriendShoutedGreetingResolverType: ( rootKey: UserBestFriendShoutedGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest6Query$variables = {||}; export type RelayReaderResolverTest6Query$data = {| +me: ?{| - +best_friend_shouted_greeting: ?$Call<((...empty[]) => R) => R, typeof userBestFriendShoutedGreetingResolverType>, + +best_friend_shouted_greeting: ?string, |}, |}; export type RelayReaderResolverTest6Query = {| @@ -156,7 +156,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], @@ -174,7 +174,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest7Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest7Query.graphql.js index 196cc04f9169a..4c2f7547acc83 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest7Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest7Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<551a9179ee723de92a4a9d69e5b17db5>> + * @generated SignedSource<<5fd70c79bbcda93efd15c5c8df5c4864>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest7Query$variables = {||}; export type RelayReaderResolverTest7Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTest7Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest8Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest8Query.graphql.js index 188e6fd66bb21..321a3268d0054 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest8Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest8Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<80f23411aff8f7b4427a615b8e946a1b>> + * @generated SignedSource<<295785b03490c71a1aa606b2e6d61f2d>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {name_passthrough as userNamePassthroughResolverType} from "../resolvers/ // A type error here indicates that the type signature of the resolver module is incorrect. 
(userNamePassthroughResolverType: ( rootKey: UserNamePassthroughResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest8Query$variables = {||}; export type RelayReaderResolverTest8Query$data = {| +me: ?{| - +name_passthrough: $NonMaybeType<$Call<((...empty[]) => R) => R, typeof userNamePassthroughResolverType>>, + +name_passthrough: $NonMaybeType, |}, |}; export type RelayReaderResolverTest8Query = {| @@ -110,7 +110,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest9Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest9Query.graphql.js index 25a7c688064ef..c456770176659 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest9Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTest9Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<25c7bfc887d7b68c9cb64baeed931996>> + * @generated SignedSource<<0689154c6a247f7ec979665aec5494ba>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTest9Query$variables = {||}; export type RelayReaderResolverTest9Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTest9Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestCustomGreetingDynamicQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestCustomGreetingDynamicQuery.graphql.js index 8c45976e1ca34..77ead61445c5e 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestCustomGreetingDynamicQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestCustomGreetingDynamicQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<1aba9b07c641d36e60acc46f1a0f1c38>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -27,15 +27,15 @@ import {custom_greeting as userCustomGreetingResolverType} from "../resolvers/Us args: {| salutation: string, |}, -) => mixed); +) => ?string); export type RelayReaderResolverTestCustomGreetingDynamicQuery$variables = {| salutation: string, |}; export type RelayReaderResolverTestCustomGreetingDynamicQuery$data = {| +me: ?{| - +dynamic_greeting: ?$Call<((...empty[]) => R) => R, typeof userCustomGreetingResolverType>, - +greetz: ?$Call<((...empty[]) => R) => R, typeof userCustomGreetingResolverType>, - +willkommen: ?$Call<((...empty[]) => R) => R, typeof userCustomGreetingResolverType>, + +dynamic_greeting: ?string, + +greetz: ?string, + +willkommen: ?string, |}, |}; export type RelayReaderResolverTestCustomGreetingDynamicQuery = {| 
@@ -161,7 +161,7 @@ return { "fragment": (v5/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "name": "custom_greeting", @@ -169,7 +169,7 @@ return { "fragment": (v5/*: any*/), "kind": "RelayResolver", "storageKey": "custom_greeting(salutation:\"Greetz\")", - "isOutputType": false + "isOutputType": true }, { "name": "custom_greeting", @@ -177,7 +177,7 @@ return { "fragment": (v5/*: any*/), "kind": "RelayResolver", "storageKey": "custom_greeting(salutation:\"Willkommen\")", - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldError1Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldError1Query.graphql.js new file mode 100644 index 0000000000000..a33ab62d54d27 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldError1Query.graphql.js @@ -0,0 +1,109 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderResolverTestFieldError1Query$variables = {||}; +export type RelayReaderResolverTestFieldError1Query$data = {| + +me: ?{| + +lastName: ?string, + |}, +|}; +export type RelayReaderResolverTestFieldError1Query = {| + response: RelayReaderResolverTestFieldError1Query$data, + variables: RelayReaderResolverTestFieldError1Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderResolverTestFieldError1Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderResolverTestFieldError1Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "deb8521779c2aea0af0cb01a9adc85d5", + "id": null, + "metadata": {}, + "name": "RelayReaderResolverTestFieldError1Query", + "operationKind": "query", + "text": "query RelayReaderResolverTestFieldError1Query {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "f5150685e9912be474b120b3d29b5b22"; +} + 
+module.exports = ((node/*: any*/)/*: Query< + RelayReaderResolverTestFieldError1Query$variables, + RelayReaderResolverTestFieldError1Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldErrorQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldErrorQuery.graphql.js new file mode 100644 index 0000000000000..ebd92cbffebd4 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestFieldErrorQuery.graphql.js @@ -0,0 +1,109 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<2b5cbbd3caa82db632145b4bfb7266cc>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderResolverTestFieldErrorQuery$variables = {||}; +export type RelayReaderResolverTestFieldErrorQuery$data = {| + +me: ?{| + +lastName: ?string, + |}, +|}; +export type RelayReaderResolverTestFieldErrorQuery = {| + response: RelayReaderResolverTestFieldErrorQuery$data, + variables: RelayReaderResolverTestFieldErrorQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderResolverTestFieldErrorQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/) + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + 
"argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderResolverTestFieldErrorQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "9b12f6b94bffe358cb62ac3f2216ae87", + "id": null, + "metadata": {}, + "name": "RelayReaderResolverTestFieldErrorQuery", + "operationKind": "query", + "text": "query RelayReaderResolverTestFieldErrorQuery {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "ae22a10c004b68bb5d7df6f516619f83"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderResolverTestFieldErrorQuery$variables, + RelayReaderResolverTestFieldErrorQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMarkCleanQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMarkCleanQuery.graphql.js new file mode 100644 index 0000000000000..ba0446e141391 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMarkCleanQuery.graphql.js @@ -0,0 +1,153 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { UserConstantDependentResolver$key } from "./../resolvers/__generated__/UserConstantDependentResolver.graphql"; +import {constant_dependent as userConstantDependentResolverType} from "../resolvers/UserConstantDependentResolver.js"; +// Type assertion validating that `userConstantDependentResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(userConstantDependentResolverType: ( + rootKey: UserConstantDependentResolver$key, +) => ?number); +export type RelayReaderResolverTestMarkCleanQuery$variables = {||}; +export type RelayReaderResolverTestMarkCleanQuery$data = {| + +me: ?{| + +constant_dependent: ?number, + |}, +|}; +export type RelayReaderResolverTestMarkCleanQuery = {| + response: RelayReaderResolverTestMarkCleanQuery$data, + variables: RelayReaderResolverTestMarkCleanQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderResolverTestMarkCleanQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "UserConstantDependentResolver" + }, + "kind": "RelayResolver", + "name": "constant_dependent", + "resolverModule": require('./../resolvers/UserConstantDependentResolver').constant_dependent, + "path": "me.constant_dependent" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": 
"RelayReaderResolverTestMarkCleanQuery", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "name": "constant_dependent", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "name": "constant", + "args": null, + "fragment": { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ], + "type": "User", + "abstractKey": null + }, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "7d364ec2e4a00451fe2f21b8bf8d6a34", + "id": null, + "metadata": {}, + "name": "RelayReaderResolverTestMarkCleanQuery", + "operationKind": "query", + "text": "query RelayReaderResolverTestMarkCleanQuery {\n me {\n ...UserConstantDependentResolver\n id\n }\n}\n\nfragment UserConstantDependentResolver on User {\n ...UserConstantResolver\n}\n\nfragment UserConstantResolver on User {\n name\n}\n" + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "a7af7fda9e61cae33b58462f1322e3cd"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderResolverTestMarkCleanQuery$variables, + RelayReaderResolverTestMarkCleanQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMissingDataQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMissingDataQuery.graphql.js index f44a8bc621a68..b44bbe2f95fdd 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMissingDataQuery.graphql.js +++ 
b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestMissingDataQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<74ea18dca9d931583275fca818bf4474>> + * @generated SignedSource<<93edf3fe17895f5f597ae6a0841855ce>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../resolvers/UserGreetingRes // A type error here indicates that the type signature of the resolver module is incorrect. (userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTestMissingDataQuery$variables = {||}; export type RelayReaderResolverTestMissingDataQuery$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type RelayReaderResolverTestMissingDataQuery = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredQuery.graphql.js index 1e88e28d51bac..3e4e3ae6c12ae 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9264f772b3f0481a14ef0a1519ee895e>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {required_name as userRequiredNameResolverType} from "../resolvers/UserRe // A type error here indicates that the type signature of the resolver module is incorrect. 
(userRequiredNameResolverType: ( rootKey: UserRequiredNameResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTestRequiredQuery$variables = {||}; export type RelayReaderResolverTestRequiredQuery$data = {| +me: ?{| - +required_name: ?$Call<((...empty[]) => R) => R, typeof userRequiredNameResolverType>, + +required_name: ?string, |}, |}; export type RelayReaderResolverTestRequiredQuery = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredWithParentQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredWithParentQuery.graphql.js index 875b17602aaf0..cdd905d8af593 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredWithParentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderResolverTestRequiredWithParentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<6686d698e9d0150628d4ac0a7854251d>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,12 +24,12 @@ import {required_name as userRequiredNameResolverType} from "../resolvers/UserRe // A type error here indicates that the type signature of the resolver module is incorrect. 
(userRequiredNameResolverType: ( rootKey: UserRequiredNameResolver$key, -) => mixed); +) => ?string); export type RelayReaderResolverTestRequiredWithParentQuery$variables = {||}; export type RelayReaderResolverTestRequiredWithParentQuery$data = {| +me: ?{| +lastName: string, - +required_name: ?$Call<((...empty[]) => R) => R, typeof userRequiredNameResolverType>, + +required_name: ?string, |}, |}; export type RelayReaderResolverTestRequiredWithParentQuery = {| @@ -120,7 +120,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/), { diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery.graphql.js deleted file mode 100644 index 48db3ed40546d..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<0e627954c6c51d81c54e0fc321377146>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery = {| - response: RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$data, - variables: RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, 
- "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "e45c38ecfcd4f1a957657a5a2500d5f4", - "id": null, - "metadata": {}, - "name": "RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery", - "operationKind": "query", - "text": "query RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... 
on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "4c044218b936033e2034103a2d011f01"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$variables, - RelayReaderTestFeatureEnableReactFlightComponentFieldFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarFragment.graphql.js index 2f23c0eb7147f..e40f7f6005664 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8dcada45472d826c073cbf5a54c1a1cb>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarQuery.graphql.js index 898ee78460145..9245dfdd5c770 100644 --- 
a/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReaderTestWhenMatchDirectiveIsPresentBarQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -87,10 +87,7 @@ var node/*: ConcreteRequest*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -134,7 +131,7 @@ var node/*: ConcreteRequest*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "storageKey": null diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest3Fragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest3Fragment.graphql.js index 00bdd2f1a82c3..b3b501c99b02a 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest3Fragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest3Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<9677a7b555af8559a64e8592728ff78a>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git 
a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest5Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest5Query.graphql.js index 9ba8e789d1ded..fafddc097def5 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest5Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTest5Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<73b85959aa2fa825f7297da289465d87>> + * @generated SignedSource<<6aeb66a6feca27660bbb6bce58a88f58>> * @flow * @lightSyntaxTransform * @nogrep @@ -115,10 +115,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -156,7 +153,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestFlightQuery.graphql.js deleted file mode 100644 index cc49b311ac93d..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayReferenceMarkerTestFlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayReferenceMarkerTestFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayReferenceMarkerTestFlightQuery = {| - response: RelayReferenceMarkerTestFlightQuery$data, - variables: RelayReferenceMarkerTestFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, - "name": "RelayReferenceMarkerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - 
"operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayReferenceMarkerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "0159f073be38cdefd11248e5fc61eabd", - "id": null, - "metadata": {}, - "name": "RelayReferenceMarkerTestFlightQuery", - "operationKind": "query", - "text": "query RelayReferenceMarkerTestFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "09d7f9c41cf97f8858a39193930a2f54"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayReferenceMarkerTestFlightQuery$variables, - RelayReferenceMarkerTestFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestInnerQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestInnerQuery.graphql.js deleted file mode 100644 index b1cbf12647b60..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestInnerQuery.graphql.js +++ /dev/null @@ -1,139 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<1ebd406827b91d3476a93af08db10b9c>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayReferenceMarkerTestInnerQuery$variables = {| - id: string, -|}; -export type RelayReferenceMarkerTestInnerQuery$data = {| - +node: ?{| - +name?: ?string, - |}, -|}; -export type RelayReferenceMarkerTestInnerQuery = {| - response: RelayReferenceMarkerTestInnerQuery$data, - variables: RelayReferenceMarkerTestInnerQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -], -v2 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - } - ], - "type": "User", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayReferenceMarkerTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v2/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": "RelayReferenceMarkerTestInnerQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v2/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": 
null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "fc5c304dae91ac99c643cf8e8c190b01", - "id": null, - "metadata": {}, - "name": "RelayReferenceMarkerTestInnerQuery", - "operationKind": "query", - "text": "query RelayReferenceMarkerTestInnerQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ... on User {\n name\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "4c6a9934bb55d4c7f8779a65a12773c9"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayReferenceMarkerTestInnerQuery$variables, - RelayReferenceMarkerTestInnerQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithEdgeToClientQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithEdgeToClientQuery.graphql.js index c9ae08db4b875..73c9335c0da40 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithEdgeToClientQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithEdgeToClientQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3418ca4ec85276525ed889c967bff1d2>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,12 +25,12 @@ import {all_astrological_signs as queryAllAstrologicalSignsResolverType} from ". // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryAllAstrologicalSignsResolverType: ( rootKey: QueryAllAstrologicalSignsResolver$key, -) => ?$ReadOnlyArray ?$ReadOnlyArray<{| +id: DataID, |}>); export type RelayReferenceMarkerTestResolverWithEdgeToClientQuery$variables = {||}; export type RelayReferenceMarkerTestResolverWithEdgeToClientQuery$data = {| - +all_astrological_signs: ?$ReadOnlyArray, |}; @@ -72,6 +72,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithFragmentDependencyQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithFragmentDependencyQuery.graphql.js index 61da4124c85a2..66c72cd634494 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithFragmentDependencyQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithFragmentDependencyQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<429c2515c6189a501d77f91883d1a169>> + * @generated SignedSource<<9a95c9971543d55fa343fd66e819268d>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./../resolvers/__generated__/LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../resolvers/LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type RelayReferenceMarkerTestResolverWithFragmentDependencyQuery$variables = {||}; export type RelayReferenceMarkerTestResolverWithFragmentDependencyQuery$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, |}; export type RelayReferenceMarkerTestResolverWithFragmentDependencyQuery = {| response: RelayReferenceMarkerTestResolverWithFragmentDependencyQuery$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithNoFragmentQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithNoFragmentQuery.graphql.js index 2cc3d2a1e6e35..adef56a625f7e 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithNoFragmentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayReferenceMarkerTestResolverWithNoFragmentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<2a82c66f7991fa0b27afb928cf6a7367>> + * @generated SignedSource<<6a9eb24d7116dade424e44e45ec02ae5>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../resolvers/LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); export type RelayReferenceMarkerTestResolverWithNoFragmentQuery$variables = {||}; export type RelayReferenceMarkerTestResolverWithNoFragmentQuery$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, + +counter_no_fragment: ?number, |}; export type RelayReferenceMarkerTestResolverWithNoFragmentQuery = {| response: RelayReferenceMarkerTestResolverWithNoFragmentQuery$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest36Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest36Query.graphql.js new file mode 100644 index 0000000000000..3585d8d29e4de --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest36Query.graphql.js @@ -0,0 +1,258 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayResponseNormalizerTest36Query$variables = {| + id: string, +|}; +export type RelayResponseNormalizerTest36Query$data = {| + +node: ?{| + +__typename: string, + +firstName?: ?string, + +friends?: ?{| + +edges: ?$ReadOnlyArray, + |}, + +id: string, + +lastName?: ?string, + |}, +|}; +export type RelayResponseNormalizerTest36Query = {| + response: RelayResponseNormalizerTest36Query$data, + variables: RelayResponseNormalizerTest36Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "firstName", + "storageKey": null +}, +v5 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}, +v6 = [ + { + "kind": "Literal", + "name": "first", + "value": 3 + } +], +v7 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayResponseNormalizerTest36Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + { + 
"alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResponseNormalizerTest36Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + { + "alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "16dbfe72418d3b25d6e5d29d7474ea9f", + "id": null, + "metadata": {}, + "name": 
"RelayResponseNormalizerTest36Query", + "operationKind": "query", + "text": "query RelayResponseNormalizerTest36Query(\n $id: ID!\n) {\n node(id: $id) {\n id\n __typename\n ... on User {\n firstName\n lastName\n friends(first: 3) {\n edges {\n cursor\n node {\n firstName\n lastName\n id\n }\n }\n }\n }\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "8be99800fe67e7921699aa5a0c8c8463"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayResponseNormalizerTest36Query$variables, + RelayResponseNormalizerTest36Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest37Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest37Query.graphql.js new file mode 100644 index 0000000000000..32a7712764d80 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest37Query.graphql.js @@ -0,0 +1,258 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<8426db4f977bb65670f6e9650f0f12a7>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayResponseNormalizerTest37Query$variables = {| + id: string, +|}; +export type RelayResponseNormalizerTest37Query$data = {| + +node: ?{| + +__typename: string, + +firstName?: ?string, + +friends?: ?{| + +edges: ?$ReadOnlyArray, + |}, + +id: string, + +lastName?: ?string, + |}, +|}; +export type RelayResponseNormalizerTest37Query = {| + response: RelayResponseNormalizerTest37Query$data, + variables: RelayResponseNormalizerTest37Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "firstName", + "storageKey": null +}, +v5 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}, +v6 = [ + { + "kind": "Literal", + "name": "first", + "value": 3 + } +], +v7 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayResponseNormalizerTest37Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: 
any*/), + (v5/*: any*/), + { + "alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResponseNormalizerTest37Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + { + "alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "a46f2c38633722c2c4e220e7f9c6a9c2", + "id": 
null, + "metadata": {}, + "name": "RelayResponseNormalizerTest37Query", + "operationKind": "query", + "text": "query RelayResponseNormalizerTest37Query(\n $id: ID!\n) {\n node(id: $id) {\n id\n __typename\n ... on User {\n firstName\n lastName\n friends(first: 3) {\n edges {\n cursor\n node {\n firstName\n lastName\n id\n }\n }\n }\n }\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "3aaf1cb92f39ddaa1d9be07fb9fd9331"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayResponseNormalizerTest37Query$variables, + RelayResponseNormalizerTest37Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest38Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest38Query.graphql.js new file mode 100644 index 0000000000000..ae22b56984256 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest38Query.graphql.js @@ -0,0 +1,258 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayResponseNormalizerTest38Query$variables = {| + id: string, +|}; +export type RelayResponseNormalizerTest38Query$data = {| + +node: ?{| + +__typename: string, + +firstName?: ?string, + +friends?: ?{| + +edges: ?$ReadOnlyArray, + |}, + +id: string, + +lastName?: ?string, + |}, +|}; +export type RelayResponseNormalizerTest38Query = {| + response: RelayResponseNormalizerTest38Query$data, + variables: RelayResponseNormalizerTest38Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v3 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "firstName", + "storageKey": null +}, +v5 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}, +v6 = [ + { + "kind": "Literal", + "name": "first", + "value": 3 + } +], +v7 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayResponseNormalizerTest38Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + { + 
"alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayResponseNormalizerTest38Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + (v3/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + { + "alias": null, + "args": (v6/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + (v7/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v5/*: any*/), + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": "friends(first:3)" + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "c72726d285bf58dade07bd112bdbbad6", + "id": null, + "metadata": {}, + "name": 
"RelayResponseNormalizerTest38Query", + "operationKind": "query", + "text": "query RelayResponseNormalizerTest38Query(\n $id: ID!\n) {\n node(id: $id) {\n id\n __typename\n ... on User {\n firstName\n lastName\n friends(first: 3) {\n edges {\n cursor\n node {\n firstName\n lastName\n id\n }\n }\n }\n }\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "9a44481b5a1f547afa45110780a55a33"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayResponseNormalizerTest38Query$variables, + RelayResponseNormalizerTest38Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest4Query.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest4Query.graphql.js index 07aa2bad19df8..0530cfbe1e8d3 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest4Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<34795562f18eb1288e1a29e9cf5da972>> + * @generated SignedSource<<1fef67311a2b88600ec8009d0f4dbe33>> * @flow * @lightSyntaxTransform * @nogrep @@ -115,10 +115,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -156,7 +153,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFlightQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFlightQuery.graphql.js deleted file mode 100644 index ccb69ebb8d237..0000000000000 --- 
a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFlightQuery.graphql.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightServerDependency FlightComponent.server - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -export type RelayResponseNormalizerTestFlightQuery$variables = {| - count: number, - id: string, -|}; -export type RelayResponseNormalizerTestFlightQuery$data = {| - +node: ?{| - +flightComponent?: ?any, - |}, -|}; -export type RelayResponseNormalizerTestFlightQuery = {| - response: RelayResponseNormalizerTestFlightQuery$data, - variables: RelayResponseNormalizerTestFlightQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "count" -}, -v1 = { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" -}, -v2 = { - "kind": "Variable", - "name": "id", - "variableName": "id" -}, -v3 = [ - (v2/*: any*/) -], -v4 = { - "kind": "InlineFragment", - "selections": [ - { - "alias": "flightComponent", - "args": [ - { - "kind": "Literal", - "name": "component", - "value": "FlightComponent.server" - }, - { - "fields": [ - { - "kind": "Literal", - "name": "condition", - "value": true - }, - { - "kind": "Variable", - "name": "count", - "variableName": "count" - }, - (v2/*: any*/) - ], - "kind": "ObjectValue", - "name": "props" - } - ], - "kind": "FlightField", - "name": "flight", - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; -return { - "fragment": { - "argumentDefinitions": [ - (v0/*: any*/), - (v1/*: any*/) - ], - "kind": "Fragment", - "metadata": null, 
- "name": "RelayResponseNormalizerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - (v4/*: any*/) - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": [ - (v1/*: any*/), - (v0/*: any*/) - ], - "kind": "Operation", - "name": "RelayResponseNormalizerTestFlightQuery", - "selections": [ - { - "alias": null, - "args": (v3/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - (v4/*: any*/), - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "0330b8661d511af1582b5bd6409a4b93", - "id": null, - "metadata": {}, - "name": "RelayResponseNormalizerTestFlightQuery", - "operationKind": "query", - "text": "query RelayResponseNormalizerTestFlightQuery(\n $id: ID!\n $count: Int!\n) {\n node(id: $id) {\n __typename\n ... 
on Story {\n flightComponent: flight(component: \"FlightComponent.server\", props: {condition: true, count: $count, id: $id})\n }\n id\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "ff8e6b8262a0c8e39f55eaf9638a7c88"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayResponseNormalizerTestFlightQuery$variables, - RelayResponseNormalizerTestFlightQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFragment.graphql.js index 50c6042172de6..613c16ec72188 100644 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6b6a036923190fc0bd63075f2076d787>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestServerOrClientQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestServerOrClientQuery.graphql.js deleted file mode 100644 index ff96959cb6132..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTestServerOrClientQuery.graphql.js +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. 
and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<8d09d233fde5f03b60adf848e8aa3298>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -// @ReactFlightClientDependency RelayResponseNormalizerTest_clientFragment$normalization.graphql - -/*:: -import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { RelayResponseNormalizerTest_clientFragment$fragmentType } from "./RelayResponseNormalizerTest_clientFragment.graphql"; -export type RelayResponseNormalizerTestServerOrClientQuery$variables = {| - id: string, -|}; -export type RelayResponseNormalizerTestServerOrClientQuery$data = {| - +node: ?{| - +$fragmentSpreads: RelayResponseNormalizerTest_clientFragment$fragmentType, - |}, -|}; -export type RelayResponseNormalizerTestServerOrClientQuery = {| - response: RelayResponseNormalizerTestServerOrClientQuery$data, - variables: RelayResponseNormalizerTestServerOrClientQuery$variables, -|}; -*/ - -var node/*: ConcreteRequest*/ = (function(){ -var v0 = [ - { - "defaultValue": null, - "kind": "LocalArgument", - "name": "id" - } -], -v1 = [ - { - "kind": "Variable", - "name": "id", - "variableName": "id" - } -]; -return { - "fragment": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Fragment", - "metadata": null, - "name": "RelayResponseNormalizerTestServerOrClientQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "args": null, - "kind": "FragmentSpread", - "name": "RelayResponseNormalizerTest_clientFragment" - } - ], - "storageKey": null - } - ], - "type": "Query", - "abstractKey": null - }, - "kind": "Request", - "operation": { - "argumentDefinitions": (v0/*: any*/), - "kind": "Operation", - "name": 
"RelayResponseNormalizerTestServerOrClientQuery", - "selections": [ - { - "alias": null, - "args": (v1/*: any*/), - "concreteType": null, - "kind": "LinkedField", - "name": "node", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__typename", - "storageKey": null - }, - { - "args": null, - "fragment": require('./RelayResponseNormalizerTest_clientFragment$normalization.graphql'), - "kind": "ClientComponent" - }, - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "id", - "storageKey": null - } - ], - "storageKey": null - } - ] - }, - "params": { - "cacheID": "50369b33d8ddd327912866577ac3722e", - "id": null, - "metadata": {}, - "name": "RelayResponseNormalizerTestServerOrClientQuery", - "operationKind": "query", - "text": "query RelayResponseNormalizerTestServerOrClientQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n ...RelayResponseNormalizerTest_clientFragment @relay_client_component_server(module_id: \"RelayResponseNormalizerTest_clientFragment$normalization.graphql\")\n id\n }\n}\n\nfragment RelayResponseNormalizerTest_clientFragment on Story {\n name\n body {\n text\n }\n}\n" - } -}; -})(); - -if (__DEV__) { - (node/*: any*/).hash = "a0dd4ee40f4cb0fc29cc2dc260f83cde"; -} - -module.exports = ((node/*: any*/)/*: Query< - RelayResponseNormalizerTestServerOrClientQuery$variables, - RelayResponseNormalizerTestServerOrClientQuery$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment$normalization.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment$normalization.graphql.js deleted file mode 100644 index e334c04c7f204..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment$normalization.graphql.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. 
- * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @oncall relay - * - * @generated SignedSource<<64646c25f8efed87f7bcd6e0d6b47911>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "RelayResponseNormalizerTest_clientFragment$normalization", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ] -}; - -if (__DEV__) { - (node/*: any*/).hash = "4e927d138eadf9425e552317ba807e5b"; -} - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment.graphql.js deleted file mode 100644 index 238fe88c9be6f..0000000000000 --- a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_clientFragment.graphql.js +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<7198a0a0403bac51c54a0a274823a6bf>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { FragmentType } from "relay-runtime"; -declare export opaque type RelayResponseNormalizerTest_clientFragment$fragmentType: FragmentType; -export type RelayResponseNormalizerTest_clientFragment$data = {| - +body: ?{| - +text: ?string, - |}, - +name: ?string, - +$fragmentType: RelayResponseNormalizerTest_clientFragment$fragmentType, -|}; -export type RelayResponseNormalizerTest_clientFragment$key = { - +$data?: RelayResponseNormalizerTest_clientFragment$data, - +$fragmentSpreads: RelayResponseNormalizerTest_clientFragment$fragmentType, - ... -}; -*/ - -var node/*: ReaderFragment*/ = { - "argumentDefinitions": [], - "kind": "Fragment", - "metadata": null, - "name": "RelayResponseNormalizerTest_clientFragment", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "name", - "storageKey": null - }, - { - "alias": null, - "args": null, - "concreteType": "Text", - "kind": "LinkedField", - "name": "body", - "plural": false, - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "text", - "storageKey": null - } - ], - "storageKey": null - } - ], - "type": "Story", - "abstractKey": null -}; - -if (__DEV__) { - (node/*: any*/).hash = "4e927d138eadf9425e552317ba807e5b"; -} - -module.exports = ((node/*: any*/)/*: Fragment< - RelayResponseNormalizerTest_clientFragment$fragmentType, - RelayResponseNormalizerTest_clientFragment$data, ->*/); diff --git a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_pvQuery.graphql.js b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_pvQuery.graphql.js index 700ecbcbff4ef..64cb9cd64628e 100644 --- 
a/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_pvQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/__generated__/RelayResponseNormalizerTest_pvQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0bad2ff4004f0eb406a0dbcfa4c86cf5>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -32,21 +32,19 @@ export type RelayResponseNormalizerTest_pvQuery = {| response: RelayResponseNormalizerTest_pvQuery$data, variables: RelayResponseNormalizerTest_pvQuery$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') +}: {| +__relay_internal__pv__RelayProvider_returnsFalserelayprovider: {| +get: () => boolean, |}, +__relay_internal__pv__RelayProvider_returnsTruerelayprovider: {| +get: () => boolean, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), - "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -206,7 +204,10 @@ return { "name": "RelayResponseNormalizerTest_pvQuery", "operationKind": "query", "text": "query RelayResponseNormalizerTest_pvQuery(\n $id: ID!\n $__relay_internal__pv__RelayProvider_returnsTruerelayprovider: Boolean!\n $__relay_internal__pv__RelayProvider_returnsFalserelayprovider: Boolean!\n) {\n node(id: $id) {\n __typename\n id\n ...RelayResponseNormalizerTest_pvFragment\n }\n}\n\nfragment RelayResponseNormalizerTest_pvFragment on User {\n name @include(if: 
$__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n firstName @include(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n lastName @skip(if: $__relay_internal__pv__RelayProvider_returnsFalserelayprovider)\n username @skip(if: $__relay_internal__pv__RelayProvider_returnsTruerelayprovider)\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__RelayProvider_returnsTruerelayprovider": require('./../RelayProvider_returnsTrue.relayprovider'), + "__relay_internal__pv__RelayProvider_returnsFalserelayprovider": require('./../RelayProvider_returnsFalse.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest0Query.graphql.js b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest0Query.graphql.js new file mode 100644 index 0000000000000..3758b46305fa4 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest0Query.graphql.js @@ -0,0 +1,112 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderCatchFieldsTest0Query$variables = {||}; +export type RelayReaderCatchFieldsTest0Query$data = {| + +me: {| + +lastName: string, + |}, +|}; +export type RelayReaderCatchFieldsTest0Query = {| + response: RelayReaderCatchFieldsTest0Query$data, + variables: RelayReaderCatchFieldsTest0Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderCatchFieldsTest0Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "CatchField", + "field": (v0/*: any*/), + "to": "NULL", + "path": "me.lastName" + } + ], + "storageKey": null, + }, + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderCatchFieldsTest0Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "8cd69a31b3db9176dc76e43d3a795c6f", + "id": null, + "metadata": {}, + "name": "RelayReaderCatchFieldsTest0Query", + "operationKind": "query", + "text": "query RelayReaderCatchFieldsTest0Query {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "87b6ffdc922687a788965139fef7a707"; +} + +module.exports = ((node/*: 
any*/)/*: Query< + RelayReaderCatchFieldsTest0Query$variables, + RelayReaderCatchFieldsTest0Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest1Query.graphql.js b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest1Query.graphql.js new file mode 100644 index 0000000000000..57c3ab58198ad --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest1Query.graphql.js @@ -0,0 +1,112 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderCatchFieldsTest1Query$variables = {||}; +export type RelayReaderCatchFieldsTest1Query$data = {| + +me: {| + +lastName: string, + |}, +|}; +export type RelayReaderCatchFieldsTest1Query = {| + response: RelayReaderCatchFieldsTest1Query$data, + variables: RelayReaderCatchFieldsTest1Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderCatchFieldsTest1Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "kind": "CatchField", + "field": (v0/*: any*/), + "to": "RESULT", + "path": "me.lastName" + } + ], + "storageKey": null, + }, + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderCatchFieldsTest1Query", + 
"selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "8cd69a31b3db9176dc76e43d3a795c6f", + "id": null, + "metadata": {}, + "name": "RelayReaderCatchFieldsTest1Query", + "operationKind": "query", + "text": "query RelayReaderCatchFieldsTest1Query {\n me {\n lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "87b6ffdc922687a788965139fef7a707"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderCatchFieldsTest1Query$variables, + RelayReaderCatchFieldsTest1Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest2Query.graphql.js new file mode 100644 index 0000000000000..943305c1a81be --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest2Query.graphql.js @@ -0,0 +1,121 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderCatchFieldsTest2Query$variables = {||}; +export type RelayReaderCatchFieldsTest2Query$data = {| + +me: {| + +lastName: string, + |}, +|}; +export type RelayReaderCatchFieldsTest2Query = {| + response: RelayReaderCatchFieldsTest2Query$data, + variables: RelayReaderCatchFieldsTest2Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "lastName", + "storageKey": null +}; + +var meObj = { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "RequiredField", + "field": (v0/*: any*/), + "action": "THROW", + "path": "me.lastName" + } + ], + "storageKey": null, + } +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderCatchFieldsTest2Query", + "selections": [ + { + "kind": "CatchField", + "field": (meObj/*: any*/), + "to": "RESULT", + "path": "me" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderCatchFieldsTest2Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + (v0/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "8cd69a31b3db9176dc76e43d3a795c6f", + "id": null, + "metadata": {}, + "name": "RelayReaderCatchFieldsTest2Query", + "operationKind": "query", + "text": "query RelayReaderCatchFieldsTest2Query {\n me {\n 
lastName\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "87b6ffdc922687a788965139fef7a707"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderCatchFieldsTest2Query$variables, + RelayReaderCatchFieldsTest2Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Fragment.graphql.js b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Fragment.graphql.js new file mode 100644 index 0000000000000..281c9767caf3a --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Fragment.graphql.js @@ -0,0 +1,70 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayReaderCatchFieldsTest3Fragment$fragmentType: FragmentType; +export type RelayReaderCatchFieldsTest3Fragment$data = {| + +profilePicture: ?{| + +uri: ?string, + |}, + +$fragmentType: RelayReaderCatchFieldsTest3Fragment$fragmentType, +|}; +export type RelayReaderCatchFieldsTest3Fragment$key = { + +$data?: RelayReaderCatchFieldsTest3Fragment$data, + +$fragmentSpreads: RelayReaderCatchFieldsTest3Fragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderCatchFieldsTest3Fragment", + "selections": [ + { + "alias": "profilePicture", + "args": null, + "concreteType": "Image", + "kind": "LinkedField", + "name": "__profilePicture_test", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "9a91f81e017f3267c21ec7f465854acf"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayReaderCatchFieldsTest3Fragment$fragmentType, + RelayReaderCatchFieldsTest3Fragment$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Query.graphql.js b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Query.graphql.js new file mode 100644 index 0000000000000..f5e90ec2fd304 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/__mocks__/RelayReaderCatchFieldsTest3Query.graphql.js @@ -0,0 +1,134 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +export type RelayReaderCatchFieldsTest3Query$variables = {||}; +export type RelayReaderCatchFieldsTest3Query$data = {| + +me: ?{| + +profilePicture: ?{| + +uri: ?string, + |}, + |}, +|}; +export type RelayReaderCatchFieldsTest3Query = {| + response: RelayReaderCatchFieldsTest3Query$data, + variables: RelayReaderCatchFieldsTest3Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "size", + "value": 32 + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayReaderCatchFieldsTest3Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayReaderCatchFieldsTest3Fragment" + } + ], + "storageKey": "profilePicture(size:32)" + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayReaderCatchFieldsTest3Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": "Image", + "kind": "LinkedField", + "name": "profilePicture", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "uri", + "storageKey": null + } + ], + "storageKey": "profilePicture(size:32)" + } + ], + "storageKey": 
null + } + ] + }, + "params": { + "cacheID": "25d9b9bf8df79f73e41a4671ce1d207d", + "id": null, + "metadata": {}, + "name": "RelayReaderCatchFieldsTest3Query", + "operationKind": "query", + "text": "query RelayReaderCatchFieldsTest3Query {\n me {\n profilePicture(size: 32) {\n uri\n }\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "7cd54b0080d8dab528631b15888562dc"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayReaderCatchFieldsTest3Query$variables, + RelayReaderCatchFieldsTest3Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/no_inline/NoInline-test.js b/packages/relay-runtime/store/__tests__/no_inline/NoInline-test.js index 11241449885c4..3319945f4188a 100644 --- a/packages/relay-runtime/store/__tests__/no_inline/NoInline-test.js +++ b/packages/relay-runtime/store/__tests__/no_inline/NoInline-test.js @@ -13,23 +13,22 @@ const {getRequest} = require('../../../query/GraphQLTag'); const {createReaderSelector} = require('../../../store/RelayModernSelector'); -// We're creating a fragmnet in some file far away from this one to confirm the +// We're creating a fragment in some file far away from this one to confirm the // relative path generated by the compiler is correct. // // We're using the fragment below in `NoInlineTestQuery`, you can look in the artifact // generated for that query to confirm the `require` is correct. Also, this test passing -// is confirmation that the relative pathing works. +// is confirmation that the relative path-ing works. 
const SomeDeeplyNestedFragment = require('./some/deeply/nested/__generated__/SomeDeeplyNestedFragment.graphql'); const {graphql} = require('relay-runtime/query/GraphQLTag'); const { createOperationDescriptor, } = require('relay-runtime/store/RelayModernOperationDescriptor'); const {createMockEnvironment} = require('relay-test-utils-internal'); - const { - disallowWarnings, disallowConsoleErrors, -} = require(`relay-test-utils-internal`); + disallowWarnings, +} = require('relay-test-utils-internal'); disallowConsoleErrors(); disallowWarnings(); @@ -63,7 +62,7 @@ describe('No Inline w/ Common JS', () => { ); // $FlowFixMe[unclear-type] - const {data} = (environment.lookup(fragmentSelector): any); + const {data}: any = environment.lookup(fragmentSelector); expect(data.name).toEqual('Alice'); }); diff --git a/packages/relay-runtime/store/__tests__/readInlineData-test.js b/packages/relay-runtime/store/__tests__/readInlineData-test.js index 38b071c1d3314..f331e736420b5 100644 --- a/packages/relay-runtime/store/__tests__/readInlineData-test.js +++ b/packages/relay-runtime/store/__tests__/readInlineData-test.js @@ -18,9 +18,9 @@ const { } = require('../RelayModernOperationDescriptor'); const {createMockEnvironment} = require('relay-test-utils-internal'); const { - disallowWarnings, disallowConsoleErrors, -} = require(`relay-test-utils-internal`); + disallowWarnings, +} = require('relay-test-utils-internal'); disallowConsoleErrors(); disallowWarnings(); diff --git a/packages/relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js new file mode 100644 index 0000000000000..b42c628b48beb --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/AnimalQueryResolvers.js @@ -0,0 +1,76 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @flow strict-local + * @oncall relay + */ + +import type {DataID} from 'relay-runtime/util/RelayRuntimeTypes'; + +type IAnimalTypeNames = 'Cat' | 'Fish' | 'Chicken'; + +const INVALID_ID = 'invalid_id'; + +type IAnimalType = { + __id: DataID, +}; + +/** + * @RelayResolver IAnimal.greeting: String + */ +function greeting(model: ?IAnimalType): ?string { + if (model == null) { + return null; + } + return `Hello, ${model.__id}!`; +} + +/** + * Returns a single `IAnimal` of a given type and optionally returns an invalid ID. + * + * @RelayResolver Query.animal(request: AnimalRequest!): IAnimal + */ +function animal(args: {request: {ofType: string, returnValidID: boolean}}): { + __typename: IAnimalTypeNames, + id: DataID, +} { + switch (args.request.ofType) { + case 'Cat': { + const id = args.request.returnValidID ? '1234567890' : INVALID_ID; + return {__typename: 'Cat', id}; + } + case 'Fish': { + const id = args.request.returnValidID ? '12redblue' : INVALID_ID; + return {__typename: 'Fish', id}; + } + default: + throw new Error('Unexpected value for "ofType" argument'); + } +} + +/** + * Returns a list of `IAnimal` of a given type and optionally returns an invalid ID. 
+ * + * @RelayResolver Query.animals(requests: [AnimalRequest!]!): [IAnimal] + */ +function animals(args: { + requests: $ReadOnlyArray<{ofType: string, returnValidID: boolean}>, +}): Array<{ + __typename: IAnimalTypeNames, + id: DataID, +}> { + return args.requests.map(request => { + return animal({request}); + }); +} + +module.exports = { + INVALID_ID, + animal, + animals, + greeting, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignHouseResolver.js b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignHouseResolver.js index 1170e5ffc55d2..a4d817d1d080e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignHouseResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignHouseResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName house + * @RelayResolver AstrologicalSign.house: Int * @rootFragment AstrologicalSignHouseResolver - * @onType AstrologicalSign * * Re-expose `house` from our client fat `self` object. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignNameResolver.js b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignNameResolver.js index 16f348ca2cee6..8a6e4ee2cde92 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignNameResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignNameResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName name + * @RelayResolver AstrologicalSign.name: String * @rootFragment AstrologicalSignNameResolver - * @onType AstrologicalSign * * Re-expose `Name` from our client fat `self` object. 
*/ diff --git a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignOppositeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignOppositeResolver.js index e74291b8136c8..abb8af6b96b70 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignOppositeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignOppositeResolver.js @@ -18,11 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName opposite + * @RelayResolver AstrologicalSign.opposite: AstrologicalSign * @rootFragment AstrologicalSignOppositeResolver - * @onType AstrologicalSign - * @edgeTo AstrologicalSign * * Expose a sign's opposite as an edge in the graph. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignSelfResolver.js b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignSelfResolver.js index 3cd08ffa3496e..845b10be3096b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignSelfResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/AstrologicalSignSelfResolver.js @@ -22,10 +22,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName self + * @RelayResolver AstrologicalSign.self: RelayResolverValue * @rootFragment AstrologicalSignSelfResolver - * @onType AstrologicalSign * * Local state knowledge of the user's astrological sign. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/CatResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/CatResolvers.js new file mode 100644 index 0000000000000..0a88ed49e4bfa --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/CatResolvers.js @@ -0,0 +1,50 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @flow strict-local + * @oncall relay + */ + +import type {DataID} from 'relay-runtime/util/RelayRuntimeTypes'; + +const {INVALID_ID} = require('./AnimalQueryResolvers'); + +type CatModel = { + __id: DataID, +}; + +/** + * @RelayResolver Cat implements IAnimal + */ +function Cat(id: DataID): ?CatModel { + if (id === INVALID_ID) { + return null; + } + return { + __id: id, + }; +} + +/** + * @RelayResolver Cat.legs: Int + */ +function legs(cat: CatModel): number { + return 4; +} + +/** + * @RelayResolver Query.cat: Cat + */ +function cat(): {id: DataID} { + return {id: '9'}; +} + +module.exports = { + cat, + Cat, + legs, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/CounterPlusOneResolver.js b/packages/relay-runtime/store/__tests__/resolvers/CounterPlusOneResolver.js index e390f328438ac..4f4bdfe28aa37 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/CounterPlusOneResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/CounterPlusOneResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName counter_plus_one + * @RelayResolver Query.counter_plus_one: Int * @rootFragment CounterPlusOneResolver - * @onType Query * * A resolver which reads a @live resolver field (`counter`) to return `counter + 1`. 
*/ diff --git a/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOdd.js b/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOdd.js index 9e13e641476ce..5b9db4c17d20e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOdd.js +++ b/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOdd.js @@ -12,20 +12,15 @@ 'use strict'; import type {CounterSuspendsWhenOdd$key} from './__generated__/CounterSuspendsWhenOdd.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); -const {graphql} = require('relay-runtime'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {graphql, suspenseSentinel} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName counter_suspends_when_odd + * @RelayResolver Query.counter_suspends_when_odd: Int * @rootFragment CounterSuspendsWhenOdd - * @onType Query * @live * * A Relay Resolver that returns an object implementing the External State diff --git a/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOddOnUser.js b/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOddOnUser.js index 0c548708ba4a9..114d089653cb4 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOddOnUser.js +++ b/packages/relay-runtime/store/__tests__/resolvers/CounterSuspendsWhenOddOnUser.js @@ -11,17 +11,13 @@ 'use strict'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); -const { - suspenseSentinel, -} = 
require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {suspenseSentinel} = require('relay-runtime'); /** - * @RelayResolver - * @fieldName counter_suspends_when_odd - * @onType User + * @RelayResolver User.counter_suspends_when_odd: Int * @live * * A Relay Resolver that returns an object implementing the External State diff --git a/packages/relay-runtime/store/__tests__/resolvers/ExampleClientObjectResolver.js b/packages/relay-runtime/store/__tests__/resolvers/ExampleClientObjectResolver.js index 8c31abb4bc3de..43974ad0714db 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ExampleClientObjectResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/ExampleClientObjectResolver.js @@ -14,10 +14,7 @@ import type {Query__example_client_object$normalization as ReturnType} from './__generated__/Query__example_client_object$normalization.graphql'; /** - * @RelayResolver - * @fieldName example_client_object - * @onType Query - * @outputType ClientObject + * @RelayResolver Query.example_client_object: ClientObject */ function example_client_object(): ReturnType { return { diff --git a/packages/relay-runtime/store/__tests__/resolvers/ExampleExternalStateStore.js b/packages/relay-runtime/store/__tests__/resolvers/ExampleExternalStateStore.js index 73819681630cc..81c7bdca672c4 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ExampleExternalStateStore.js +++ b/packages/relay-runtime/store/__tests__/resolvers/ExampleExternalStateStore.js @@ -44,6 +44,9 @@ class Store { this._state = 0; this._subscriptions = []; } + getSubscriptionsCount(): number { + return this._subscriptions.length; + } } const Selectors = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/ExampleTodoStore.js b/packages/relay-runtime/store/__tests__/resolvers/ExampleTodoStore.js index e963885befd33..3893687cacfea 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ExampleTodoStore.js +++ 
b/packages/relay-runtime/store/__tests__/resolvers/ExampleTodoStore.js @@ -13,7 +13,7 @@ import type {LogEvent} from '../../RelayStoreTypes'; -export opaque type TodoID = string; +export opaque type TodoID: string = string; export type TodoItem = { todoID: TodoID, @@ -39,6 +39,13 @@ type ACTION = type: 'REMOVE_TODO', payload: TodoID, } + | { + type: 'CHANGE_TODO_DESCRIPTION', + payload: { + todoID: TodoID, + description: string, + }, + } | { type: 'BLOCKED_BY', payload: { @@ -130,6 +137,20 @@ class TodoStore { this._notify([action.payload.todoID]); break; } + case 'CHANGE_TODO_DESCRIPTION': { + this._state = this._state.map(todo => { + if (todo.todoID === action.payload.todoID) { + return { + ...todo, + description: action.payload.description, + }; + } else { + return todo; + } + }); + this._notify([action.payload.todoID]); + break; + } default: (action.type: empty); } @@ -217,6 +238,16 @@ function blockedBy(todoID: string, blockedBy: string) { }); } +function changeDescription(todoID: string, description: string) { + TODO_STORE.dispatch({ + type: 'CHANGE_TODO_DESCRIPTION', + payload: { + todoID, + description, + }, + }); +} + module.exports = { TODO_STORE, Selectors, @@ -225,4 +256,5 @@ module.exports = { completeTodo, removeTodo, blockedBy, + changeDescription, }; diff --git a/packages/relay-runtime/store/__tests__/resolvers/FishResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/FishResolvers.js new file mode 100644 index 0000000000000..22b7c2af03ed2 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/FishResolvers.js @@ -0,0 +1,50 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @flow strict-local + * @oncall relay + */ + +import type {DataID} from 'relay-runtime/util/RelayRuntimeTypes'; + +const {INVALID_ID} = require('./AnimalQueryResolvers'); + +type FishModel = { + __id: DataID, +}; + +/** + * @RelayResolver Fish implements IAnimal + */ +function Fish(id: DataID): ?FishModel { + if (id === INVALID_ID) { + return null; + } + return { + __id: id, + }; +} + +/** + * @RelayResolver Fish.legs: Int + */ +function legs(cat: FishModel): number { + return 0; +} + +/** + * @RelayResolver Query.fish: Fish + */ +function fish(): {id: DataID} { + return {id: '12redblue'}; +} + +module.exports = { + fish, + Fish, + legs, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/HelloUserResolver.js b/packages/relay-runtime/store/__tests__/resolvers/HelloUserResolver.js index 4608407069d42..0a79b4e1a0b26 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/HelloUserResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/HelloUserResolver.js @@ -14,10 +14,7 @@ import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; /** - * @RelayResolver - * @fieldName hello_user(id: ID!) - * @edgeTo User - * @onType Query + * @RelayResolver Query.hello_user(id: ID!): User * * This should return the User */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldOptionalResolver.js b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldOptionalResolver.js new file mode 100644 index 0000000000000..39a4f25ed9133 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldOptionalResolver.js @@ -0,0 +1,25 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +/** + * @RelayResolver Query.hello_optional_world(world: String): String + * + * Say `Hello ${world}!` with a fallback if world is null + */ +function hello_optional_world(args: {world: ?string}): string { + return `Hello, ${args.world ?? 'Default'}!`; +} + +module.exports = { + hello_optional_world, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldProvider.js b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldProvider.relayprovider.js similarity index 100% rename from packages/relay-runtime/store/__tests__/resolvers/HelloWorldProvider.js rename to packages/relay-runtime/store/__tests__/resolvers/HelloWorldProvider.relayprovider.js diff --git a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolver.js b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolver.js index 5324ff1a66aa1..c6f77d4a6f538 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolver.js @@ -12,9 +12,7 @@ 'use strict'; /** - * @RelayResolver - * @fieldName hello(world: String!) 
- * @onType Query + * @RelayResolver Query.hello(world: String!): String * * Say `Hello ${world}!` */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolverWithProvidedVariable.js b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolverWithProvidedVariable.js index 196e6e4c8ff5f..4173733680fad 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolverWithProvidedVariable.js +++ b/packages/relay-runtime/store/__tests__/resolvers/HelloWorldResolverWithProvidedVariable.js @@ -17,9 +17,7 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName hello_world_with_provided_variable - * @onType Query + * @RelayResolver Query.hello_world_with_provided_variable: String * @rootFragment HelloWorldResolverWithProvidedVariable * * Say `Hello ${world}!` @@ -33,7 +31,7 @@ function hello_world_with_provided_variable( @argumentDefinitions( provided_variable: { type: "String!" 
- provider: "./HelloWorldProvider.js" + provider: "./HelloWorldProvider.relayprovider" } ) { hello(world: $provided_variable) diff --git a/packages/relay-runtime/store/__tests__/resolvers/InnerResolver.js b/packages/relay-runtime/store/__tests__/resolvers/InnerResolver.js index 8ef1472181427..5a0da95917639 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/InnerResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/InnerResolver.js @@ -12,17 +12,15 @@ 'use strict'; import type {InnerResolver$key} from './__generated__/InnerResolver.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName inner + * @RelayResolver Query.inner: Int * @rootFragment InnerResolver - * @onType Query * @live */ function inner(rootKey: InnerResolver$key): LiveState { diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveConstantClientEdgeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/LiveConstantClientEdgeResolver.js index 0fd6b24ab020c..ff85ea4bc2ab5 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveConstantClientEdgeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveConstantClientEdgeResolver.js @@ -12,13 +12,10 @@ 'use strict'; import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; /** - * @RelayResolver - * @fieldName live_constant_client_edge - * @onType Query - * @edgeTo User + * @RelayResolver Query.live_constant_client_edge: User * @live */ function live_constant_client_edge(): LiveState< diff --git 
a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js index f38239b17a615..ecb75532d40a7 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragment.js @@ -11,14 +11,12 @@ 'use strict'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); /** - * @RelayResolver - * @fieldName counter_no_fragment - * @onType Query + * @RelayResolver Query.counter_no_fragment: Int * @live * * A Relay Resolver that returns an object implementing the External State diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragmentWithArg.js b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragmentWithArg.js index 05a0675d9e8e9..031c546679234 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragmentWithArg.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterNoFragmentWithArg.js @@ -11,14 +11,12 @@ 'use strict'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); /** - * @RelayResolver - * @fieldName counter_no_fragment_with_arg(prefix: String!) 
- * @onType Query + * @RelayResolver Query.counter_no_fragment_with_arg(prefix: String!): String * @live * * A Relay Resolver that returns an object implementing the External State diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js index a53216c86ebff..46614fb137901 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterResolver.js @@ -12,17 +12,15 @@ 'use strict'; import type {LiveCounterResolver$key} from './__generated__/LiveCounterResolver.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName counter + * @RelayResolver Query.counter: Int * @rootFragment LiveCounterResolver - * @onType Query * @live * * A Relay Resolver that returns an object implementing the External State diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterWithPossibleMissingFragmentDataResolver.js b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterWithPossibleMissingFragmentDataResolver.js index 042650fe04351..b15eb724d2106 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveCounterWithPossibleMissingFragmentDataResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveCounterWithPossibleMissingFragmentDataResolver.js @@ -12,17 +12,15 @@ 'use strict'; import type {LiveCounterWithPossibleMissingFragmentDataResolverFragment$key} from './__generated__/LiveCounterWithPossibleMissingFragmentDataResolverFragment.graphql'; -import type {LiveState} from 
'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName live_counter_with_possible_missing_fragment_data + * @RelayResolver Query.live_counter_with_possible_missing_fragment_data: Int * @rootFragment LiveCounterWithPossibleMissingFragmentDataResolverFragment - * @onType Query * @live */ function live_counter_with_possible_missing_fragment_data( diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveExternalGreeting.js b/packages/relay-runtime/store/__tests__/resolvers/LiveExternalGreeting.js index 658e47041d994..ecaff3fa89cb1 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveExternalGreeting.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveExternalGreeting.js @@ -12,19 +12,14 @@ 'use strict'; import type {LiveExternalGreetingFragment$key} from './__generated__/LiveExternalGreetingFragment.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; -const {graphql} = require('relay-runtime'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {graphql, suspenseSentinel} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName live_external_greeting + * @RelayResolver Query.live_external_greeting: String * @rootFragment LiveExternalGreetingFragment - * @onType Query * @live */ function live_external_greeting( diff --git a/packages/relay-runtime/store/__tests__/resolvers/LivePingPongResolver.js b/packages/relay-runtime/store/__tests__/resolvers/LivePingPongResolver.js index 
c4e1f24b1937f..89a719a6323f4 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LivePingPongResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LivePingPongResolver.js @@ -12,16 +12,14 @@ 'use strict'; import type {LivePingPongResolver$key} from './__generated__/LivePingPongResolver.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName ping + * @RelayResolver Query.ping: String * @rootFragment LivePingPongResolver - * @onType Query * @live * * A @live Relay resolver that synchronously triggers an update on initial diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveResolvers-test.js b/packages/relay-runtime/store/__tests__/resolvers/LiveResolvers-test.js index e82be58f58d0c..650f28cec6d0c 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveResolvers-test.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveResolvers-test.js @@ -15,7 +15,7 @@ import type {Snapshot} from '../../RelayStoreTypes'; const { live_external_greeting: LiveExternalGreeting, } = require('./LiveExternalGreeting'); -const {RelayFeatureFlags} = require('relay-runtime'); +const {RelayFeatureFlags, suspenseSentinel} = require('relay-runtime'); const RelayNetwork = require('relay-runtime/network/RelayNetwork'); const {graphql} = require('relay-runtime/query/GraphQLTag'); const { @@ -23,9 +23,6 @@ const { resetStore, } = require('relay-runtime/store/__tests__/resolvers/ExampleExternalStateStore'); const LiveResolverStore = require('relay-runtime/store/experimental-live-resolvers/LiveResolverStore'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); const RelayModernEnvironment = 
require('relay-runtime/store/RelayModernEnvironment'); const { createOperationDescriptor, @@ -41,13 +38,11 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; resetStore(); }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); test('unsubscribe happens when record is updated due to missing data', () => { @@ -150,14 +145,25 @@ test('Updates can be batched', () => { `, {}, ); + const log = jest.fn(); const store = new LiveResolverStore(source, { gcReleaseBufferSize: 0, + log, }); const environment = new RelayModernEnvironment({ network: RelayNetwork.create(jest.fn()), store, + log, }); + function getBatchLogEventNames(): string[] { + return log.mock.calls + .map(log => log[0].name) + .filter(name => { + return name.startsWith('liveresolver.batch'); + }); + } + const snapshot = environment.lookup(operation.fragment); const handler = jest.fn<[Snapshot], void>(); @@ -173,11 +179,18 @@ test('Updates can be batched', () => { let lastCallCount = handler.mock.calls.length; + expect(getBatchLogEventNames()).toEqual([]); + // Update _with_ batching. store.batchLiveStateUpdates(() => { GLOBAL_STORE.dispatch({type: 'INCREMENT'}); }); + expect(getBatchLogEventNames()).toEqual([ + 'liveresolver.batch.start', + 'liveresolver.batch.end', + ]); + // We get notified once per batch! 
:) expect(handler.mock.calls.length - lastCallCount).toBe(1); @@ -193,6 +206,13 @@ test('Updates can be batched', () => { }); }).toThrowError('An Example Error'); + expect(getBatchLogEventNames()).toEqual([ + 'liveresolver.batch.start', + 'liveresolver.batch.end', + 'liveresolver.batch.start', + 'liveresolver.batch.end', + ]); + // We still notify our subscribers expect(handler.mock.calls.length - lastCallCount).toBe(1); @@ -202,4 +222,119 @@ test('Updates can be batched', () => { store.batchLiveStateUpdates(() => {}); }); }).toThrow('Unexpected nested call to batchLiveStateUpdates.'); + + expect(getBatchLogEventNames()).toEqual([ + 'liveresolver.batch.start', + 'liveresolver.batch.end', + 'liveresolver.batch.start', + 'liveresolver.batch.end', + // Here we can see the nesting + 'liveresolver.batch.start', + 'liveresolver.batch.start', + 'liveresolver.batch.end', + 'liveresolver.batch.end', + ]); +}); + +test('Errors thrown during _initial_ read() are caught as resolver errors', () => { + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + const source = RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + const operation = createOperationDescriptor( + graphql` + query LiveResolversTestHandlesErrorOnReadQuery { + counter_throws_when_odd + } + `, + {}, + ); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + + const snapshot = environment.lookup(operation.fragment); + expect(snapshot.relayResolverErrors).toEqual([ + { + error: Error('What?'), + field: { + owner: 'LiveResolversTestHandlesErrorOnReadQuery', + path: 'counter_throws_when_odd', + }, + }, + ]); + const data: $FlowExpectedError = snapshot.data; + expect(data.counter_throws_when_odd).toBe(null); +}); + +test('Errors thrown during read() _after update_ are caught as resolver errors', () => { + const source = 
RelayRecordSource.create({ + 'client:root': { + __id: 'client:root', + __typename: '__Root', + }, + }); + const operation = createOperationDescriptor( + graphql` + query LiveResolversTestHandlesErrorOnUpdateQuery { + counter_throws_when_odd + } + `, + {}, + ); + const store = new LiveResolverStore(source, { + gcReleaseBufferSize: 0, + }); + const environment = new RelayModernEnvironment({ + network: RelayNetwork.create(jest.fn()), + store, + }); + + const snapshot = environment.lookup(operation.fragment); + + const handler = jest.fn<[Snapshot], void>(); + environment.subscribe(snapshot, handler); + + // Confirm there are no initial errors + expect(snapshot.relayResolverErrors).toEqual([]); + const data: $FlowExpectedError = snapshot.data; + expect(data.counter_throws_when_odd).toBe(0); + + // This should trigger a read that throws + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + + expect(handler).toHaveBeenCalled(); + + const nextSnapshot = handler.mock.calls[0][0]; + + expect(nextSnapshot.relayResolverErrors).toEqual([ + { + error: Error('What?'), + field: { + owner: 'LiveResolversTestHandlesErrorOnUpdateQuery', + path: 'counter_throws_when_odd', + }, + }, + ]); + const nextData: $FlowExpectedError = nextSnapshot.data; + expect(nextData.counter_throws_when_odd).toBe(null); + + handler.mockReset(); + + // Put the live resolver back into a state where it is valid + GLOBAL_STORE.dispatch({type: 'INCREMENT'}); + + const finalSnapshot = handler.mock.calls[0][0]; + + // Confirm there are no initial errors + expect(finalSnapshot.relayResolverErrors).toEqual([]); + const finalData: $FlowExpectedError = finalSnapshot.data; + expect(finalData.counter_throws_when_odd).toBe(2); }); diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveUserAlwaysSuspendResolver.js b/packages/relay-runtime/store/__tests__/resolvers/LiveUserAlwaysSuspendResolver.js index 799fc96093340..1bdf8e9077c93 100644 --- 
a/packages/relay-runtime/store/__tests__/resolvers/LiveUserAlwaysSuspendResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveUserAlwaysSuspendResolver.js @@ -12,17 +12,12 @@ 'use strict'; import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {suspenseSentinel} = require('relay-runtime'); /** - * @RelayResolver - * @fieldName live_user_resolver_always_suspend - * @onType Query - * @edgeTo User + * @RelayResolver Query.live_user_resolver_always_suspend: User * @live */ function live_user_resolver_always_suspend(): LiveState< diff --git a/packages/relay-runtime/store/__tests__/resolvers/LiveUserSuspendsWhenOdd.js b/packages/relay-runtime/store/__tests__/resolvers/LiveUserSuspendsWhenOdd.js index b4e941243d053..98a2d7121ae19 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/LiveUserSuspendsWhenOdd.js +++ b/packages/relay-runtime/store/__tests__/resolvers/LiveUserSuspendsWhenOdd.js @@ -12,18 +12,13 @@ 'use strict'; import type {DataID} from 'relay-runtime'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {suspenseSentinel} = require('relay-runtime'); /** - * @RelayResolver - * @fieldName live_user_suspends_when_odd - * @edgeTo User - * @onType Query + * @RelayResolver Query.live_user_suspends_when_odd: User * @live */ function live_user_suspends_when_odd(): LiveState<{|+id: DataID|}> { diff --git 
a/packages/relay-runtime/store/__tests__/resolvers/MutableModel.js b/packages/relay-runtime/store/__tests__/resolvers/MutableModel.js new file mode 100644 index 0000000000000..8ad9107eccd8a --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/MutableModel.js @@ -0,0 +1,93 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {LiveState} from 'relay-runtime'; + +type Entity = { + name: string, + type: string, + props: { + battery: string, + }, +}; + +let HUMAN: Entity = { + name: 'Alice', + type: 'human', + props: { + battery: '0', + }, +}; + +let ROBOT: Entity = { + name: 'Bob', + type: 'robot', + props: { + battery: '0', + }, +}; + +const subscriptions: Array<() => void> = []; +let isHuman: boolean = true; +/** + * @RelayResolver Query.mutable_entity: RelayResolverValue + * @live + + */ +function mutable_entity(): LiveState { + return { + read() { + return isHuman ? 
HUMAN : ROBOT; + }, + subscribe(cb) { + subscriptions.push(cb); + return () => { + subscriptions.filter(x => x !== cb); + }; + }, + }; +} + +function setIsHuman(val: boolean): void { + isHuman = val; + subscriptions.forEach(x => x()); +} + +function chargeBattery(): void { + ROBOT.props.battery = '100'; + subscriptions.forEach(x => x()); +} + +function resetModels(): void { + HUMAN = { + name: 'Alice', + type: 'human', + props: { + battery: '0', + }, + }; + ROBOT = { + name: 'Bob', + type: 'robot', + props: { + battery: '0', + }, + }; +} + +module.exports = { + mutable_entity, + setIsHuman, + chargeBattery, + resetModels, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/OuterResolver.js b/packages/relay-runtime/store/__tests__/resolvers/OuterResolver.js index 21bf4fb4f3ca4..5a105863f9dde 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/OuterResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/OuterResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName outer + * @RelayResolver Query.outer: Int * @rootFragment OuterResolver - * @onType Query */ function outer(rootKey: OuterResolver$key): number | null | void { const data = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/PurpleOctopusResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/PurpleOctopusResolvers.js new file mode 100644 index 0000000000000..f0c8b6d2512c5 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/PurpleOctopusResolvers.js @@ -0,0 +1,39 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +/** + * @RelayResolver PurpleOctopus implements IWeakAnimal + * @weak + */ +export type PurpleOctopus = { + name: string, +}; + +/** + * @RelayResolver PurpleOctopus.color: String + */ +function color(purpleOctopus: PurpleOctopus): ?string { + return 'purple'; +} + +/** + * @RelayResolver Query.purple_octopus: PurpleOctopus + */ +function purpleOctopus(): PurpleOctopus { + return { + name: 'PurpleOctopus', + }; +} + +module.exports = { + color, + purpleOctopus, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryAllAstrologicalSignsResolver.js b/packages/relay-runtime/store/__tests__/resolvers/QueryAllAstrologicalSignsResolver.js index 527f686091c4b..2d4423f140a18 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryAllAstrologicalSignsResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryAllAstrologicalSignsResolver.js @@ -20,11 +20,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName all_astrological_signs + * @RelayResolver Query.all_astrological_signs: [AstrologicalSign!] 
* @rootFragment QueryAllAstrologicalSignsResolver - * @onType Query - * @edgeTo [AstrologicalSign] * * A client edge to a plural client object */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverReturnsUndefined.js b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverReturnsUndefined.js index 9709f733d7082..ba5925f573fa7 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverReturnsUndefined.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverReturnsUndefined.js @@ -11,18 +11,16 @@ 'use strict'; +import type {LiveState} from 'relay-runtime'; + /** - * @RelayResolver - * @fieldName live_resolver_return_undefined - * @onType Query + * @RelayResolver Query.live_resolver_return_undefined: RelayResolverValue * @live * - * A @live resolver that throws + * A @live resolver that returns undefined */ -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; - // $FlowFixMe - this resolver returns undefined, but should return LiveState -function live_resolver_return_undefined(): LiveState<> {} +function live_resolver_return_undefined(): LiveState<$FlowFixMe> {} module.exports = { live_resolver_return_undefined, diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrows.js b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrows.js index fda5822eb9f30..8f06050eabc38 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrows.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrows.js @@ -11,16 +11,14 @@ 'use strict'; +import type {LiveState} from 'relay-runtime'; + /** - * @RelayResolver - * @fieldName live_resolver_throws - * @onType Query + * @RelayResolver Query.live_resolver_throws: RelayResolverValue * @live * * A @live resolver that throws */ -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; - function 
live_resolver_throws(): LiveState { throw new Error('What?'); } diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrowsOnRead.js b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrowsOnRead.js new file mode 100644 index 0000000000000..84a0d850116b1 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverThrowsOnRead.js @@ -0,0 +1,46 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {LiveState} from 'relay-runtime'; + +const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); + +/** + * @RelayResolver Query.counter_throws_when_odd: Int + * @live + * + * A @live resolver that throws when counter is odd. Useful for testing + * behavior of live resolvers that throw on read. + */ + +function counter_throws_when_odd(): LiveState { + return { + read() { + const number = Selectors.getNumber(GLOBAL_STORE.getState()); + if (number % 2 !== 0) { + throw new Error('What?'); + } else { + return number; + } + }, + subscribe(cb): () => void { + // Here we could try to run the selector and short-circuit if + // the value has not changed, but for now we'll over-notify. 
+ return GLOBAL_STORE.subscribe(cb); + }, + }; +} + +module.exports = { + counter_throws_when_odd, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverWithBadReturnValue.js b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverWithBadReturnValue.js index aed51e23abbfd..679d7bcc7cffa 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverWithBadReturnValue.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryLiveResolverWithBadReturnValue.js @@ -10,17 +10,14 @@ */ 'use strict'; +import type {LiveState} from 'relay-runtime'; /** - * @RelayResolver - * @fieldName live_resolver_with_bad_return_value - * @onType Query + * @RelayResolver Query.live_resolver_with_bad_return_value: String * @live * * A @live resolver that does not return a LiveObject */ -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; - function live_resolver_with_bad_return_value(): LiveState { // $FlowFixMe The purpose of this resolver is to test a bad return value. return 'Oops!'; diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryManyLiveTodos.js b/packages/relay-runtime/store/__tests__/resolvers/QueryManyLiveTodos.js new file mode 100644 index 0000000000000..befada40127ca --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryManyLiveTodos.js @@ -0,0 +1,39 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; +import type {LiveState} from 'relay-runtime'; + +const { + Selectors, + TODO_STORE, +} = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); + +/** + * @RelayResolver Query.many_live_todos: [Todo] + * @live + */ +function many_live_todos(): LiveState<$ReadOnlyArray<{todo_id: string}>> { + return { + read() { + return Selectors.getTodoIDs(TODO_STORE.getState()).map(id => ({ + todo_id: id, + })); + }, + subscribe(cb) { + return TODO_STORE.subscribe(null, cb); + }, + }; +} + +module.exports = { + many_live_todos, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryManyTodos.js b/packages/relay-runtime/store/__tests__/resolvers/QueryManyTodos.js index 34fcfefc41efe..a5628a3812c43 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryManyTodos.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryManyTodos.js @@ -14,10 +14,7 @@ import type {Query__many_todos$normalization as ReturnType} from './__generated__/Query__many_todos$normalization.graphql'; /** - * @RelayResolver - * @onType Query - * @fieldName many_todos(todo_ids: [ID]!) 
- * @outputType [Todo] + * @RelayResolver Query.many_todos(todo_ids: [ID]!): [Todo] */ function many_todos(args: { todo_ids: $ReadOnlyArray, diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryNonLiveResolverWithLiveReturnValue.js b/packages/relay-runtime/store/__tests__/resolvers/QueryNonLiveResolverWithLiveReturnValue.js index 587f72149c066..0db3880e30caf 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryNonLiveResolverWithLiveReturnValue.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryNonLiveResolverWithLiveReturnValue.js @@ -12,20 +12,17 @@ 'use strict'; /** - * @RelayResolver - * @fieldName non_live_resolver_with_live_return_value - * @onType Query + * @RelayResolver Query.non_live_resolver_with_live_return_value: String * * A non-@live resolver that returns a LiveObject */ -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; - -function non_live_resolver_with_live_return_value(): LiveState { +function non_live_resolver_with_live_return_value(): string { + // $FlowFixMe This is an intentionally wrong type to test what happens when you return a LiveObject from a non-@live resolver. 
return { read() { return 'Oops!'; }, - subscribe(cb) { + subscribe() { return () => {}; }, }; diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryTodo.js b/packages/relay-runtime/store/__tests__/resolvers/QueryTodo.js index a74f2aa6e5d12..15f3a5ef3df53 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryTodo.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryTodo.js @@ -11,18 +11,15 @@ 'use strict'; -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; import type {Query__todo$normalization as ReturnType} from './__generated__/Query__todo$normalization.graphql'; +import type {LiveState} from 'relay-runtime'; const { Selectors, TODO_STORE, } = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); /** - * @RelayResolver - * @onType Query - * @fieldName todo(todoID: ID!) - * @outputType Todo + * @RelayResolver Query.todo(todoID: ID!): Todo * @live */ function todo(args: {todoID: string}): LiveState { diff --git a/packages/relay-runtime/store/__tests__/resolvers/QueryTodos.js b/packages/relay-runtime/store/__tests__/resolvers/QueryTodos.js index eeb41393141fb..7e3af5b3bec4b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryTodos.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryTodos.js @@ -11,8 +11,8 @@ 'use strict'; -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; import type {Query__todos$normalization as ReturnType} from './__generated__/Query__todos$normalization.graphql'; +import type {LiveState} from 'relay-runtime'; const { Selectors, @@ -20,10 +20,7 @@ const { } = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); /** - * @RelayResolver - * @onType Query - * @fieldName todos(first: Int, last: Int) - * @outputType TodoConnection + * @RelayResolver Query.todos(first: Int, last: Int): TodoConnection * @live */ function todos(args: {first: ?number, last: ?number}): LiveState { diff --git 
a/packages/relay-runtime/store/__tests__/resolvers/QueryVirgoLiveSuspendsWhenOddResolver.js b/packages/relay-runtime/store/__tests__/resolvers/QueryVirgoLiveSuspendsWhenOddResolver.js index ad3dab134063e..3bac919583994 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/QueryVirgoLiveSuspendsWhenOddResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/QueryVirgoLiveSuspendsWhenOddResolver.js @@ -12,12 +12,10 @@ 'use strict'; import type {AstrologicalSignID} from './AstrologicalSignUtils'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {suspenseSentinel} = require('relay-runtime'); /** * @RelayResolver Query.virgo_suspends_when_counter_is_odd: AstrologicalSign diff --git a/packages/relay-runtime/store/__tests__/resolvers/RedOctopusResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/RedOctopusResolvers.js new file mode 100644 index 0000000000000..bb0393966fba4 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/RedOctopusResolvers.js @@ -0,0 +1,39 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @flow strict-local + * @oncall relay + */ + +/** + * @RelayResolver RedOctopus implements IWeakAnimal + * @weak + */ +export type RedOctopus = { + name: string, +}; + +/** + * @RelayResolver RedOctopus.color: String + */ +function color(red_octopus: RedOctopus): ?string { + return 'red'; +} + +/** + * @RelayResolver Query.red_octopus: RedOctopus + */ +function red_octopus(): RedOctopus { + return { + name: 'RedOctopus', + }; +} + +module.exports = { + red_octopus, + color, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/Resolver-test.js b/packages/relay-runtime/store/__tests__/resolvers/Resolver-test.js index fb18548978a37..ab4076ddb6b9a 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/Resolver-test.js +++ b/packages/relay-runtime/store/__tests__/resolvers/Resolver-test.js @@ -71,7 +71,7 @@ describe('Relay Resolver', () => { ); // $FlowFixMe[unclear-type] - const {data} = (environment.lookup(fragmentSelector): any); + const {data}: any = environment.lookup(fragmentSelector); expect(data.greeting).toEqual('Hello, Alice!'); // Resolver result expect(data.name).toEqual(undefined); // Fields needed by resolver's fragment don't end up in the result @@ -99,7 +99,7 @@ describe('Relay Resolver', () => { }); // $FlowFixMe[unclear-type] - const {data} = (environment.lookup(operation.fragment): any); + const {data}: any = environment.lookup(operation.fragment); expect(data.me.greeting).toEqual('Hello, Alice!'); // Resolver result expect(data.me.name).toEqual(undefined); // Fields needed by resolver's fragment don't end up in the result @@ -119,4 +119,26 @@ describe('Relay Resolver', () => { `; expect(clientEdgeRuntimeArtifact.operation.name).toBe('ResolverTest3Query'); }); + + it('When omitting all arguments, resolver still gets passed an `args` object.', () => { + const environment = createMockEnvironment(); + + const FooQuery = graphql` + query ResolverTest4Query { + hello_optional_world + } + `; + + const request = 
getRequest(FooQuery); + const operation = createOperationDescriptor(request, {}); + + environment.commitPayload(operation, {}); + + // $FlowFixMe[unclear-type] + const {data, relayResolverErrors}: any = environment.lookup( + operation.fragment, + ); + expect(relayResolverErrors).toHaveLength(0); + expect(data.hello_optional_world).toEqual('Hello, Default!'); + }); }); diff --git a/packages/relay-runtime/store/__tests__/resolvers/ResolverGC-test.js b/packages/relay-runtime/store/__tests__/resolvers/ResolverGC-test.js index 12ad5d0e8dad5..4aa9b60b2f0e0 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ResolverGC-test.js +++ b/packages/relay-runtime/store/__tests__/resolvers/ResolverGC-test.js @@ -51,13 +51,11 @@ disallowConsoleErrors(); beforeEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = true; resetStore(); }); afterEach(() => { RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = false; - RelayFeatureFlags.ENABLE_CLIENT_EDGES = false; }); test('Live Resolver without fragment', async () => { diff --git a/packages/relay-runtime/store/__tests__/resolvers/ResolverThatThrows.js b/packages/relay-runtime/store/__tests__/resolvers/ResolverThatThrows.js index 30e9adb1453c0..793c16858e7cf 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ResolverThatThrows.js +++ b/packages/relay-runtime/store/__tests__/resolvers/ResolverThatThrows.js @@ -12,16 +12,14 @@ 'use strict'; import type {ResolverThatThrows$key} from './__generated__/ResolverThatThrows.graphql'; -import type {LiveState} from 'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName resolver_that_throws + * @RelayResolver User.resolver_that_throws: RelayResolverValue * @rootFragment ResolverThatThrows - * @onType User * @live * * This 
should always throw. diff --git a/packages/relay-runtime/store/__tests__/resolvers/ThrowBeforeReadResolver.js b/packages/relay-runtime/store/__tests__/resolvers/ThrowBeforeReadResolver.js index f2607bb725a6d..e23372dcd0e77 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/ThrowBeforeReadResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/ThrowBeforeReadResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName throw_before_read + * @RelayResolver Query.throw_before_read: RelayResolverValue * @rootFragment ThrowBeforeReadResolver - * @onType Query * * A resolver that exercises the edge case where a resolver throws before reading. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoBlockedByResolver.js b/packages/relay-runtime/store/__tests__/resolvers/TodoBlockedByResolver.js index ef205d94c3084..c9dae2d71d989 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoBlockedByResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoBlockedByResolver.js @@ -19,11 +19,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @onType Todo + * @RelayResolver Todo.blocked_by: [Todo] * @rootFragment TodoBlockedByResolverFragment - * @fieldName blocked_by - * @outputType [Todo] */ function blocked_by( rootKey: TodoBlockedByResolverFragment$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoCompleteResolver.js b/packages/relay-runtime/store/__tests__/resolvers/TodoCompleteResolver.js index 1d2b01c7ae31e..4b67c71621065 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoCompleteResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoCompleteResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = 
require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @onType Todo + * @RelayResolver Todo.complete: Boolean * @rootFragment TodoCompleteResolverFragment - * @fieldName complete */ function complete(rootKey: TodoCompleteResolverFragment$key): ?boolean { const data = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoDescription.js b/packages/relay-runtime/store/__tests__/resolvers/TodoDescription.js index 3dc51c3ed2ce9..6d87c96a17764 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoDescription.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoDescription.js @@ -9,6 +9,14 @@ * @oncall relay */ +import type {LiveState} from '../../RelayStoreTypes'; +import type {TodoDescription__some_client_type_with_interface$normalization} from './__generated__/TodoDescription__some_client_type_with_interface$normalization.graphql'; +import type {TodoDescription__some_interface$normalization} from './__generated__/TodoDescription__some_interface$normalization.graphql'; +import type {TodoDescription_text_style$key} from './__generated__/TodoDescription_text_style.graphql'; + +const {graphql} = require('relay-runtime'); +const {readFragment} = require('relay-runtime/store/ResolverFragments'); + /** * @RelayResolver TodoDescription * @weak @@ -18,8 +26,14 @@ export opaque type TodoDescription = { color: string, }; -import type {TodoDescription__some_client_type_with_interface$normalization} from './__generated__/TodoDescription__some_client_type_with_interface$normalization.graphql'; -import type {TodoDescription__some_interface$normalization} from './__generated__/TodoDescription__some_interface$normalization.graphql'; +/** + * @RelayResolver TodoDescriptionStyle + * @weak + */ +export opaque type TodoDescriptionStyle = { + color: string, + margin: ?string, +}; // Public constructor for opaque `TodoDescription`. 
// Other resolvers have to call this function to @@ -37,23 +51,60 @@ function createTodoDescription( /** * @RelayResolver TodoDescription.text: String */ -function text(instance: ?TodoDescription): ?string { - return instance?.text; +function text(instance: TodoDescription): string { + return instance.text; +} + +/** + * @RelayResolver TodoDescription.text_with_prefix(prefix: String!): String + */ +function text_with_prefix( + instance: TodoDescription, + args: {prefix: string}, +): string { + return `${args.prefix} ${instance.text}`; } /** * @RelayResolver TodoDescription.color: RelayResolverValue */ -function color(instance: ?TodoDescription): ?string { - return instance?.color; +function color(instance: TodoDescription): string { + return instance.color; } +const LiveColorSubscriptions = { + activeSubscriptions: [], +} as { + activeSubscriptions: Array<() => void>, +}; + /** - * @RelayResolver TodoDescription.some_interface: ClientInterface! + * @RelayResolver TodoDescription.live_color: RelayResolverValue + * @live + */ +function live_color(instance: TodoDescription): LiveState { + // This is a live field to test the subscription leaks cases + // When defining live fields on weak types + return { + read() { + return instance.color; + }, + subscribe(cb: () => void): () => void { + LiveColorSubscriptions.activeSubscriptions.push(cb); + return () => { + LiveColorSubscriptions.activeSubscriptions = + LiveColorSubscriptions.activeSubscriptions.filter(sub => sub !== cb); + }; + }, + }; +} + +/** + * @RelayResolver TodoDescription.some_interface: ClientInterface */ function some_interface( - instance: ?TodoDescription, -): ?TodoDescription__some_interface$normalization { + instance: TodoDescription, +): TodoDescription__some_interface$normalization { return { __typename: 'ClientTypeImplementingClientInterface', description: 'It was a magical place', @@ -61,11 +112,11 @@ function some_interface( } /** - * @RelayResolver TodoDescription.some_client_type_with_interface: 
ClientTypeWithNestedClientInterface! + * @RelayResolver TodoDescription.some_client_type_with_interface: ClientTypeWithNestedClientInterface */ function some_client_type_with_interface( - instance: ?TodoDescription, -): ?TodoDescription__some_client_type_with_interface$normalization { + instance: TodoDescription, +): TodoDescription__some_client_type_with_interface$normalization { return { client_interface: { __typename: 'ClientTypeImplementingClientInterface', @@ -74,10 +125,36 @@ function some_client_type_with_interface( }; } +/** + * @RelayResolver TodoDescription.text_style(margin: String): TodoDescriptionStyle + * @rootFragment TodoDescription_text_style + */ +function text_style( + fragmentKey: TodoDescription_text_style$key, + {margin}: {margin?: ?string}, +): TodoDescriptionStyle { + const {color} = readFragment( + graphql` + fragment TodoDescription_text_style on TodoDescription { + color @required(action: THROW) + } + `, + fragmentKey, + ); + return { + color, + margin, + }; +} + module.exports = { + text_style, + text_with_prefix, createTodoDescription, text, color, + live_color, + LiveColorSubscriptions, some_interface, some_client_type_with_interface, }; diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoModel.js b/packages/relay-runtime/store/__tests__/resolvers/TodoModel.js index cf18be7356dfe..d022ad573c3bb 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoModel.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoModel.js @@ -11,22 +11,28 @@ 'use strict'; -import type {LiveState} from '../../experimental-live-resolvers/LiveResolverStore'; +import type {TodoModelCapitalizedID$key} from './__generated__/TodoModelCapitalizedID.graphql'; +import type {TodoModelCapitalizedIDLegacy$key} from './__generated__/TodoModelCapitalizedIDLegacy.graphql'; import type {TodoDescription} from './TodoDescription'; import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; +import type {LiveState} from 
'relay-runtime'; import type {TodoItem} from 'relay-runtime/store/__tests__/resolvers/ExampleTodoStore'; +const {readFragment} = require('../../ResolverFragments'); const {createTodoDescription} = require('./TodoDescription'); +const {graphql, suspenseSentinel} = require('relay-runtime'); const { Selectors, TODO_STORE, } = require('relay-runtime/store/__tests__/resolvers/ExampleTodoStore'); +type TodoModelType = ?TodoItem; + /** * @RelayResolver TodoModel * @live */ -function TodoModel(id: string): LiveState { +function TodoModel(id: string): LiveState { return { read() { return Selectors.getTodo(TODO_STORE.getState(), id); @@ -40,25 +46,85 @@ function TodoModel(id: string): LiveState { /** * @RelayResolver TodoModel.description: String */ -function description(model: ?TodoItem): ?string { +function description(model: TodoModelType): ?string { return model?.description; } +/** + * @RelayResolver TodoModel.capitalized_id: String + * @rootFragment TodoModelCapitalizedID + * + * A resolver on a model type that reads its own rootFragment + */ +function capitalized_id(key: TodoModelCapitalizedID$key): ?string { + const todo = readFragment( + graphql` + fragment TodoModelCapitalizedID on TodoModel { + id + } + `, + key, + ); + return todo.id.toUpperCase(); +} + +/** + * @RelayResolver TodoModel.capitalized_id_legacy: String + * @rootFragment TodoModelCapitalizedIDLegacy + * + * Like `capitalized_id`, but implemented using the non-terse legacy syntax + */ +function capitalized_id_legacy(key: TodoModelCapitalizedIDLegacy$key): ?string { + const todo = readFragment( + graphql` + fragment TodoModelCapitalizedIDLegacy on TodoModel { + id + } + `, + key, + ); + return todo.id.toUpperCase(); +} + /** * @RelayResolver TodoModel.fancy_description: TodoDescription */ -function fancy_description(model: ?TodoItem): ?TodoDescription { +function fancy_description(model: TodoModelType): ?TodoDescription { if (model == null) { return null; } return 
createTodoDescription(model.description, model.isCompleted); } +/** + * @RelayResolver TodoModel.fancy_description_null: TodoDescription + */ +function fancy_description_null(model: TodoModelType): ?TodoDescription { + return null; +} + +/** + * @RelayResolver TodoModel.fancy_description_suspends: TodoDescription + * @live + */ +function fancy_description_suspends( + model: TodoModelType, +): LiveState { + return { + read() { + return suspenseSentinel(); + }, + subscribe() { + return () => {}; + }, + }; +} + /** * @RelayResolver TodoModel.many_fancy_descriptions: [TodoDescription] */ function many_fancy_descriptions( - model: ?TodoItem, + model: TodoModelType, ): $ReadOnlyArray { if (model == null) { return []; @@ -67,6 +133,19 @@ function many_fancy_descriptions( return [createTodoDescription(model.description, model.isCompleted)]; } +/** + * @RelayResolver TodoModel.many_fancy_descriptions_but_some_are_null: [TodoDescription] + */ +function many_fancy_descriptions_but_some_are_null( + model: TodoModelType, +): $ReadOnlyArray { + if (model == null) { + return []; + } + + return [createTodoDescription(model.description, model.isCompleted), null]; +} + /** * @RelayResolver Query.todo_model_null: TodoModel */ @@ -96,10 +175,15 @@ function live_todo_description(args: { } module.exports = { + capitalized_id, + capitalized_id_legacy, todo_model_null, TodoModel, description, fancy_description, + fancy_description_null, + fancy_description_suspends, many_fancy_descriptions, + many_fancy_descriptions_but_some_are_null, live_todo_description, }; diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoSelfResolver.js b/packages/relay-runtime/store/__tests__/resolvers/TodoSelfResolver.js index dc9509718b23d..69053c3068d1a 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoSelfResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoSelfResolver.js @@ -11,8 +11,8 @@ 'use strict'; -import type {LiveState} from 
'../../experimental-live-resolvers/LiveResolverStore'; import type {TodoSelfResolverFragment$key} from './__generated__/TodoSelfResolverFragment.graphql'; +import type {LiveState} from 'relay-runtime'; import type {TodoItem} from 'relay-runtime/store/__tests__/resolvers/ExampleTodoStore'; const {graphql} = require('relay-runtime'); @@ -23,10 +23,8 @@ const { const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @onType Todo + * @RelayResolver Todo.self: RelayResolverValue * @rootFragment TodoSelfResolverFragment - * @fieldName self * @live */ function self(rootKey: TodoSelfResolverFragment$key): LiveState { diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoTextColorResolver.js b/packages/relay-runtime/store/__tests__/resolvers/TodoTextColorResolver.js index dcdfe8d8ed770..5c261b651947f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoTextColorResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoTextColorResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @onType TodoTextColor + * @RelayResolver TodoTextColor.human_readable_color: String * @rootFragment TodoTextColorResolverFragment - * @fieldName human_readable_color */ function human_readable_color( rootKey: TodoTextColorResolverFragment$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/TodoTextResolver.js b/packages/relay-runtime/store/__tests__/resolvers/TodoTextResolver.js index 2c7643e0e8f5f..d744beca8f6b4 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/TodoTextResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/TodoTextResolver.js @@ -18,11 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @onType Todo + * @RelayResolver Todo.text: TodoText * 
@rootFragment TodoTextResolverFragment - * @fieldName text - * @outputType TodoText */ function text(rootKey: TodoTextResolverFragment$key): ?ReturnType { const data = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/UndefinedFieldResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UndefinedFieldResolver.js index e8f3b9936fff0..869c209f50b6e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UndefinedFieldResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UndefinedFieldResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName undefined_field + * @RelayResolver Query.undefined_field: RelayResolverValue * @rootFragment UndefinedFieldResolver - * @onType Query * * A resolver that always returns undefined. This is intended to exercise an * edge case in Relay Reader where `undefined` is generally interpreted as diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsResolver.js index 9dfd1cdc80c63..33da654f197f2 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName always_throws + * @RelayResolver User.always_throws: String * @rootFragment UserAlwaysThrowsResolver - * @onType User * * A Relay Resolver that always throws when evaluated. 
*/ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsTransitivelyResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsTransitivelyResolver.js index 8b51da2cbb373..8f8c3d2ce6db8 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsTransitivelyResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserAlwaysThrowsTransitivelyResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName always_throws_transitively + * @RelayResolver User.always_throws_transitively: String * @rootFragment UserAlwaysThrowsTransitivelyResolver - * @onType User * * A Relay Resolver that reads another resolver which will always throw. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserAnotherClientEdgeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserAnotherClientEdgeResolver.js index 9e3cacb93dd79..691b9ac90ed5e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserAnotherClientEdgeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserAnotherClientEdgeResolver.js @@ -18,11 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName another_client_edge + * @RelayResolver User.another_client_edge: User * @rootFragment UserAnotherClientEdgeResolver - * @onType User - * @edgeTo User */ function another_client_edge( rootKey: UserAnotherClientEdgeResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserAstrologicalSignResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserAstrologicalSignResolver.js index 8247f3eb12f85..ce252b2d03247 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserAstrologicalSignResolver.js +++ 
b/packages/relay-runtime/store/__tests__/resolvers/UserAstrologicalSignResolver.js @@ -20,11 +20,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName astrological_sign + * @RelayResolver User.astrological_sign: AstrologicalSign * @rootFragment UserAstrologicalSignResolver - * @onType User - * @edgeTo AstrologicalSign * * A Client Edge that points to a client-defined representation of the user's * star sign. diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendGreetingResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendGreetingResolver.js index 9a9a44492425a..f770f79964920 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendGreetingResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendGreetingResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName best_friend_greeting + * @RelayResolver User.best_friend_greeting: String * @rootFragment UserBestFriendGreetingResolver - * @onType User */ function best_friend_greeting( rootKey: UserBestFriendGreetingResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendShoutedGreetingResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendShoutedGreetingResolver.js index ff9e96389b017..804209e95f878 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendShoutedGreetingResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserBestFriendShoutedGreetingResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName best_friend_shouted_greeting + * @RelayResolver User.best_friend_shouted_greeting: 
String * @rootFragment UserBestFriendShoutedGreetingResolver - * @onType User */ function best_friend_shouted_greeting( rootKey: UserBestFriendShoutedGreetingResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver.js index 8855fb3c20109..88cd50d121740 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeClientObjectResolver.js @@ -11,21 +11,18 @@ 'use strict'; -import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; - /** - * @RelayResolver - * @fieldName client_object(id: ID!) - * @edgeTo ClientObject - * @onType User + * @RelayResolver User.client_object(return_null: Boolean!): ClientObject + * + * Returns a weak ClientObject or null depending upon the argument. */ function client_object(args: { - id: string, -}): ?ConcreteClientEdgeResolverReturnType<> { - if (args.id === '0') { + return_null: boolean, +}): {description: string} | null { + if (args.return_null) { return null; } - return {id: args.id}; + return {description: 'Hello world'}; } module.exports = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeNodeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeNodeResolver.js index ad84dbe5cf29d..b6cd58bee4be1 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeNodeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeNodeResolver.js @@ -14,10 +14,7 @@ import type {ConcreteClientEdgeResolverReturnType} from 'relay-runtime'; /** - * @RelayResolver - * @fieldName client_node(id: ID!) 
- * @edgeTo Node - * @onType User + * @RelayResolver User.client_node(id: ID!): Node */ function client_node(args: { id: string, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeResolver.js index cca8f0f9968e3..eae948a6f813f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserClientEdgeResolver.js @@ -18,11 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName client_edge + * @RelayResolver User.client_edge: User * @rootFragment UserClientEdgeResolver - * @edgeTo User - * @onType User */ function client_edge( rootKey: UserClientEdgeResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserConstantDependentResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserConstantDependentResolver.js index d5acb0283e0e1..5086f60205583 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserConstantDependentResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserConstantDependentResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName constant_dependent + * @RelayResolver User.constant_dependent: Int * @rootFragment UserConstantDependentResolver - * @onType User */ function constant_dependent( rootKey: UserConstantDependentResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserConstantResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserConstantResolver.js index 9c04f2b297782..123f5a02c771f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserConstantResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserConstantResolver.js 
@@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName constant + * @RelayResolver User.constant: Int * @rootFragment UserConstantResolver - * @onType User * * You thought "one" was the loneliest number? Pffft. Let me introduce you to zero! */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserCustomGreetingResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserCustomGreetingResolver.js index 23c1b86ad36c7..dc8cde143bd43 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserCustomGreetingResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserCustomGreetingResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName custom_greeting(salutation: String!) + * @RelayResolver User.custom_greeting(salutation: String!): String * @rootFragment UserCustomGreetingResolver - * @onType User * * Greet the user with a custom salutation provided via arguments. */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserGreetingResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserGreetingResolver.js index e9d276bb1b8a9..0e5009c2473c3 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserGreetingResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserGreetingResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName greeting + * @RelayResolver User.greeting: String * @rootFragment UserGreetingResolver - * @onType User * * Greet the user. 
*/ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserNameAndCounterSuspendsWhenOdd.js b/packages/relay-runtime/store/__tests__/resolvers/UserNameAndCounterSuspendsWhenOdd.js index 8655b97e79172..8343625f48f3b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserNameAndCounterSuspendsWhenOdd.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserNameAndCounterSuspendsWhenOdd.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName user_name_and_counter_suspends_when_odd + * @RelayResolver Query.user_name_and_counter_suspends_when_odd: String * @rootFragment UserNameAndCounterSuspendsWhenOdd - * @onType Query * */ function user_name_and_counter_suspends_when_odd( diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserNamePassthroughResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserNamePassthroughResolver.js index 34c278bd9e9b7..32e5af8ede7f0 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserNamePassthroughResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserNamePassthroughResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName name_passthrough + * @RelayResolver User.name_passthrough: String * @rootFragment UserNamePassthroughResolver - * @onType User */ function name_passthrough(rootKey: UserNamePassthroughResolver$key): ?string { const user = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserNullClientEdgeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserNullClientEdgeResolver.js index 91e5563a9419b..f64c7db283cb5 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserNullClientEdgeResolver.js +++ 
b/packages/relay-runtime/store/__tests__/resolvers/UserNullClientEdgeResolver.js @@ -18,11 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName null_client_edge + * @RelayResolver User.null_client_edge: User * @rootFragment UserNullClientEdgeResolver - * @onType User - * @edgeTo User */ function null_client_edge( rootKey: UserNullClientEdgeResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureResolver.js index 826d76e60113d..d8524e6b8b11f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName user_profile_picture_uri_with_scale + * @RelayResolver User.user_profile_picture_uri_with_scale: String * @rootFragment UserProfilePictureResolver - * @onType User */ function user_profile_picture_uri_with_scale( rootKey: UserProfilePictureResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js index d572d787b1993..0351e17aa0376 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.js @@ -12,20 +12,15 @@ 'use strict'; import type {UserProfilePictureUriSuspendsWhenTheCounterIsOdd$key} from './__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql'; -import type {LiveState} from 
'relay-runtime/store/experimental-live-resolvers/LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const {GLOBAL_STORE, Selectors} = require('./ExampleExternalStateStore'); -const {graphql} = require('relay-runtime'); -const { - suspenseSentinel, -} = require('relay-runtime/store/experimental-live-resolvers/LiveResolverSuspenseSentinel'); +const {graphql, suspenseSentinel} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName user_profile_picture_uri_suspends_when_the_counter_is_odd + * @RelayResolver User.user_profile_picture_uri_suspends_when_the_counter_is_odd: String * @rootFragment UserProfilePictureUriSuspendsWhenTheCounterIsOdd - * @onType User * @live * * This field returns the profile picture url, when the GLOBAL_STORE number is diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithDefaultValueResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithDefaultValueResolver.js index 48e04a21bfdb9..364214c2e32b2 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithDefaultValueResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithDefaultValueResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName user_profile_picture_uri_with_scale_and_default_value + * @RelayResolver User.user_profile_picture_uri_with_scale_and_default_value: String * @rootFragment UserProfilePictureWithDefaultValueResolver - * @onType User */ function user_profile_picture_uri_with_scale_and_default_value( rootKey: UserProfilePictureWithDefaultValueResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithRuntimeArgumentResolver.js 
b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithRuntimeArgumentResolver.js index 6e423fe1331f0..d6b53f55de6fa 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithRuntimeArgumentResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserProfilePictureWithRuntimeArgumentResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName user_profile_picture_uri_with_scale_and_additional_argument(name: String) + * @RelayResolver User.user_profile_picture_uri_with_scale_and_additional_argument(name: String): String * @rootFragment UserProfilePictureWithRuntimeArgumentResolver - * @onType User */ function user_profile_picture_uri_with_scale_and_additional_argument( rootKey: UserProfilePictureWithRuntimeArgumentResolver$key, diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserReadsClientEdgeResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserReadsClientEdgeResolver.js index 6bbf90b2baed6..2258bb3061362 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserReadsClientEdgeResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserReadsClientEdgeResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName reads_client_edge + * @RelayResolver User.reads_client_edge: String * @rootFragment UserReadsClientEdgeResolver - * @onType User * * Reads a client edge field and then returns a string */ diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserRequiredNameResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserRequiredNameResolver.js index cddfedb1f6777..019cc8e5b74cc 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserRequiredNameResolver.js +++ 
b/packages/relay-runtime/store/__tests__/resolvers/UserRequiredNameResolver.js @@ -18,10 +18,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName required_name + * @RelayResolver User.required_name: String * @rootFragment UserRequiredNameResolver - * @onType User */ function required_name(rootKey: UserRequiredNameResolver$key): string { const user = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/UserShoutedGreetingResolver.js b/packages/relay-runtime/store/__tests__/resolvers/UserShoutedGreetingResolver.js index 082989d3c59b0..9e6aafaea3a1f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/UserShoutedGreetingResolver.js +++ b/packages/relay-runtime/store/__tests__/resolvers/UserShoutedGreetingResolver.js @@ -17,10 +17,8 @@ const {graphql} = require('relay-runtime'); const {readFragment} = require('relay-runtime/store/ResolverFragments'); /** - * @RelayResolver - * @fieldName shouted_greeting + * @RelayResolver User.shouted_greeting: String * @rootFragment UserShoutedGreetingResolver - * @onType User */ function shouted_greeting(rootKey: UserShoutedGreetingResolver$key): string { const user = readFragment( diff --git a/packages/relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js b/packages/relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js new file mode 100644 index 0000000000000..0f1b37bd10562 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/WeakAnimalQueryResolvers.js @@ -0,0 +1,72 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @flow strict-local + * @oncall relay + */ + +import type {PurpleOctopus} from './PurpleOctopusResolvers'; +import type {RedOctopus} from './RedOctopusResolvers'; +import type {Query__weak_animal$normalization} from 'relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql'; + +type IWeakAnimal = RedOctopus | PurpleOctopus; + +/** + * Defines greeting of a `IWeakAnimal`. + * + * @RelayResolver IWeakAnimal.greeting: String + */ +function greeting(instance: IWeakAnimal): string { + return `Hello, ${instance.name}!`; +} + +/** + * Returns a single `IWeakAnimal` of a given type. + * + * @RelayResolver Query.weak_animal(request: WeakAnimalRequest!): IWeakAnimal + */ +function weak_animal(args: { + request: {ofType: string}, +}): Query__weak_animal$normalization { + switch (args.request.ofType) { + case 'RedOctopus': + return { + __relay_model_instance: { + name: 'Shiny', + }, + __typename: 'RedOctopus', + }; + case 'PurpleOctopus': + return { + __relay_model_instance: { + name: 'Glowing', + }, + __typename: 'PurpleOctopus', + }; + default: + throw new Error('Invalid type'); + } +} + +/** + * Returns a list of `IWeakAnimal` of a given type. 
+ * + * @RelayResolver Query.weak_animals(requests: [WeakAnimalRequest!]!): [IWeakAnimal] + */ +function weak_animals(args: { + requests: $ReadOnlyArray<{ofType: string}>, +}): Array { + return args.requests.map(request => { + return weak_animal({request}); + }); +} + +module.exports = { + weak_animal, + weak_animals, + greeting, +}; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignHouseResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignHouseResolver.graphql.js index 24c761bbcb552..ae26116cd4e21 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignHouseResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignHouseResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<1f22073a6d362270dd69398012576067>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {self as astrologicalSignSelfResolverType} from "../AstrologicalSignSelfR // A type error here indicates that the type signature of the resolver module is incorrect. 
(astrologicalSignSelfResolverType: ( rootKey: AstrologicalSignSelfResolver$key, -) => mixed); +) => ?mixed); declare export opaque type AstrologicalSignHouseResolver$fragmentType: FragmentType; export type AstrologicalSignHouseResolver$data = {| - +self: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignSelfResolverType>, + +self: ?ReturnType, +$fragmentType: AstrologicalSignHouseResolver$fragmentType, |}; export type AstrologicalSignHouseResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignNameResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignNameResolver.graphql.js index 9d78b00efb7a9..7aa56d28fcf3e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignNameResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignNameResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0384f2d8116fd6046b2f9761e7952eca>> + * @generated SignedSource<<7616f29f1b2a35691f571be34944c349>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {self as astrologicalSignSelfResolverType} from "../AstrologicalSignSelfR // A type error here indicates that the type signature of the resolver module is incorrect. 
(astrologicalSignSelfResolverType: ( rootKey: AstrologicalSignSelfResolver$key, -) => mixed); +) => ?mixed); declare export opaque type AstrologicalSignNameResolver$fragmentType: FragmentType; export type AstrologicalSignNameResolver$data = {| - +self: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignSelfResolverType>, + +self: ?ReturnType, +$fragmentType: AstrologicalSignNameResolver$fragmentType, |}; export type AstrologicalSignNameResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignOppositeResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignOppositeResolver.graphql.js index 37123a9bf1ff3..b10ceac491e51 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignOppositeResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/AstrologicalSignOppositeResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {self as astrologicalSignSelfResolverType} from "../AstrologicalSignSelfR // A type error here indicates that the type signature of the resolver module is incorrect. 
(astrologicalSignSelfResolverType: ( rootKey: AstrologicalSignSelfResolver$key, -) => mixed); +) => ?mixed); declare export opaque type AstrologicalSignOppositeResolver$fragmentType: FragmentType; export type AstrologicalSignOppositeResolver$data = {| - +self: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignSelfResolverType>, + +self: ?ReturnType, +$fragmentType: AstrologicalSignOppositeResolver$fragmentType, |}; export type AstrologicalSignOppositeResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..edacceb955f81 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat____relay_model_instance.graphql.js @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<8e0341bf38ecf7a722ac413f3e30a062>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Cat__id$data } from "./Cat__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Cat as catRelayModelInstanceResolverType} from "../CatResolvers.js"; +// Type assertion validating that `catRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(catRelayModelInstanceResolverType: ( + id: Cat__id$data['id'], +) => mixed); +declare export opaque type Cat____relay_model_instance$fragmentType: FragmentType; +export type Cat____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Cat____relay_model_instance$fragmentType, +|}; +export type Cat____relay_model_instance$key = { + +$data?: Cat____relay_model_instance$data, + +$fragmentSpreads: Cat____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Cat____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Cat__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./Cat__id.graphql'), require('./../CatResolvers').Cat, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Cat", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Cat____relay_model_instance$fragmentType, + Cat____relay_model_instance$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql.js new file mode 100644 index 0000000000000..24013b6537251 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Cat__id.graphql.js @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<9636d86db4beea4fdb8a20bb27fb9e85>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Cat__id$fragmentType: FragmentType; +export type Cat__id$data = {| + +id: string, + +$fragmentType: Cat__id$fragmentType, +|}; +export type Cat__id$key = { + +$data?: Cat__id$data, + +$fragmentSpreads: Cat__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Cat__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Cat", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Cat__id$fragmentType, + Cat__id$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/CounterPlusOneResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/CounterPlusOneResolver.graphql.js index 28142d9daefa9..adf3a1c65724a 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/CounterPlusOneResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/CounterPlusOneResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<70975cfa585217d068cfae8feccc3285>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { LiveCounterResolver$key } from "./LiveCounterResolver.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { 
LiveState, FragmentType } from "relay-runtime"; import {counter as queryCounterResolverType} from "../LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); declare export opaque type CounterPlusOneResolver$fragmentType: FragmentType; export type CounterPlusOneResolver$data = {| - +counter: $NonMaybeType<$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>>, + +counter: $NonMaybeType, +$fragmentType: CounterPlusOneResolver$fragmentType, |}; export type CounterPlusOneResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..fbac624f07183 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish____relay_model_instance.graphql.js @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { Fish__id$data } from "./Fish__id.graphql"; +import type { FragmentType } from "relay-runtime"; +import {Fish as fishRelayModelInstanceResolverType} from "../FishResolvers.js"; +// Type assertion validating that `fishRelayModelInstanceResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(fishRelayModelInstanceResolverType: ( + id: Fish__id$data['id'], +) => mixed); +declare export opaque type Fish____relay_model_instance$fragmentType: FragmentType; +export type Fish____relay_model_instance$data = {| + +__relay_model_instance: $NonMaybeType>, + +$fragmentType: Fish____relay_model_instance$fragmentType, +|}; +export type Fish____relay_model_instance$key = { + +$data?: Fish____relay_model_instance$data, + +$fragmentSpreads: Fish____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Fish____relay_model_instance", + "selections": [ + { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "Fish__id" + }, + "kind": "RelayResolver", + "name": "__relay_model_instance", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./Fish__id.graphql'), require('./../FishResolvers').Fish, 'id', true), + "path": "__relay_model_instance" + } + ], + "type": "Fish", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Fish____relay_model_instance$fragmentType, + Fish____relay_model_instance$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql.js new file mode 100644 index 0000000000000..9b5c255f268fa --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Fish__id.graphql.js @@ -0,0 +1,60 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<47ead1ae3d880ee8b9569a3aafda536a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type Fish__id$fragmentType: FragmentType; +export type Fish__id$data = {| + +id: string, + +$fragmentType: Fish__id$fragmentType, +|}; +export type Fish__id$key = { + +$data?: Fish__id$data, + +$fragmentSpreads: Fish__id$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "Fish__id", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "Fish", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + Fish__id$fragmentType, + Fish__id$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/HelloWorldResolverWithProvidedVariable.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/HelloWorldResolverWithProvidedVariable.graphql.js index 96975a55dc2b3..93ad92b0041d9 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/HelloWorldResolverWithProvidedVariable.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/HelloWorldResolverWithProvidedVariable.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<15ed106f46f6c3c809e7b14187e2320b>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -26,10 +26,10 @@ import {hello as queryHelloResolverType} from "../HelloWorldResolver.js"; args: {| world: string, |}, -) => mixed); +) => ?string); declare export opaque type HelloWorldResolverWithProvidedVariable$fragmentType: FragmentType; export type 
HelloWorldResolverWithProvidedVariable$data = {| - +hello: ?$Call<((...empty[]) => R) => R, typeof queryHelloResolverType>, + +hello: ?string, +$fragmentType: HelloWorldResolverWithProvidedVariable$fragmentType, |}; export type HelloWorldResolverWithProvidedVariable$key = { @@ -43,7 +43,7 @@ var node/*: ReaderFragment*/ = { "argumentDefinitions": [ { "kind": "RootArgument", - "name": "__relay_internal__pv__HelloWorldProviderjs" + "name": "__relay_internal__pv__HelloWorldProviderrelayprovider" } ], "kind": "Fragment", @@ -59,7 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Variable", "name": "world", - "variableName": "__relay_internal__pv__HelloWorldProviderjs" + "variableName": "__relay_internal__pv__HelloWorldProviderrelayprovider" } ], "fragment": null, @@ -76,7 +76,7 @@ var node/*: ReaderFragment*/ = { }; if (__DEV__) { - (node/*: any*/).hash = "eaa3cd07d00f67f243afb816ae3dc3dd"; + (node/*: any*/).hash = "9f94df55099df09e6d33779b83f732fc"; } module.exports = ((node/*: any*/)/*: Fragment< diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveExternalGreetingFragment.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveExternalGreetingFragment.graphql.js index f4df95a6c9cd9..5e474d7a9379b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveExternalGreetingFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveExternalGreetingFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7b8755c0195c7f28da377eb79b22e7de>> + * @generated SignedSource<<62f3c8051cfe17199571c43727101d27>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,8 +18,7 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType, DataID } from "relay-runtime"; +import type { LiveState, FragmentType, DataID } 
from "relay-runtime"; import {live_user_suspends_when_odd as queryLiveUserSuspendsWhenOddResolverType} from "../LiveUserSuspendsWhenOdd.js"; // Type assertion validating that `queryLiveUserSuspendsWhenOddResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestBatchingQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestBatchingQuery.graphql.js index 2addd99ea901f..101dc1b7fa2e1 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestBatchingQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestBatchingQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0ac01f7ef483d22b380b5f25e45c7538>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,11 +18,11 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. -(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); import {counter_no_fragment_with_arg as queryCounterNoFragmentWithArgResolverType} from "../LiveCounterNoFragmentWithArg.js"; // Type assertion validating that `queryCounterNoFragmentWithArgResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -30,11 +30,11 @@ import {counter_no_fragment_with_arg as queryCounterNoFragmentWithArgResolverTyp args: {| prefix: string, |}, -) => LiveState); +) => LiveState); export type LiveResolversTestBatchingQuery$variables = {||}; export type LiveResolversTestBatchingQuery$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, - +counter_no_fragment_with_arg: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentWithArgResolverType>["read"]>, + +counter_no_fragment: ?number, + +counter_no_fragment_with_arg: ?string, |}; export type LiveResolversTestBatchingQuery = {| response: LiveResolversTestBatchingQuery$data, @@ -99,7 +99,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "name": "counter_no_fragment_with_arg", @@ -107,7 +107,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": "counter_no_fragment_with_arg(prefix:\"sup\")", - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnReadQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnReadQuery.graphql.js new file mode 100644 index 0000000000000..7eccf9608ac4d --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnReadQuery.graphql.js @@ -0,0 +1,99 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<2b4289691ad58384a61d41abd48c29ac>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState } from "relay-runtime"; +import {counter_throws_when_odd as queryCounterThrowsWhenOddResolverType} from "../QueryLiveResolverThrowsOnRead.js"; +// Type assertion validating that `queryCounterThrowsWhenOddResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryCounterThrowsWhenOddResolverType: () => LiveState); +export type LiveResolversTestHandlesErrorOnReadQuery$variables = {||}; +export type LiveResolversTestHandlesErrorOnReadQuery$data = {| + +counter_throws_when_odd: ?number, +|}; +export type LiveResolversTestHandlesErrorOnReadQuery = {| + response: LiveResolversTestHandlesErrorOnReadQuery$data, + variables: LiveResolversTestHandlesErrorOnReadQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "LiveResolversTestHandlesErrorOnReadQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "counter_throws_when_odd", + "resolverModule": require('./../QueryLiveResolverThrowsOnRead').counter_throws_when_odd, + "path": "counter_throws_when_odd" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "LiveResolversTestHandlesErrorOnReadQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "counter_throws_when_odd", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + 
}, + "params": { + "cacheID": "592ff0894b0f30b3727bc99191081c3e", + "id": null, + "metadata": {}, + "name": "LiveResolversTestHandlesErrorOnReadQuery", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "47643627cf996a71e53ba0dfbbfdef54"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + LiveResolversTestHandlesErrorOnReadQuery$variables, + LiveResolversTestHandlesErrorOnReadQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnUpdateQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnUpdateQuery.graphql.js new file mode 100644 index 0000000000000..1252f954c3a06 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestHandlesErrorOnUpdateQuery.graphql.js @@ -0,0 +1,99 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<84dd07289f52b49c224048d9caeb07fb>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import type { LiveState } from "relay-runtime"; +import {counter_throws_when_odd as queryCounterThrowsWhenOddResolverType} from "../QueryLiveResolverThrowsOnRead.js"; +// Type assertion validating that `queryCounterThrowsWhenOddResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. 
+(queryCounterThrowsWhenOddResolverType: () => LiveState); +export type LiveResolversTestHandlesErrorOnUpdateQuery$variables = {||}; +export type LiveResolversTestHandlesErrorOnUpdateQuery$data = {| + +counter_throws_when_odd: ?number, +|}; +export type LiveResolversTestHandlesErrorOnUpdateQuery = {| + response: LiveResolversTestHandlesErrorOnUpdateQuery$data, + variables: LiveResolversTestHandlesErrorOnUpdateQuery$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "LiveResolversTestHandlesErrorOnUpdateQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "fragment": null, + "kind": "RelayLiveResolver", + "name": "counter_throws_when_odd", + "resolverModule": require('./../QueryLiveResolverThrowsOnRead').counter_throws_when_odd, + "path": "counter_throws_when_odd" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "LiveResolversTestHandlesErrorOnUpdateQuery", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "counter_throws_when_odd", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "da6bc143a73ab417777b9f52ab85616f", + "id": null, + "metadata": {}, + "name": "LiveResolversTestHandlesErrorOnUpdateQuery", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "b0fe01bebd0ba17a2b27b256f6391a2d"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + LiveResolversTestHandlesErrorOnUpdateQuery$variables, + LiveResolversTestHandlesErrorOnUpdateQuery$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestUnsubscribesWhenSuspendsQuery.graphql.js 
b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestUnsubscribesWhenSuspendsQuery.graphql.js index 964b0832fc5dd..6fb127f77e244 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestUnsubscribesWhenSuspendsQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/LiveResolversTestUnsubscribesWhenSuspendsQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,15 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { DataID } from "relay-runtime"; +import type { LiveState, DataID } from "relay-runtime"; import type { LiveExternalGreetingFragment$key } from "./LiveExternalGreetingFragment.graphql"; import {live_external_greeting as queryLiveExternalGreetingResolverType} from "../LiveExternalGreeting.js"; // Type assertion validating that `queryLiveExternalGreetingResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (queryLiveExternalGreetingResolverType: ( rootKey: LiveExternalGreetingFragment$key, -) => LiveState); +) => LiveState); import {live_user_suspends_when_odd as queryLiveUserSuspendsWhenOddResolverType} from "../LiveUserSuspendsWhenOdd.js"; // Type assertion validating that `queryLiveUserSuspendsWhenOddResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -35,7 +34,7 @@ import {live_user_suspends_when_odd as queryLiveUserSuspendsWhenOddResolverType} |}>); export type LiveResolversTestUnsubscribesWhenSuspendsQuery$variables = {||}; export type LiveResolversTestUnsubscribesWhenSuspendsQuery$data = {| - +greeting: ?$Call<$Call<((...empty[]) => R) => R, typeof queryLiveExternalGreetingResolverType>["read"]>, + +greeting: ?string, +user: ?{| +id: string, |}, @@ -132,7 +131,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/OuterResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/OuterResolver.graphql.js index 2b2121edcb66e..60084a0936e4b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/OuterResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/OuterResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<5f534b7564467882d48081cac840d8c9>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { InnerResolver$key } from "./InnerResolver.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {inner as queryInnerResolverType} from "../InnerResolver.js"; // Type assertion validating that `queryInnerResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryInnerResolverType: ( rootKey: InnerResolver$key, -) => LiveState); +) => LiveState); declare export opaque type OuterResolver$fragmentType: FragmentType; export type OuterResolver$data = {| - +inner: ?$Call<$Call<((...empty[]) => R) => R, typeof queryInnerResolverType>["read"]>, + +inner: ?number, +$fragmentType: OuterResolver$fragmentType, |}; export type OuterResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..79d9dfd57e900 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/PurpleOctopus____relay_model_instance.graphql.js @@ -0,0 +1,61 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { PurpleOctopus } from "../PurpleOctopusResolvers.js"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type PurpleOctopus____relay_model_instance$fragmentType: FragmentType; +export type PurpleOctopus____relay_model_instance$data = {| + +__relay_model_instance: PurpleOctopus, + +$fragmentType: PurpleOctopus____relay_model_instance$fragmentType, +|}; +export type PurpleOctopus____relay_model_instance$key = { + +$data?: PurpleOctopus____relay_model_instance$data, + +$fragmentSpreads: PurpleOctopus____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "PurpleOctopus____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "PurpleOctopus", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + PurpleOctopus____relay_model_instance$fragmentType, + PurpleOctopus____relay_model_instance$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__live_todo_description$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__live_todo_description$normalization.graphql.js deleted file mode 100644 index 7373b83d8d0cf..0000000000000 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__live_todo_description$normalization.graphql.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -import type { TodoDescription } from "../TodoDescription.js"; -export type Query__live_todo_description$normalization = {| - +__relay_model_instance: ?TodoDescription, -|}; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "Query__live_todo_description$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -}; - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__many_live_todos$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__many_live_todos$normalization.graphql.js new file mode 100644 index 0000000000000..42b692cb0eec8 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__many_live_todos$normalization.graphql.js @@ -0,0 +1,48 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<74fd0ae7e5c35857bcd216ec356d5ceb>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +export type Query__many_live_todos$normalization = {| + +todo_id: string, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "Query__many_live_todos$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "todo_id", + "storageKey": null + } + ] + } + ] +}; + +module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql.js new file mode 100644 index 0000000000000..8238d1d74f0c8 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animal$normalization.graphql.js @@ -0,0 +1,77 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<31d6ff1e0c9d59015ae3c217b6c8de59>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +import type { PurpleOctopus } from "../PurpleOctopusResolvers.js"; +import type { RedOctopus } from "../RedOctopusResolvers.js"; +export type Query__weak_animal$normalization = {| + +__typename: "PurpleOctopus", + +__relay_model_instance: PurpleOctopus, +|} | {| + +__typename: "RedOctopus", + +__relay_model_instance: RedOctopus, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } +]; +return { + "kind": "SplitOperation", + "metadata": {}, + "name": "Query__weak_animal$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "RedOctopus", + "abstractKey": null + } + ] + } + ] +}; +})(); + +module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animals$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animals$normalization.graphql.js new file mode 100644 index 0000000000000..d0bdd86c02b86 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/Query__weak_animals$normalization.graphql.js @@ -0,0 +1,77 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<3ce5190eb007feeb3e5fe946464105d8>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +import type { PurpleOctopus } from "../PurpleOctopusResolvers.js"; +import type { RedOctopus } from "../RedOctopusResolvers.js"; +export type Query__weak_animals$normalization = {| + +__typename: "PurpleOctopus", + +__relay_model_instance: PurpleOctopus, +|} | {| + +__typename: "RedOctopus", + +__relay_model_instance: RedOctopus, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = (function(){ +var v0 = [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + } +]; +return { + "kind": "SplitOperation", + "metadata": {}, + "name": "Query__weak_animals$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "PurpleOctopus", + "abstractKey": null + }, + { + "kind": "InlineFragment", + "selections": (v0/*: any*/), + "type": "RedOctopus", + "abstractKey": null + } + ] + } + ] +}; +})(); + +module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..8f739623b6aa6 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RedOctopus____relay_model_instance.graphql.js @@ -0,0 +1,61 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<751fc00d0d418c2e811a86dc7eed794a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { RedOctopus } from "../RedOctopusResolvers.js"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RedOctopus____relay_model_instance$fragmentType: FragmentType; +export type RedOctopus____relay_model_instance$data = {| + +__relay_model_instance: RedOctopus, + +$fragmentType: RedOctopus____relay_model_instance$fragmentType, +|}; +export type RedOctopus____relay_model_instance$key = { + +$data?: RedOctopus____relay_model_instance$data, + +$fragmentSpreads: RedOctopus____relay_model_instance$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RedOctopus____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "RedOctopus", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + RedOctopus____relay_model_instance$fragmentType, + RedOctopus____relay_model_instance$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveExternalGreetingFragment_user.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveExternalGreetingFragment_user.graphql.js index d8c7eb643f408..e5f262118240f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveExternalGreetingFragment_user.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveExternalGreetingFragment_user.graphql.js @@ -6,7 
+6,7 @@ * * @oncall relay * - * @generated SignedSource<<97b6fd7bad5fcac105510cd2b235f965>> + * @generated SignedSource<<28f257fe1aeac1511dcfef10e4fdcaf8>> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_LiveExternalGreetingFragment_user.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_LiveExternalGreetingFragment_user", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user.graphql.js index 9565882bb332e..07f7b14762d4c 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<02f06f3b82d93c55847bce5d5f4da3ba>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -42,7 +42,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_LiveResolversTestUnsubscribesWhenSuspendsQuery_user", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge.graphql.js 
b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge.graphql.js index cdbf55e84990b..e85e993df4a18 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -62,7 +62,10 @@ return { "node" ], "operation": require('./ClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerQuery_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge.graphql.js index 2cead50b3092f..8f756745fc480 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0bb6f8d46d9b4cd925a53212bcd6b7a5>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -72,7 
+72,10 @@ return { "node" ], "operation": require('./ClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge.graphql.js index eb3a77855be1b..bfc2ce6474f0c 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<69a75fdd1599dd19df56af20f9a1c99c>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ResolverGCTestResolverClientEdgeToServerRecursiveQuery_me__client_edge__another_client_edge", diff --git 
a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverTest3Query_me__client_edge.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverTest3Query_me__client_edge.graphql.js index cc9aa4016ff7c..f713fac94188b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverTest3Query_me__client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_ResolverTest3Query_me__client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9f9dc98e2dea2aa91bff4a3e6b91c837>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ClientEdgeQuery_ResolverTest3Query_me__client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_ResolverTest3Query_me__client_edge", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_UserReadsClientEdgeResolver_client_edge.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_UserReadsClientEdgeResolver_client_edge.graphql.js index c5e9c4e49a1c6..549a9c6f997db 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_UserReadsClientEdgeResolver_client_edge.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/RefetchableClientEdgeQuery_UserReadsClientEdgeResolver_client_edge.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<18ca59fec6559882d9d29737a41b85fd>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -43,7 +43,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": 
require('./ClientEdgeQuery_UserReadsClientEdgeResolver_client_edge.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "RefetchableClientEdgeQuery_UserReadsClientEdgeResolver_client_edge", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestLiveWithRootFragmentQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestLiveWithRootFragmentQuery.graphql.js index b2e447f45b83f..28aa1ce2150c8 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestLiveWithRootFragmentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestLiveWithRootFragmentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -18,17 +18,17 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import type { LiveCounterResolver$key } from "./LiveCounterResolver.graphql"; import {counter as queryCounterResolverType} from "../LiveCounterResolver.js"; // Type assertion validating that `queryCounterResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterResolverType: ( rootKey: LiveCounterResolver$key, -) => LiveState); +) => LiveState); export type ResolverGCTestLiveWithRootFragmentQuery$variables = {||}; export type ResolverGCTestLiveWithRootFragmentQuery$data = {| - +counter: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterResolverType>["read"]>, + +counter: ?number, |}; export type ResolverGCTestLiveWithRootFragmentQuery = {| response: ResolverGCTestLiveWithRootFragmentQuery$data, @@ -108,7 +108,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentDynamicArgsQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentDynamicArgsQuery.graphql.js index 4b8029ac23ea6..94477e373ff40 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentDynamicArgsQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentDynamicArgsQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<479292e81ddc185bf8e31dea5a31dc2b>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,12 +25,12 @@ import {hello as queryHelloResolverType} from "../HelloWorldResolver.js"; args: {| world: string, |}, -) => mixed); +) => ?string); export type ResolverGCTestNoFragmentDynamicArgsQuery$variables = {| world: string, |}; export type ResolverGCTestNoFragmentDynamicArgsQuery$data = {| - +hello: ?$Call<((...empty[]) => R) => R, typeof queryHelloResolverType>, + +hello: ?string, |}; export type ResolverGCTestNoFragmentDynamicArgsQuery = {| response: ResolverGCTestNoFragmentDynamicArgsQuery$data, @@ -93,7 +93,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git 
a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentStaticArgsQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentStaticArgsQuery.graphql.js index 00d2e3f6f4a34..5a95cb176f8a4 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentStaticArgsQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNoFragmentStaticArgsQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<31d453b47a28f158da3436aa01ac7d34>> + * @generated SignedSource<<4ad3a30f5086fc273a1dba408c58dcdd>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {hello as queryHelloResolverType} from "../HelloWorldResolver.js"; args: {| world: string, |}, -) => mixed); +) => ?string); export type ResolverGCTestNoFragmentStaticArgsQuery$variables = {||}; export type ResolverGCTestNoFragmentStaticArgsQuery$data = {| - +hello: ?$Call<((...empty[]) => R) => R, typeof queryHelloResolverType>, + +hello: ?string, |}; export type ResolverGCTestNoFragmentStaticArgsQuery = {| response: ResolverGCTestNoFragmentStaticArgsQuery$data, @@ -84,7 +84,7 @@ return { "fragment": null, "kind": "RelayResolver", "storageKey": "hello(world:\"Planet\")", - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNonLiveWithFragmentQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNonLiveWithFragmentQuery.graphql.js index cdb73b6984a88..7585a554583f8 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNonLiveWithFragmentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestNonLiveWithFragmentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8d6ecb4106402f67d446393478f0375d>> 
+ * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. (userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type ResolverGCTestNonLiveWithFragmentQuery$variables = {||}; export type ResolverGCTestNonLiveWithFragmentQuery$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type ResolverGCTestNonLiveWithFragmentQuery = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestRegularReadsLiveQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestRegularReadsLiveQuery.graphql.js index edba28d912c71..8890192246eff 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestRegularReadsLiveQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestRegularReadsLiveQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<981b807cf2101d110c4ed39b32dddad0>> + * @generated SignedSource<<2c7955ad5e93b9499d7c2a51f2af0bb3>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,10 +24,10 @@ import {counter_plus_one as queryCounterPlusOneResolverType} from "../CounterPlu // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterPlusOneResolverType: ( rootKey: CounterPlusOneResolver$key, -) => mixed); +) => ?number); export type ResolverGCTestRegularReadsLiveQuery$variables = {||}; export type ResolverGCTestRegularReadsLiveQuery$data = {| - +counter_plus_one: ?$Call<((...empty[]) => R) => R, typeof queryCounterPlusOneResolverType>, + +counter_plus_one: ?number, |}; export type ResolverGCTestRegularReadsLiveQuery = {| response: ResolverGCTestRegularReadsLiveQuery$data, @@ -113,7 +113,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "Query", @@ -121,7 +121,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] }, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientDirtyQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientDirtyQuery.graphql.js index 1d0e75bf8c556..f1bc25c2b7b2c 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientDirtyQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientDirtyQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<526141687082bbb8d393c2eaa175fbdb>> * @flow * @lightSyntaxTransform * @nogrep @@ -26,7 +26,7 @@ import {name as astrologicalSignNameResolverType} from "../AstrologicalSignNameR // A type error here indicates that the type signature of the resolver module is incorrect. 
(astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {astrological_sign as userAstrologicalSignResolverType} from "../UserAstrologicalSignResolver.js"; // Type assertion validating that `userAstrologicalSignResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -39,7 +39,7 @@ export type ResolverGCTestResolverClientEdgeToClientDirtyQuery$variables = {||}; export type ResolverGCTestResolverClientEdgeToClientDirtyQuery$data = {| +me: ?{| +astrological_sign: ?{| - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, |}, |}, |}; @@ -77,6 +77,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -109,7 +110,7 @@ return { "kind": "RelayResolver", "name": "name", "resolverModule": require('./../AstrologicalSignNameResolver').name, - "path": "me.name" + "path": "me.astrological_sign.name" } ], "storageKey": null @@ -204,7 +205,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -212,7 +213,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientQuery.graphql.js index d7b255a3cec79..24306a5f96473 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientQuery.graphql.js @@ -6,7 +6,7 @@ * * 
@oncall relay * - * @generated SignedSource<<85091a242547c53d799be61aa402331c>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -26,7 +26,7 @@ import {name as astrologicalSignNameResolverType} from "../AstrologicalSignNameR // A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {astrological_sign as userAstrologicalSignResolverType} from "../UserAstrologicalSignResolver.js"; // Type assertion validating that `userAstrologicalSignResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -39,7 +39,7 @@ export type ResolverGCTestResolverClientEdgeToClientQuery$variables = {||}; export type ResolverGCTestResolverClientEdgeToClientQuery$data = {| +me: ?{| +astrological_sign: ?{| - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, |}, |}, |}; @@ -77,6 +77,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -109,7 +110,7 @@ return { "kind": "RelayResolver", "name": "name", "resolverModule": require('./../AstrologicalSignNameResolver').name, - "path": "me.name" + "path": "me.astrological_sign.name" } ], "storageKey": null @@ -204,7 +205,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -212,7 +213,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v0/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientRecursiveQuery.graphql.js 
b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientRecursiveQuery.graphql.js index f3169e4f560f2..c033c6cb3b15f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientRecursiveQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientRecursiveQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<3983ea2457c99cb128c5e472f2360785>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -27,7 +27,7 @@ import {name as astrologicalSignNameResolverType} from "../AstrologicalSignNameR // A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {opposite as astrologicalSignOppositeResolverType} from "../AstrologicalSignOppositeResolver.js"; // Type assertion validating that `astrologicalSignOppositeResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -48,9 +48,9 @@ export type ResolverGCTestResolverClientEdgeToClientRecursiveQuery$variables = { export type ResolverGCTestResolverClientEdgeToClientRecursiveQuery$data = {| +me: ?{| +astrological_sign: ?{| - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, +opposite: ?{| - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, |}, |}, |}, @@ -63,17 +63,9 @@ export type ResolverGCTestResolverClientEdgeToClientRecursiveQuery = {| var node/*: ConcreteRequest*/ = (function(){ var v0 = { - "alias": null, "args": null, - "fragment": { - "args": null, - "kind": "FragmentSpread", - "name": "AstrologicalSignNameResolver" - }, - "kind": "RelayResolver", - "name": "name", - "resolverModule": require('./../AstrologicalSignNameResolver').name, - "path": "me.name" + "kind": "FragmentSpread", + "name": "AstrologicalSignNameResolver" }, v1 = { "alias": null, @@ -98,7 +90,7 @@ v2 = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -110,7 +102,7 @@ v3 = { "fragment": (v2/*: any*/), "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }; return { "fragment": { @@ -132,6 +124,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -153,10 +146,19 @@ return { "name": "astrological_sign", "plural": false, "selections": [ - (v0/*: any*/), + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.name" + }, { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -168,7 +170,7 @@ return { "kind": "RelayResolver", "name": 
"opposite", "resolverModule": require('./../AstrologicalSignOppositeResolver').opposite, - "path": "me.opposite" + "path": "me.astrological_sign.opposite" }, "linkedField": { "alias": null, @@ -178,7 +180,15 @@ return { "name": "opposite", "plural": false, "selections": [ - (v0/*: any*/) + { + "alias": null, + "args": null, + "fragment": (v0/*: any*/), + "kind": "RelayResolver", + "name": "name", + "resolverModule": require('./../AstrologicalSignNameResolver').name, + "path": "me.astrological_sign.opposite.name" + } ], "storageKey": null } diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientSuspendedQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientSuspendedQuery.graphql.js index 10bc6aa956472..04e8b2bd64c7f 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientSuspendedQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToClientSuspendedQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<5adf3b6239e3a00c98a2127e00a4ec6a>> + * @generated SignedSource<<2935b9600729c90bf8148a5deba4ec77>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,15 +18,14 @@ /*:: import type { ConcreteRequest, Query } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { DataID } from "relay-runtime"; +import type { LiveState, DataID } from "relay-runtime"; import type { AstrologicalSignNameResolver$key } from "./AstrologicalSignNameResolver.graphql"; import {name as astrologicalSignNameResolverType} from "../AstrologicalSignNameResolver.js"; // Type assertion validating that `astrologicalSignNameResolverType` resolver is correctly implemented. 
// A type error here indicates that the type signature of the resolver module is incorrect. (astrologicalSignNameResolverType: ( rootKey: AstrologicalSignNameResolver$key, -) => mixed); +) => ?string); import {virgo_suspends_when_counter_is_odd as queryVirgoSuspendsWhenCounterIsOddResolverType} from "../QueryVirgoLiveSuspendsWhenOddResolver.js"; // Type assertion validating that `queryVirgoSuspendsWhenCounterIsOddResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -39,7 +38,7 @@ export type ResolverGCTestResolverClientEdgeToClientSuspendedQuery$data = {| +__typename: "User", |}, +virgo_suspends_when_counter_is_odd: ?{| - +name: ?$Call<((...empty[]) => R) => R, typeof astrologicalSignNameResolverType>, + +name: ?string, |}, |}; export type ResolverGCTestResolverClientEdgeToClientSuspendedQuery = {| @@ -75,6 +74,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, @@ -103,7 +103,7 @@ return { "kind": "RelayResolver", "name": "name", "resolverModule": require('./../AstrologicalSignNameResolver').name, - "path": "name" + "path": "virgo_suspends_when_counter_is_odd.name" } ], "storageKey": null @@ -168,7 +168,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "AstrologicalSign", @@ -176,7 +176,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, (v1/*: any*/) ], diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToPluralClientQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToPluralClientQuery.graphql.js index cf18ebdf531f9..8e0cfe5768446 100644 --- 
a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToPluralClientQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToPluralClientQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<5b37e8122d1f8cbc3d144e5fbb7fb54b>> + * @generated SignedSource<<2c6ea0d0c8b520149edec78ffb8050df>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,12 +25,12 @@ import {all_astrological_signs as queryAllAstrologicalSignsResolverType} from ". // A type error here indicates that the type signature of the resolver module is incorrect. (queryAllAstrologicalSignsResolverType: ( rootKey: QueryAllAstrologicalSignsResolver$key, -) => ?$ReadOnlyArray ?$ReadOnlyArray<{| +id: DataID, |}>); export type ResolverGCTestResolverClientEdgeToPluralClientQuery$variables = {||}; export type ResolverGCTestResolverClientEdgeToPluralClientQuery$data = {| - +all_astrological_signs: ?$ReadOnlyArray, |}; @@ -67,6 +67,7 @@ return { { "kind": "ClientEdgeToClientObject", "concreteType": "AstrologicalSign", + "modelResolvers": null, "backingField": { "alias": null, "args": null, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToServerRecursiveQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToServerRecursiveQuery.graphql.js index dfa239e8347c6..89efca7f1640e 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToServerRecursiveQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestResolverClientEdgeToServerRecursiveQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7f6ed073ea74b712f612f40dcb22e8ab>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -130,7 +130,7 @@ return { "kind": "RelayResolver", 
"name": "another_client_edge", "resolverModule": require('./../UserAnotherClientEdgeResolver').another_client_edge, - "path": "me.another_client_edge" + "path": "me.client_edge.another_client_edge" }, "linkedField": { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestWithoutFragmentQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestWithoutFragmentQuery.graphql.js index b9231a3f2c6aa..22596f2cec583 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestWithoutFragmentQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverGCTestWithoutFragmentQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<697bfeb0df888401d7799167008091f4>> * @flow * @lightSyntaxTransform * @nogrep @@ -18,14 +18,14 @@ /*:: import type { ClientRequest, ClientQuery } from 'relay-runtime'; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; +import type { LiveState } from "relay-runtime"; import {counter_no_fragment as queryCounterNoFragmentResolverType} from "../LiveCounterNoFragment.js"; // Type assertion validating that `queryCounterNoFragmentResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
-(queryCounterNoFragmentResolverType: () => LiveState); +(queryCounterNoFragmentResolverType: () => LiveState); export type ResolverGCTestWithoutFragmentQuery$variables = {||}; export type ResolverGCTestWithoutFragmentQuery$data = {| - +counter_no_fragment: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterNoFragmentResolverType>["read"]>, + +counter_no_fragment: ?number, |}; export type ResolverGCTestWithoutFragmentQuery = {| response: ResolverGCTestWithoutFragmentQuery$data, @@ -73,7 +73,7 @@ var node/*: ClientRequest*/ = { "fragment": null, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ] } diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1FragmentRefetchableQuery.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1FragmentRefetchableQuery.graphql.js index 56242ab0e7fc9..ebff096ecfaec 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1FragmentRefetchableQuery.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1FragmentRefetchableQuery.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<55a0e477034b80dbbf8043dec3bd8e14>> + * @generated SignedSource<<8a86f79dfa13ce5ed3c3c7cd1b551788>> * @flow * @lightSyntaxTransform * @nogrep @@ -126,7 +126,7 @@ return { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true } ], "type": "User", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1Query.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1Query.graphql.js index 2bf860841d2ce..bd18812d72e80 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest1Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay 
* - * @generated SignedSource<> + * @generated SignedSource<<369388c2b96cc113bdd0bae64d5699e3>> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. (userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); export type ResolverTest1Query$variables = {||}; export type ResolverTest1Query$data = {| +me: ?{| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}; export type ResolverTest1Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Fragment.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Fragment.graphql.js index 373180604a9fc..0a85221c62315 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Fragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<53b0ceb6d8eee75e5095d64c1c90d2b2>> + * @generated SignedSource<<4e8978f900bfe569ce4768ceeb469c48>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,11 +25,11 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); declare export opaque type ResolverTest2Fragment$fragmentType: FragmentType; type ResolverTest1FragmentRefetchableQuery$variables = any; export type ResolverTest2Fragment$data = {| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, +id: string, +$fragmentType: ResolverTest2Fragment$fragmentType, |}; @@ -50,7 +50,10 @@ var node/*: ReaderFragment*/ = { "node" ], "operation": require('./ResolverTest1FragmentRefetchableQuery.graphql'), - "identifierField": "id" + "identifierInfo": { + "identifierField": "id", + "identifierQueryVariableName": "id" + } } }, "name": "ResolverTest2Fragment", diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Query.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Query.graphql.js index b2373a6f76526..5e1499c7bddd2 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Query.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<8e24c8fd2cb3df145cd6d92530df3d79>> + * @generated SignedSource<<9e26cc074469bf4718109c45af99d516>> * @flow * @lightSyntaxTransform * @nogrep @@ -91,7 +91,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest4Query.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest4Query.graphql.js new file mode 100644 index 0000000000000..7d3fb01cc0426 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/ResolverTest4Query.graphql.js @@ -0,0 +1,102 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<3790f2252c4bb388f9632fa6120d1ffc>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ClientRequest, ClientQuery } from 'relay-runtime'; +import {hello_optional_world as queryHelloOptionalWorldResolverType} from "../HelloWorldOptionalResolver.js"; +// Type assertion validating that `queryHelloOptionalWorldResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(queryHelloOptionalWorldResolverType: ( + args: {| + world: ?string, + |}, +) => ?string); +export type ResolverTest4Query$variables = {||}; +export type ResolverTest4Query$data = {| + +hello_optional_world: ?string, +|}; +export type ResolverTest4Query = {| + response: ResolverTest4Query$data, + variables: ResolverTest4Query$variables, +|}; +*/ + +var node/*: ClientRequest*/ = { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "ResolverTest4Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": [], + "fragment": null, + "kind": "RelayResolver", + "name": "hello_optional_world", + "resolverModule": require('./../HelloWorldOptionalResolver').hello_optional_world, + "path": "hello_optional_world" + } + ] + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "ResolverTest4Query", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "name": "hello_optional_world", + "args": null, + "fragment": null, + "kind": "RelayResolver", + "storageKey": null, + "isOutputType": true + } + ] + } + ] + }, + "params": { + "cacheID": "92f8b77326cde6e1f67d081155d0dfc7", + "id": 
null, + "metadata": {}, + "name": "ResolverTest4Query", + "operationKind": "query", + "text": null + } +}; + +if (__DEV__) { + (node/*: any*/).hash = "9f62459f73e4ec513cae290b387e5e13"; +} + +module.exports = ((node/*: any*/)/*: ClientQuery< + ResolverTest4Query$variables, + ResolverTest4Query$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoBlockedByResolverFragment.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoBlockedByResolverFragment.graphql.js index d9d06bb3c80bd..ea6fecab2436c 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoBlockedByResolverFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoBlockedByResolverFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<0ef603cdad4a83418167ae3aeaa07aa7>> + * @generated SignedSource<<31dede260e294905a28597464fa14b14>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoSelfResolverFragment$key } from "./TodoSelfResolverFragment.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {self as todoSelfResolverType} from "../TodoSelfResolver.js"; // Type assertion validating that `todoSelfResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoSelfResolverType: ( rootKey: TodoSelfResolverFragment$key, -) => LiveState); +) => LiveState); declare export opaque type TodoBlockedByResolverFragment$fragmentType: FragmentType; export type TodoBlockedByResolverFragment$data = {| - +self: ?$Call<$Call<((...empty[]) => R) => R, typeof todoSelfResolverType>["read"]>, + +self: ?ReturnType["read"]>, +$fragmentType: TodoBlockedByResolverFragment$fragmentType, |}; export type TodoBlockedByResolverFragment$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoCompleteResolverFragment.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoCompleteResolverFragment.graphql.js index 86855fce27d11..772221c1d9a93 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoCompleteResolverFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoCompleteResolverFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<2144d1aeb2ede941021805fa1e517d3b>> + * @generated SignedSource<<06be9606e2d922971a1baa73de79fddb>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoSelfResolverFragment$key } from "./TodoSelfResolverFragment.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {self as todoSelfResolverType} from "../TodoSelfResolver.js"; // Type assertion validating that `todoSelfResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(todoSelfResolverType: ( rootKey: TodoSelfResolverFragment$key, -) => LiveState); +) => LiveState); declare export opaque type TodoCompleteResolverFragment$fragmentType: FragmentType; export type TodoCompleteResolverFragment$data = {| - +self: ?$Call<$Call<((...empty[]) => R) => R, typeof todoSelfResolverType>["read"]>, + +self: ?ReturnType["read"]>, +$fragmentType: TodoCompleteResolverFragment$fragmentType, |}; export type TodoCompleteResolverFragment$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescriptionStyle____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescriptionStyle____relay_model_instance.graphql.js new file mode 100644 index 0000000000000..87aadd3f9bd39 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescriptionStyle____relay_model_instance.graphql.js @@ -0,0 +1,61 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { TodoDescriptionStyle } from "../TodoDescription.js"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type TodoDescriptionStyle____relay_model_instance$fragmentType: FragmentType; +export type TodoDescriptionStyle____relay_model_instance$data = {| + +__relay_model_instance: TodoDescriptionStyle, + +$fragmentType: TodoDescriptionStyle____relay_model_instance$fragmentType, +|}; +export type TodoDescriptionStyle____relay_model_instance$key = { + +$data?: TodoDescriptionStyle____relay_model_instance$data, + +$fragmentSpreads: TodoDescriptionStyle____relay_model_instance$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "TodoDescriptionStyle____relay_model_instance", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__relay_model_instance", + "storageKey": null + } + ] + } + ], + "type": "TodoDescriptionStyle", + "abstractKey": null +}; + +module.exports = ((node/*: any*/)/*: Fragment< + TodoDescriptionStyle____relay_model_instance$fragmentType, + TodoDescriptionStyle____relay_model_instance$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql.js index 88e527d7706d2..0d7f2dd486e03 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription____relay_model_instance.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -22,7 +22,7 @@ import type { TodoDescription } from "../TodoDescription.js"; import type { FragmentType } from "relay-runtime"; declare export opaque type TodoDescription____relay_model_instance$fragmentType: FragmentType; export type TodoDescription____relay_model_instance$data = {| - +__relay_model_instance: ?TodoDescription, + +__relay_model_instance: TodoDescription, +$fragmentType: TodoDescription____relay_model_instance$fragmentType, |}; export type TodoDescription____relay_model_instance$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription_text_style.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription_text_style.graphql.js 
new file mode 100644 index 0000000000000..79eda6ca7b0db --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoDescription_text_style.graphql.js @@ -0,0 +1,77 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<084478a42918e37aabf6f3535c970f4b>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { TodoDescription____relay_model_instance$data } from "./TodoDescription____relay_model_instance.graphql"; +import type { FragmentType } from "relay-runtime"; +import {color as todoDescriptionColorResolverType} from "../TodoDescription.js"; +// Type assertion validating that `todoDescriptionColorResolverType` resolver is correctly implemented. +// A type error here indicates that the type signature of the resolver module is incorrect. +(todoDescriptionColorResolverType: ( + __relay_model_instance: TodoDescription____relay_model_instance$data['__relay_model_instance'], +) => ?mixed); +declare export opaque type TodoDescription_text_style$fragmentType: FragmentType; +export type TodoDescription_text_style$data = {| + +color: $NonMaybeType>, + +$fragmentType: TodoDescription_text_style$fragmentType, +|}; +export type TodoDescription_text_style$key = { + +$data?: TodoDescription_text_style$data, + +$fragmentSpreads: TodoDescription_text_style$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "TodoDescription_text_style", + "selections": [ + { + "kind": "RequiredField", + "field": { + "alias": null, + "args": null, + "fragment": { + "args": null, + "kind": "FragmentSpread", + "name": "TodoDescription____relay_model_instance" + }, + "kind": "RelayResolver", + "name": "color", + "resolverModule": require('relay-runtime/experimental').resolverDataInjector(require('./TodoDescription____relay_model_instance.graphql'), require('./../TodoDescription').color, '__relay_model_instance', true), + "path": "color" + }, + "action": "THROW", + "path": "color" + } + ], + "type": "TodoDescription", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "52e426266439c85da8dce2dda6133fe2"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + TodoDescription_text_style$fragmentType, + TodoDescription_text_style$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedID.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedID.graphql.js new file mode 100644 index 0000000000000..9d655762370c5 --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedID.graphql.js @@ -0,0 +1,64 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<8560e5228aa392e8a499147a4649ee70>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type TodoModelCapitalizedID$fragmentType: FragmentType; +export type TodoModelCapitalizedID$data = {| + +id: string, + +$fragmentType: TodoModelCapitalizedID$fragmentType, +|}; +export type TodoModelCapitalizedID$key = { + +$data?: TodoModelCapitalizedID$data, + +$fragmentSpreads: TodoModelCapitalizedID$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "TodoModelCapitalizedID", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "TodoModel", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "3ccdcd3cdec1ef56528d736c7f311176"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + TodoModelCapitalizedID$fragmentType, + TodoModelCapitalizedID$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedIDLegacy.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedIDLegacy.graphql.js new file mode 100644 index 0000000000000..99258e868c06c --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModelCapitalizedIDLegacy.graphql.js @@ -0,0 +1,64 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<23a3fddb74208364d3a70a8a329b4537>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type TodoModelCapitalizedIDLegacy$fragmentType: FragmentType; +export type TodoModelCapitalizedIDLegacy$data = {| + +id: string, + +$fragmentType: TodoModelCapitalizedIDLegacy$fragmentType, +|}; +export type TodoModelCapitalizedIDLegacy$key = { + +$data?: TodoModelCapitalizedIDLegacy$data, + +$fragmentSpreads: TodoModelCapitalizedIDLegacy$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "TodoModelCapitalizedIDLegacy", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ] + } + ], + "type": "TodoModel", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "77e5adaeb6123b858f391b8f3442675c"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + TodoModelCapitalizedIDLegacy$fragmentType, + TodoModelCapitalizedIDLegacy$data, +>*/); diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql.js index 1a9a68b4744a2..2c5523fce0469 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel____relay_model_instance.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<9772158c21f86b0aae601fcb550cc634>> + * @generated SignedSource<<5907090a31d5a7bf8e5b42c2376ef7cf>> * @flow * 
@lightSyntaxTransform * @nogrep @@ -19,8 +19,7 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoModel__id$data } from "./TodoModel__id.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {TodoModel as todoModelRelayModelInstanceResolverType} from "../TodoModel.js"; // Type assertion validating that `todoModelRelayModelInstanceResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. @@ -29,7 +28,7 @@ import {TodoModel as todoModelRelayModelInstanceResolverType} from "../TodoModel ) => LiveState); declare export opaque type TodoModel____relay_model_instance$fragmentType: FragmentType; export type TodoModel____relay_model_instance$data = {| - +__relay_model_instance: ?$Call<$Call<((...empty[]) => R) => R, typeof todoModelRelayModelInstanceResolverType>["read"]>, + +__relay_model_instance: $NonMaybeType["read"]>>, +$fragmentType: TodoModel____relay_model_instance$fragmentType, |}; export type TodoModel____relay_model_instance$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql.js deleted file mode 100644 index f178c10d9e13a..0000000000000 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__fancy_description$normalization.graphql.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<2e2c61de46e38c2f8a80fc8387a3585e>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -import type { TodoDescription } from "../TodoDescription.js"; -export type TodoModel__fancy_description$normalization = {| - +__relay_model_instance: ?TodoDescription, -|}; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "TodoModel__fancy_description$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -}; - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__many_fancy_descriptions$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__many_fancy_descriptions$normalization.graphql.js deleted file mode 100644 index 05786197b2db0..0000000000000 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoModel__many_fancy_descriptions$normalization.graphql.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- * - * @oncall relay - * - * @generated SignedSource<<584a7fa65dd767305bedbe9bce359adf>> - * @flow - * @lightSyntaxTransform - * @nogrep - */ - -/* eslint-disable */ - -'use strict'; - -/*:: -import type { NormalizationSplitOperation } from 'relay-runtime'; - -import type { TodoDescription } from "../TodoDescription.js"; -export type TodoModel__many_fancy_descriptions$normalization = {| - +__relay_model_instance: ?TodoDescription, -|}; - -*/ - -var node/*: NormalizationSplitOperation*/ = { - "kind": "SplitOperation", - "metadata": {}, - "name": "TodoModel__many_fancy_descriptions$normalization", - "selections": [ - { - "kind": "ClientExtension", - "selections": [ - { - "alias": null, - "args": null, - "kind": "ScalarField", - "name": "__relay_model_instance", - "storageKey": null - } - ] - } - ] -}; - -module.exports = node; diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoTextResolverFragment.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoTextResolverFragment.graphql.js index cf6d92a930044..d15b8fce27908 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoTextResolverFragment.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/TodoTextResolverFragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<87ee8568fe0f1a1dc9fb8fe37fb86f54>> + * @generated SignedSource<<922430240481d7cdd5440ddf824c6772>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { TodoSelfResolverFragment$key } from "./TodoSelfResolverFragment.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {self as todoSelfResolverType} from "../TodoSelfResolver.js"; // Type assertion validating 
that `todoSelfResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. (todoSelfResolverType: ( rootKey: TodoSelfResolverFragment$key, -) => LiveState); +) => LiveState); declare export opaque type TodoTextResolverFragment$fragmentType: FragmentType; export type TodoTextResolverFragment$data = {| - +self: ?$Call<$Call<((...empty[]) => R) => R, typeof todoSelfResolverType>["read"]>, + +self: ?ReturnType["read"]>, +$fragmentType: TodoTextResolverFragment$fragmentType, |}; export type TodoTextResolverFragment$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserAlwaysThrowsTransitivelyResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserAlwaysThrowsTransitivelyResolver.graphql.js index e587c676fd91e..d66441a6c9753 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserAlwaysThrowsTransitivelyResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserAlwaysThrowsTransitivelyResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {always_throws as userAlwaysThrowsResolverType} from "../UserAlwaysThrows // A type error here indicates that the type signature of the resolver module is incorrect. 
(userAlwaysThrowsResolverType: ( rootKey: UserAlwaysThrowsResolver$key, -) => mixed); +) => ?string); declare export opaque type UserAlwaysThrowsTransitivelyResolver$fragmentType: FragmentType; export type UserAlwaysThrowsTransitivelyResolver$data = {| - +always_throws: ?$Call<((...empty[]) => R) => R, typeof userAlwaysThrowsResolverType>, + +always_throws: ?string, +$fragmentType: UserAlwaysThrowsTransitivelyResolver$fragmentType, |}; export type UserAlwaysThrowsTransitivelyResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserBestFriendShoutedGreetingResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserBestFriendShoutedGreetingResolver.graphql.js index 338d125b51869..e19ea8fd9a07b 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserBestFriendShoutedGreetingResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserBestFriendShoutedGreetingResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<037c019c59698cc341904069b290c776>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,14 +25,14 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); declare export opaque type UserBestFriendShoutedGreetingResolver$fragmentType: FragmentType; export type UserBestFriendShoutedGreetingResolver$data = {| +friends: ?{| +edges: ?$ReadOnlyArray((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, |}, |}>, |}, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserConstantDependentResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserConstantDependentResolver.graphql.js index b6609f30dcc28..a05e13bca0623 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserConstantDependentResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserConstantDependentResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {constant as userConstantResolverType} from "../UserConstantResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userConstantResolverType: ( rootKey: UserConstantResolver$key, -) => mixed); +) => ?number); declare export opaque type UserConstantDependentResolver$fragmentType: FragmentType; export type UserConstantDependentResolver$data = {| - +constant: ?$Call<((...empty[]) => R) => R, typeof userConstantResolverType>, + +constant: ?number, +$fragmentType: UserConstantDependentResolver$fragmentType, |}; export type UserConstantDependentResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserNameAndCounterSuspendsWhenOdd.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserNameAndCounterSuspendsWhenOdd.graphql.js index 2880be729be5f..72e301eb1db76 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserNameAndCounterSuspendsWhenOdd.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserNameAndCounterSuspendsWhenOdd.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<043d2d856d351546d18a2a871f2c29ed>> + * @generated SignedSource<<2cfb4a04b6dc7ebece41e36dfd9ba534>> * @flow * @lightSyntaxTransform * @nogrep @@ -19,17 +19,16 @@ /*:: import type { Fragment, ReaderFragment } from 'relay-runtime'; import type { CounterSuspendsWhenOdd$key } from "./CounterSuspendsWhenOdd.graphql"; -import type { LiveState } from "relay-runtime/store/experimental-live-resolvers/LiveResolverStore"; -import type { FragmentType } from "relay-runtime"; +import type { LiveState, FragmentType } from "relay-runtime"; import {counter_suspends_when_odd as queryCounterSuspendsWhenOddResolverType} from "../CounterSuspendsWhenOdd.js"; // Type assertion validating that `queryCounterSuspendsWhenOddResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
(queryCounterSuspendsWhenOddResolverType: ( rootKey: CounterSuspendsWhenOdd$key, -) => LiveState); +) => LiveState); declare export opaque type UserNameAndCounterSuspendsWhenOdd$fragmentType: FragmentType; export type UserNameAndCounterSuspendsWhenOdd$data = {| - +counter_suspends_when_odd: ?$Call<$Call<((...empty[]) => R) => R, typeof queryCounterSuspendsWhenOddResolverType>["read"]>, + +counter_suspends_when_odd: ?number, +me: ?{| +name: ?string, |}, diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql.js index e519d36ea6737..c3393e9c8a768 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserProfilePictureUriSuspendsWhenTheCounterIsOdd.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7d7c22d5cbddf70142ec990fb92c1f89>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -26,7 +26,7 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. (userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); import {user_profile_picture_uri_with_scale as userUserProfilePictureUriWithScaleResolverType} from "../UserProfilePictureResolver.js"; // Type assertion validating that `userUserProfilePictureUriWithScaleResolverType` resolver is correctly implemented. // A type error here indicates that the type signature of the resolver module is incorrect. 
@@ -35,11 +35,11 @@ import {user_profile_picture_uri_with_scale as userUserProfilePictureUriWithScal args: {| scale: ?number, |}, -) => mixed); +) => ?string); declare export opaque type UserProfilePictureUriSuspendsWhenTheCounterIsOdd$fragmentType: FragmentType; export type UserProfilePictureUriSuspendsWhenTheCounterIsOdd$data = {| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, - +uri: ?$Call<((...empty[]) => R) => R, typeof userUserProfilePictureUriWithScaleResolverType>, + +greeting: ?string, + +uri: ?string, +$fragmentType: UserProfilePictureUriSuspendsWhenTheCounterIsOdd$fragmentType, |}; export type UserProfilePictureUriSuspendsWhenTheCounterIsOdd$key = { @@ -76,7 +76,7 @@ var node/*: ReaderFragment*/ = { }, { "alias": "uri", - "args": null, + "args": [], "fragment": { "args": [ { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserShoutedGreetingResolver.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserShoutedGreetingResolver.graphql.js index a1dbcee85df3b..8011946048e01 100644 --- a/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserShoutedGreetingResolver.graphql.js +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/UserShoutedGreetingResolver.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<2cc8134935e56665b110562006cf1846>> * @flow * @lightSyntaxTransform * @nogrep @@ -25,10 +25,10 @@ import {greeting as userGreetingResolverType} from "../UserGreetingResolver.js"; // A type error here indicates that the type signature of the resolver module is incorrect. 
(userGreetingResolverType: ( rootKey: UserGreetingResolver$key, -) => mixed); +) => ?string); declare export opaque type UserShoutedGreetingResolver$fragmentType: FragmentType; export type UserShoutedGreetingResolver$data = {| - +greeting: ?$Call<((...empty[]) => R) => R, typeof userGreetingResolverType>, + +greeting: ?string, +$fragmentType: UserShoutedGreetingResolver$fragmentType, |}; export type UserShoutedGreetingResolver$key = { diff --git a/packages/relay-runtime/store/__tests__/resolvers/__generated__/User__client_object$normalization.graphql.js b/packages/relay-runtime/store/__tests__/resolvers/__generated__/User__client_object$normalization.graphql.js new file mode 100644 index 0000000000000..4b5ca7165bfbf --- /dev/null +++ b/packages/relay-runtime/store/__tests__/resolvers/__generated__/User__client_object$normalization.graphql.js @@ -0,0 +1,48 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<22dc25b8f52894b80d06618f7066a3ce>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +export type User__client_object$normalization = {| + +description: ?string, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "User__client_object$normalization", + "selections": [ + { + "kind": "ClientExtension", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "description", + "storageKey": null + } + ] + } + ] +}; + +module.exports = node; diff --git a/packages/relay-runtime/store/createRelayContext.js b/packages/relay-runtime/store/createRelayContext.js index e1bd06510675f..87074907d1a10 100644 --- a/packages/relay-runtime/store/createRelayContext.js +++ b/packages/relay-runtime/store/createRelayContext.js @@ -18,11 +18,11 @@ const invariant = require('invariant'); // Ideally, we'd just import the type of the react module, but this causes Flow // problems. -type React = { +type React = $ReadOnly<{ createContext: createContext, version: string, ... -}; +}>; let relayContext: ?React$Context; let firstReact: ?React; diff --git a/packages/relay-runtime/store/defaultRelayFieldLogger.js b/packages/relay-runtime/store/defaultRelayFieldLogger.js new file mode 100644 index 0000000000000..689992ef965c0 --- /dev/null +++ b/packages/relay-runtime/store/defaultRelayFieldLogger.js @@ -0,0 +1,24 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {RelayFieldLogger} from './RelayStoreTypes'; + +const defaultRelayFieldLogger: RelayFieldLogger = event => { + if (__DEV__ && event.kind === 'missing_field.log') { + throw new Error( + 'Relay Environment Configuration Error (dev only): `@required(action: LOG)` requires that the Relay Environment be configured with a `relayFieldLogger`.', + ); + } +}; + +module.exports = defaultRelayFieldLogger; diff --git a/packages/relay-runtime/store/defaultRequiredFieldLogger.js b/packages/relay-runtime/store/defaultRequiredFieldLogger.js deleted file mode 100644 index 025a127194e92..0000000000000 --- a/packages/relay-runtime/store/defaultRequiredFieldLogger.js +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {RequiredFieldLogger} from './RelayStoreTypes'; - -const defaultRequiredFieldLogger: RequiredFieldLogger = event => { - if (__DEV__ && event.kind === 'missing_field.log') { - throw new Error( - 'Relay Environment Configuration Error (dev only): `@required(action: LOG)` requires that the Relay Environment be configured with a `requiredFieldLogger`.', - ); - } -}; - -module.exports = defaultRequiredFieldLogger; diff --git a/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverCache.js b/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverCache.js index 715e1209fb9da..c339bfd35fbdc 100644 --- a/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverCache.js +++ b/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverCache.js @@ -17,13 +17,11 @@ import type { ResolverNormalizationInfo, } from '../../util/ReaderNode'; import type {DataID, Variables} from 
'../../util/RelayRuntimeTypes'; -import type {NormalizationOptions} from '../RelayResponseNormalizer'; import type { DataIDSet, MutableRecordSource, Record, RecordSource, - RelayResolverError, SingularReaderSelector, Snapshot, } from '../RelayStoreTypes'; @@ -33,10 +31,12 @@ import type { ResolverCache, } from '../ResolverCache'; import type LiveResolverStore from './LiveResolverStore'; -import type {LiveState} from './LiveResolverStore'; +import type {LiveState} from 'relay-runtime'; const recycleNodesInto = require('../../util/recycleNodesInto'); const {RELAY_LIVE_RESOLVER} = require('../../util/RelayConcreteNode'); +const RelayFeatureFlags = require('../../util/RelayFeatureFlags'); +const shallowFreeze = require('../../util/shallowFreeze'); const {generateClientID, generateClientObjectClientID} = require('../ClientID'); const RelayModernRecord = require('../RelayModernRecord'); const {createNormalizationSelector} = require('../RelayModernSelector'); @@ -63,6 +63,7 @@ const RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY = const RELAY_RESOLVER_LIVE_STATE_VALUE = '__resolverLiveStateValue'; const RELAY_RESOLVER_LIVE_STATE_DIRTY = '__resolverLiveStateDirty'; const RELAY_RESOLVER_RECORD_TYPENAME = '__RELAY_RESOLVER__'; +const MODEL_PROPERTY_NAME = '__relay_model_instance'; /** * An experimental fork of store/ResolverCache.js intended to let us experiment @@ -115,7 +116,7 @@ class LiveResolverCache implements ResolverCache { ): [ ?T /* Answer */, ?DataID /* Seen record */, - ?RelayResolverError, + ?Error, ?Snapshot, ?DataID /* ID of record containing a suspended Live field */, ?DataIDSet /** Set of dirty records after read */, @@ -144,7 +145,7 @@ class LiveResolverCache implements ResolverCache { // Clean up any existing subscriptions before creating the new subscription // to avoid being double subscribed, or having a dangling subscription in // the event of an error during subscription. 
- this._maybeUnsubscribeFromLiveState(linkedRecord); + maybeUnsubscribeFromLiveState(linkedRecord); } linkedID = linkedID ?? generateClientID(recordID, storageKey); linkedRecord = RelayModernRecord.create( @@ -154,8 +155,19 @@ class LiveResolverCache implements ResolverCache { const evaluationResult = evaluate(); + RelayModernRecord.setValue( + linkedRecord, + RELAY_RESOLVER_SNAPSHOT_KEY, + evaluationResult.snapshot, + ); + RelayModernRecord.setValue( + linkedRecord, + RELAY_RESOLVER_ERROR_KEY, + evaluationResult.error, + ); + if (field.kind === RELAY_LIVE_RESOLVER) { - if (evaluationResult.resolverResult != undefined) { + if (evaluationResult.resolverResult != null) { if (__DEV__) { invariant( isLiveStateValue(evaluationResult.resolverResult), @@ -164,6 +176,10 @@ class LiveResolverCache implements ResolverCache { field.path, ); } + invariant( + evaluationResult.error == null, + 'Did not expect resolver to have both a value and an error.', + ); const liveState: LiveState = // $FlowFixMe[incompatible-type] - casting mixed evaluationResult.resolverResult; @@ -203,16 +219,7 @@ class LiveResolverCache implements ResolverCache { variables, ); } - RelayModernRecord.setValue( - linkedRecord, - RELAY_RESOLVER_SNAPSHOT_KEY, - evaluationResult.snapshot, - ); - RelayModernRecord.setValue( - linkedRecord, - RELAY_RESOLVER_ERROR_KEY, - evaluationResult.error, - ); + recordSource.set(linkedID, linkedRecord); // Link the resolver value record to the resolver field of the record being read: @@ -269,9 +276,9 @@ class LiveResolverCache implements ResolverCache { ); } - updatedDataIDs = this._setResolverValue( + updatedDataIDs = this._setLiveResolverValue( linkedRecord, - liveState.read(), + liveState, field, variables, ); @@ -290,9 +297,15 @@ class LiveResolverCache implements ResolverCache { const answer: T = this._getResolverValue(linkedRecord); // $FlowFixMe[incompatible-type] - casting mixed - const snapshot: ?Snapshot = linkedRecord[RELAY_RESOLVER_SNAPSHOT_KEY]; + const 
snapshot: ?Snapshot = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_SNAPSHOT_KEY, + ); // $FlowFixMe[incompatible-type] - casting mixed - const error: ?RelayResolverError = linkedRecord[RELAY_RESOLVER_ERROR_KEY]; + const error: ?Error = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_ERROR_KEY, + ); let suspenseID = null; @@ -326,18 +339,6 @@ class LiveResolverCache implements ResolverCache { }); } - _maybeUnsubscribeFromLiveState(linkedRecord: Record) { - // If there's an existing subscription, unsubscribe. - // $FlowFixMe[incompatible-type] - casting mixed - const previousUnsubscribe: () => void = RelayModernRecord.getValue( - linkedRecord, - RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY, - ); - if (previousUnsubscribe != null) { - previousUnsubscribe(); - } - } - // Register a new Live State object in the store, subscribing to future // updates. _setLiveStateValue( @@ -362,9 +363,9 @@ class LiveResolverCache implements ResolverCache { ); // Store the current value, for this read, and future cached reads. - const updatedDataIDs = this._setResolverValue( + const updatedDataIDs = this._setLiveResolverValue( linkedRecord, - liveState.read(), + liveState, field, variables, ); @@ -400,7 +401,12 @@ class LiveResolverCache implements ResolverCache { return; } - if (!(RELAY_RESOLVER_LIVE_STATE_VALUE in currentRecord)) { + if ( + !RelayModernRecord.hasValue( + currentRecord, + RELAY_RESOLVER_LIVE_STATE_VALUE, + ) + ) { warning( false, 'Unexpected callback for a incomplete live resolver record (__id: `%s`). The record has missing live state value. 
' + @@ -467,6 +473,28 @@ class LiveResolverCache implements ResolverCache { } } + _setLiveResolverValue( + resolverRecord: Record, + liveValue: LiveState, + field: ReaderRelayResolver | ReaderRelayLiveResolver, + variables: Variables, + ): DataIDSet | null { + let value: null | mixed = null; + let resolverError: null | mixed = null; + try { + value = liveValue.read(); + } catch (e) { + resolverError = e; + } + + RelayModernRecord.setValue( + resolverRecord, + RELAY_RESOLVER_ERROR_KEY, + resolverError, + ); + return this._setResolverValue(resolverRecord, value, field, variables); + } + _setResolverValue( resolverRecord: Record, value: mixed, @@ -475,7 +503,11 @@ class LiveResolverCache implements ResolverCache { ): DataIDSet | null { const normalizationInfo = field.normalizationInfo; let updatedDataIDs = null; - if (value != null && normalizationInfo != null) { + if ( + value != null && + normalizationInfo != null && + !isSuspenseSentinel(value) + ) { let resolverValue: DataID | Array; const prevOutputTypeRecordIDs = getOutputTypeRecordIDs(resolverRecord); @@ -495,13 +527,15 @@ class LiveResolverCache implements ResolverCache { const nextSource = RelayRecordSource.create(); for (let ii = 0; ii < value.length; ii++) { const currentValue = value[ii]; + // TODO: T184433715 We currently break with the GraphQL spec and filter out null items in lists. if (currentValue == null) { continue; } invariant( - typeof currentValue == 'object', + typeof currentValue === 'object', '_setResolverValue: Expected object value as the payload for the @outputType resolver.', ); + // The `id` of the nested object (@outputType resolver) // is localized to it's resolver record. 
To ensure that // there is only one path to the records created from the @@ -514,14 +548,16 @@ class LiveResolverCache implements ResolverCache { RelayModernRecord.getDataID(resolverRecord), ii, ); - const source = normalizeOutputTypeValue( + + const source = this._normalizeOutputTypeValue( outputTypeDataID, currentValue, variables, normalizationInfo, - this._store.__getNormalizationOptions([field.path, String(ii)]), + [field.path, String(ii)], typename, ); + for (const recordID of source.getRecordIDs()) { // For plural case we'll keep adding the `item` records to the `nextSource` // so we can publish all of them at the same time: clean up all records, @@ -541,7 +577,7 @@ class LiveResolverCache implements ResolverCache { ); } else { invariant( - typeof value == 'object', + typeof value === 'object', '_setResolverValue: Expected object value as the payload for the @outputType resolver.', ); const typename = getConcreteTypename(normalizationInfo, value); @@ -550,12 +586,12 @@ class LiveResolverCache implements ResolverCache { typename, RelayModernRecord.getDataID(resolverRecord), ); - const nextSource = normalizeOutputTypeValue( + const nextSource = this._normalizeOutputTypeValue( outputTypeDataID, value, variables, normalizationInfo, - this._store.__getNormalizationOptions([field.path]), + [field.path], typename, ); for (const recordID of nextSource.getRecordIDs()) { @@ -578,12 +614,14 @@ class LiveResolverCache implements ResolverCache { nextOutputTypeRecordIDs, ); + shallowFreeze(resolverValue); RelayModernRecord.setValue( resolverRecord, RELAY_RESOLVER_VALUE_KEY, resolverValue, ); } else { + shallowFreeze(value); // For "classic" resolvers (or if the value is nullish), we are just setting their // value as is. 
RelayModernRecord.setValue( @@ -624,7 +662,7 @@ class LiveResolverCache implements ResolverCache { continue; } for (const anotherRecordID of recordSet) { - this._markInvalidatedResolverRecord(anotherRecordID, recordSource); + markInvalidatedResolverRecord(anotherRecordID, recordSource); if (!visited.has(anotherRecordID)) { recordsToVisit.push(anotherRecordID); } @@ -634,28 +672,6 @@ class LiveResolverCache implements ResolverCache { } } - _markInvalidatedResolverRecord( - dataID: DataID, - recordSource: MutableRecordSource, // Written to - ) { - const record = recordSource.get(dataID); - if (!record) { - warning( - false, - 'Expected a resolver record with ID %s, but it was missing.', - dataID, - ); - return; - } - const nextRecord = RelayModernRecord.clone(record); - RelayModernRecord.setValue( - nextRecord, - RELAY_RESOLVER_INVALIDATION_KEY, - true, - ); - recordSource.set(dataID, nextRecord); - } - _isInvalid( record: Record, getDataForResolverFragment: GetDataForResolverFragmentFn, @@ -684,9 +700,85 @@ class LiveResolverCache implements ResolverCache { if (recycled !== originalInputs) { return true; } + + if (RelayFeatureFlags.MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD) { + // This record does not need to be recomputed, we can reuse the cached value. + // For subsequent reads we can mark this record as "clean" so that they will + // not need to re-read the fragment. + const nextRecord = RelayModernRecord.clone(record); + RelayModernRecord.setValue( + nextRecord, + RELAY_RESOLVER_INVALIDATION_KEY, + false, + ); + + const recordSource = this._getRecordSource(); + recordSource.set(RelayModernRecord.getDataID(record), nextRecord); + } + return false; } + // Returns a normalized version (RecordSource) of the @outputType, + // containing only "weak" records. 
+ _normalizeOutputTypeValue( + outputTypeDataID: DataID, + value: {+[key: string]: mixed}, + variables: Variables, + normalizationInfo: ResolverNormalizationInfo, + fieldPath: Array, + typename: string, + ): RecordSource { + const source = RelayRecordSource.create(); + + switch (normalizationInfo.kind) { + case 'OutputType': { + const record = RelayModernRecord.create(outputTypeDataID, typename); + source.set(outputTypeDataID, record); + const selector = createNormalizationSelector( + normalizationInfo.normalizationNode, + outputTypeDataID, + variables, + ); + + const normalizationOptions = + this._store.__getNormalizationOptions(fieldPath); + // The resulted `source` is the normalized version of the + // resolver's (@outputType) value. + // All records in the `source` should have IDs that + // is "prefix-ed" with the parent resolver record `ID` + // and they don't expect to have a "strong" identifier. + return normalize( + source, + selector, + // normalize does not mutate values, but it's impractical to type this + // argument as readonly. For now we'll excuse ourselves and pass a + // read only type + // $FlowFixMe[incompatible-variance] + value, + normalizationOptions, + ).source; + } + // For weak models we have a simpler case. We simply need to update a + // single field on the record. + case 'WeakModel': { + const record = RelayModernRecord.create(outputTypeDataID, typename); + + RelayModernRecord.setValue(record, MODEL_PROPERTY_NAME, value); + + source.set(outputTypeDataID, record); + return source; + } + default: + (normalizationInfo.kind: empty); + invariant( + false, + 'LiveResolverCache: Unexpected normalization info kind `%s`.', + normalizationInfo.kind, + ); + } + } + // If a given record does not exist, creates an empty record consisting of // just an `id` field, along with a namespaced `__id` field and insert it into // the store. 
@@ -702,19 +794,10 @@ class LiveResolverCache implements ResolverCache { } unsubscribeFromLiveResolverRecords(invalidatedDataIDs: Set): void { - if (invalidatedDataIDs.size === 0) { - return; - } - - for (const dataID of invalidatedDataIDs) { - const record = this._getRecordSource().get(dataID); - if ( - record != null && - RelayModernRecord.getType(record) === RELAY_RESOLVER_RECORD_TYPENAME - ) { - this._maybeUnsubscribeFromLiveState(record); - } - } + return unsubscribeFromLiveResolverRecordsImpl( + this._getRecordSource(), + invalidatedDataIDs, + ); } // Given the set of possible invalidated DataID @@ -728,45 +811,13 @@ class LiveResolverCache implements ResolverCache { for (const dataID of invalidatedDataIDs) { const record = this._getRecordSource().get(dataID); - if ( - record != null && - RelayModernRecord.getType(record) === RELAY_RESOLVER_RECORD_TYPENAME - ) { + if (record != null && isResolverRecord(record)) { this._getRecordSource().delete(dataID); } } } } -// Returns a normalized version (RecordSource) of the @outputType, -// containing only "weak" records. -function normalizeOutputTypeValue( - outputTypeDataID: DataID, - value: {...}, - variables: Variables, - resolverNormalizationInfo: ResolverNormalizationInfo, - normalizationOptions: NormalizationOptions, - typename: string, -): RecordSource { - const source = RelayRecordSource.create(); - source.set( - outputTypeDataID, - RelayModernRecord.create(outputTypeDataID, typename), - ); - const selector = createNormalizationSelector( - resolverNormalizationInfo.normalizationNode, - outputTypeDataID, - variables, - ); - - // The resulted `source` is the normalized version of the - // resolver's (@outputType) value. - // All records in the `source` should have IDs that - // is "prefix-ed" with the parent resolver record `ID` - // and they don't expect to have a "strong" identifier. 
- return normalize(source, selector, value, normalizationOptions).source; -} - // Update the `currentSource` with the set of new records from the // resolver with @outputType. // This method will return a set of `updatedDataIDs` IDs. @@ -806,7 +857,11 @@ function updateCurrentSource( const updatedRecord = RelayModernRecord.update(currentRecord, nextRecord); if (updatedRecord !== currentRecord) { updatedDataIDs.add(recordID); - currentSource.set(recordID, nextRecord); + currentSource.set(recordID, updatedRecord); + // We also need to mark all linked records from the current record as invalidated, + // so that the next time these records are accessed in RelayReader, + // they will be re-read and re-evaluated by the LiveResolverCache and re-subscribed. + markInvalidatedLinkedResolverRecords(currentRecord, currentSource); } } else { currentSource.set(recordID, nextRecord); @@ -816,6 +871,91 @@ function updateCurrentSource( return updatedDataIDs; } +function getAllLinkedRecordIds(record: Record): DataIDSet { + const linkedRecordIDs = new Set(); + RelayModernRecord.getFields(record).forEach(field => { + if (RelayModernRecord.hasLinkedRecordID(record, field)) { + const linkedRecordID = RelayModernRecord.getLinkedRecordID(record, field); + if (linkedRecordID != null) { + linkedRecordIDs.add(linkedRecordID); + } + } else if (RelayModernRecord.hasLinkedRecordIDs(record, field)) { + RelayModernRecord.getLinkedRecordIDs(record, field)?.forEach( + linkedRecordID => { + if (linkedRecordID != null) { + linkedRecordIDs.add(linkedRecordID); + } + }, + ); + } + }); + + return linkedRecordIDs; +} + +function markInvalidatedResolverRecord( + dataID: DataID, + recordSource: MutableRecordSource, // Written to +) { + const record = recordSource.get(dataID); + if (!record) { + warning( + false, + 'Expected a resolver record with ID %s, but it was missing.', + dataID, + ); + return; + } + const nextRecord = RelayModernRecord.clone(record); + RelayModernRecord.setValue(nextRecord, 
RELAY_RESOLVER_INVALIDATION_KEY, true); + recordSource.set(dataID, nextRecord); +} + +function markInvalidatedLinkedResolverRecords( + record: Record, + recordSource: MutableRecordSource, +): void { + const currentLinkedDataIDs = getAllLinkedRecordIds(record); + for (const recordID of currentLinkedDataIDs) { + const record = recordSource.get(recordID); + if (record != null && isResolverRecord(record)) { + markInvalidatedResolverRecord(recordID, recordSource); + } + } +} + +function unsubscribeFromLiveResolverRecordsImpl( + recordSource: RecordSource, + invalidatedDataIDs: $ReadOnlySet, +): void { + if (invalidatedDataIDs.size === 0) { + return; + } + + for (const dataID of invalidatedDataIDs) { + const record = recordSource.get(dataID); + if (record != null && isResolverRecord(record)) { + maybeUnsubscribeFromLiveState(record); + } + } +} + +function isResolverRecord(record: Record): boolean { + return RelayModernRecord.getType(record) === RELAY_RESOLVER_RECORD_TYPENAME; +} + +function maybeUnsubscribeFromLiveState(linkedRecord: Record): void { + // If there's an existing subscription, unsubscribe. 
+ // $FlowFixMe[incompatible-type] - casting mixed + const previousUnsubscribe: () => void = RelayModernRecord.getValue( + linkedRecord, + RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY, + ); + if (previousUnsubscribe != null) { + previousUnsubscribe(); + } +} + function expectRecord(source: RecordSource, recordID: DataID): Record { const record = source.get(recordID); invariant( @@ -853,4 +993,5 @@ function getConcreteTypename( module.exports = { LiveResolverCache, getUpdatedDataIDs, + RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY, }; diff --git a/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js b/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js index 37ae14b044ac2..4ce54a606a471 100644 --- a/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js +++ b/packages/relay-runtime/store/experimental-live-resolvers/LiveResolverStore.js @@ -23,8 +23,6 @@ import type { OperationAvailability, OperationDescriptor, OperationLoader, - ReactFlightPayloadDeserializer, - ReactFlightServerErrorHandler, RecordSource, RequestDescriptor, Scheduler, @@ -47,17 +45,18 @@ const RelayModernRecord = require('../RelayModernRecord'); const RelayOptimisticRecordSource = require('../RelayOptimisticRecordSource'); const RelayReader = require('../RelayReader'); const RelayReferenceMarker = require('../RelayReferenceMarker'); -const RelayStoreReactFlightUtils = require('../RelayStoreReactFlightUtils'); const RelayStoreSubscriptions = require('../RelayStoreSubscriptions'); const RelayStoreUtils = require('../RelayStoreUtils'); const {ROOT_ID, ROOT_TYPE} = require('../RelayStoreUtils'); -const {LiveResolverCache, getUpdatedDataIDs} = require('./LiveResolverCache'); +const { + LiveResolverCache, + RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY, + getUpdatedDataIDs, +} = require('./LiveResolverCache'); const invariant = require('invariant'); -export type LiveState<+T> = { - read(): T, - subscribe(cb: () => void): () => void, -}; 
+// Provided for backward compatibility. Prefer using the version exported from 'relay-runtime'. +export type {LiveState} from '../RelayStoreTypes'; // HACK // The type of Store is defined using an opaque type that only RelayModernStore @@ -111,8 +110,6 @@ class LiveResolverStore implements Store { _updatedRecordIDs: DataIDSet; _actorIdentifier: ?ActorIdentifier; _treatMissingFieldsAsNull: boolean; - _reactFlightPayloadDeserializer: ?ReactFlightPayloadDeserializer; - _reactFlightServerErrorHandler: ?ReactFlightServerErrorHandler; _shouldProcessClientComponents: boolean; constructor( @@ -125,8 +122,6 @@ class LiveResolverStore implements Store { log?: ?LogFunction, operationLoader?: ?OperationLoader, queryCacheExpirationTime?: ?number, - reactFlightPayloadDeserializer?: ?ReactFlightPayloadDeserializer, - reactFlightServerErrorHandler?: ?ReactFlightServerErrorHandler, shouldProcessClientComponents?: ?boolean, treatMissingFieldsAsNull?: ?boolean, }, @@ -170,10 +165,6 @@ class LiveResolverStore implements Store { this._updatedRecordIDs = new Set(); this._treatMissingFieldsAsNull = options?.treatMissingFieldsAsNull ?? false; this._actorIdentifier = options?.actorIdentifier; - this._reactFlightPayloadDeserializer = - options?.reactFlightPayloadDeserializer; - this._reactFlightServerErrorHandler = - options?.reactFlightServerErrorHandler; this._shouldProcessClientComponents = options?.shouldProcessClientComponents ?? 
false; @@ -212,7 +203,16 @@ class LiveResolverStore implements Store { * fluxStore.dispatch = wrapped; */ batchLiveStateUpdates(callback: () => void) { - this._resolverCache.batchLiveStateUpdates(callback); + if (this.__log != null) { + this.__log({name: 'liveresolver.batch.start'}); + } + try { + this._resolverCache.batchLiveStateUpdates(callback); + } finally { + if (this.__log != null) { + this.__log({name: 'liveresolver.batch.end'}); + } + } } check( @@ -708,6 +708,17 @@ class LiveResolverStore implements Store { for (let ii = 0; ii < storeIDs.length; ii++) { const dataID = storeIDs[ii]; if (!references.has(dataID)) { + const record = this._recordSource.get(dataID); + if (record != null) { + const maybeResolverSubscription = RelayModernRecord.getValue( + record, + RELAY_RESOLVER_LIVE_STATE_SUBSCRIPTION_KEY, + ); + if (maybeResolverSubscription != null) { + // $FlowFixMe - this value if it is not null, it is a function + maybeResolverSubscription(); + } + } this._recordSource.remove(dataID); } } @@ -724,8 +735,6 @@ class LiveResolverStore implements Store { path, getDataID: this._getDataID, treatMissingFieldsAsNull: this._treatMissingFieldsAsNull, - reactFlightPayloadDeserializer: this._reactFlightPayloadDeserializer, - reactFlightServerErrorHandler: this._reactFlightServerErrorHandler, shouldProcessClientComponents: this._shouldProcessClientComponents, actorIdentifier: this._actorIdentifier, }; @@ -821,15 +830,7 @@ function updateTargetFromSource( } } if (sourceRecord && targetRecord) { - // ReactFlightClientResponses are lazy and only materialize when readRoot - // is called when we read the field, so if the record is a Flight field - // we always use the new record's data regardless of whether - // it actually changed. Let React take care of reconciliation instead. - const nextRecord = - RelayModernRecord.getType(targetRecord) === - RelayStoreReactFlightUtils.REACT_FLIGHT_TYPE_NAME - ? 
sourceRecord - : RelayModernRecord.update(targetRecord, sourceRecord); + const nextRecord = RelayModernRecord.update(targetRecord, sourceRecord); if (nextRecord !== targetRecord) { // Prevent mutation of a record from outside the store. if (__DEV__) { diff --git a/packages/relay-runtime/store/experimental-live-resolvers/isLiveStateValue.js b/packages/relay-runtime/store/experimental-live-resolvers/isLiveStateValue.js index 838224a6819a5..31156322a83b8 100644 --- a/packages/relay-runtime/store/experimental-live-resolvers/isLiveStateValue.js +++ b/packages/relay-runtime/store/experimental-live-resolvers/isLiveStateValue.js @@ -11,11 +11,17 @@ 'use strict'; -module.exports = function isLiveStateValue(v: mixed): boolean { +import type {LiveState} from '../RelayStoreTypes'; + +declare function isLiveStateValue>(v: mixed): v is U; + +function isLiveStateValue(v: mixed) { return ( v != null && typeof v === 'object' && typeof v.read === 'function' && typeof v.subscribe === 'function' ); -}; +} + +module.exports = isLiveStateValue; diff --git a/packages/relay-runtime/store/experimental-live-resolvers/resolverDataInjector.js b/packages/relay-runtime/store/experimental-live-resolvers/resolverDataInjector.js index 1ea8b4c9b57a6..09521911a7df0 100644 --- a/packages/relay-runtime/store/experimental-live-resolvers/resolverDataInjector.js +++ b/packages/relay-runtime/store/experimental-live-resolvers/resolverDataInjector.js @@ -6,6 +6,7 @@ * * @flow strict-local * @format + * @oncall relay */ 'use strict'; @@ -16,9 +17,11 @@ import type {FragmentType} from '../RelayStoreTypes'; const {readFragment} = require('../ResolverFragments'); const invariant = require('invariant'); +type ResolverFn = ($FlowFixMe, ?$FlowFixMe) => mixed; + /** * - * This a High order function that returns a relay resolver that can read the data for + * This a higher order function that returns a relay resolver that can read the data for * the fragment`. 
* * - fragment: contains fragment Reader AST with resolver's data dependencies. @@ -28,16 +31,15 @@ const invariant = require('invariant'); * This will not call the `resolverFn` if the fragment data for it is null/undefined. * The the compiler generates calls to this function, ensuring the correct set of arguments. */ -function resolverDataInjector< - TFragmentType: FragmentType, - TData: ?{...}, - TResolverFn: ($FlowFixMe, ?$FlowFixMe) => mixed, ->( +function resolverDataInjector( fragment: Fragment, - resolverFn: TResolverFn, + // Resolvers have their own type assertions, we don't want to confuse users + // with a type error in their generated code at this point. + _resolverFn: $FlowFixMe, fieldName?: string, isRequiredField?: boolean, ): (fragmentKey: TFragmentType, args: mixed) => mixed { + const resolverFn: ResolverFn = _resolverFn; return (fragmentKey: TFragmentType, args: mixed): mixed => { const data = readFragment(fragment, fragmentKey); if (fieldName != null) { @@ -45,8 +47,9 @@ function resolverDataInjector< if (isRequiredField === true) { invariant( false, - 'Expected required resolver field `%` to be present. But resolvers fragment data is null/undefined.', + 'Expected required resolver field `%s` in fragment `%s` to be present. But resolvers fragment data is null/undefined.', fieldName, + fragment.name, ); } else { return resolverFn(null, args); @@ -58,16 +61,25 @@ function resolverDataInjector< if (fieldName in data) { if (isRequiredField === true) { invariant( + // $FlowFixMe[invalid-computed-prop] data[fieldName] != null, - 'Expected required resolver field `%` to be non-null.', + 'Expected required resolver field `%s` in fragment `%s` to be non-null.', + fieldName, + fragment.name, ); } + // $FlowFixMe[invalid-computed-prop] return resolverFn(data[fieldName], args); } else { // If both `data` and `fieldName` is available, we expect the // `fieldName` field in the `data` object. 
- invariant(false, 'Missing field `%` in resolver response.', fieldName); + invariant( + false, + 'Missing field `%s` in fragment `%s` in resolver response.', + fieldName, + fragment.name, + ); } } else { // By default we will pass the full set of the fragment data to the resolver diff --git a/packages/relay-runtime/store/experimental-live-resolvers/weakObjectWrapper.js b/packages/relay-runtime/store/experimental-live-resolvers/weakObjectWrapper.js deleted file mode 100644 index ebd5696c42520..0000000000000 --- a/packages/relay-runtime/store/experimental-live-resolvers/weakObjectWrapper.js +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - */ - -'use strict'; - -const isLiveStateValue = require('./isLiveStateValue'); -const invariant = require('invariant'); - -/** - * Wrap the return `value` of the @live resolver that return @weak - * object into {`key`: `value`} object. - */ -function weakObjectWrapperLive( - resolverFn: (key: TKey, args?: TArgs) => mixed, - key: string, - isPlural: boolean, -): (key: TKey, args?: TArgs) => mixed { - return (...args) => { - const liveState = resolverFn.apply(null, args); - invariant( - isLiveStateValue(liveState), - 'Resolver is expected to return a LiveState value.', - ); - return { - ...liveState, - read: weakObjectWrapper( - () => { - return (liveState: $FlowFixMe).read(); - }, - key, - isPlural, - ), - }; - }; -} - -/** - * Wrap the return `value` of the resolver that return @weak - * object into {`key`: `value`} object. 
- */ -function weakObjectWrapper( - resolverFn: (key: TKey, args?: TArgs) => mixed, - key: string, - isPlural: boolean, -): (key: TKey, args?: TArgs) => mixed { - return (...args) => { - const data = resolverFn.apply(null, args); - if (isPlural) { - invariant( - Array.isArray(data), - 'Resolver is expected to return a plural value.', - ); - - return data.map(item => ({ - [key]: item, - })); - } else { - return { - [key]: data, - }; - } - }; -} - -module.exports = { - weakObjectWrapperLive, - weakObjectWrapper, -}; diff --git a/packages/relay-runtime/store/hasSignificantOverlappingIDs.js b/packages/relay-runtime/store/hasSignificantOverlappingIDs.js new file mode 100644 index 0000000000000..e8285204c5d84 --- /dev/null +++ b/packages/relay-runtime/store/hasSignificantOverlappingIDs.js @@ -0,0 +1,38 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {DataIDSet} from './RelayStoreTypes'; + +const {ROOT_ID} = require('./RelayStoreUtils'); +const {VIEWER_ID} = require('./ViewerPattern'); + +const ITERATOR_KEY = Symbol.iterator; + +function hasSignificantOverlappingIDs( + seenRecords: DataIDSet, + updatedRecordIDs: DataIDSet, +): boolean { + // $FlowFixMe[incompatible-use]: Set is an iterable type, accessing its iterator is allowed. 
+ const iterator = seenRecords[ITERATOR_KEY](); + let next = iterator.next(); + while (!next.done) { + const key = next.value; + if (updatedRecordIDs.has(key) && key !== ROOT_ID && key !== VIEWER_ID) { + return true; + } + next = iterator.next(); + } + return false; +} + +module.exports = hasSignificantOverlappingIDs; diff --git a/packages/relay-runtime/store/normalizeRelayPayload.js b/packages/relay-runtime/store/normalizeRelayPayload.js deleted file mode 100644 index 1a18f9ad42e4f..0000000000000 --- a/packages/relay-runtime/store/normalizeRelayPayload.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (c) Meta Platforms, Inc. and affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - * - * @flow strict-local - * @format - * @oncall relay - */ - -'use strict'; - -import type {PayloadData, PayloadError} from '../network/RelayNetworkTypes'; -import type {NormalizationOptions} from './RelayResponseNormalizer'; -import type { - NormalizationSelector, - RelayResponsePayload, -} from './RelayStoreTypes'; - -const RelayModernRecord = require('./RelayModernRecord'); -const RelayRecordSource = require('./RelayRecordSource'); -const RelayResponseNormalizer = require('./RelayResponseNormalizer'); -const {ROOT_TYPE} = require('./RelayStoreUtils'); - -function normalizeRelayPayload( - selector: NormalizationSelector, - payload: PayloadData, - errors: ?Array, - options: NormalizationOptions, -): RelayResponsePayload { - const source = RelayRecordSource.create(); - source.set( - selector.dataID, - RelayModernRecord.create(selector.dataID, ROOT_TYPE), - ); - const relayPayload = RelayResponseNormalizer.normalize( - source, - selector, - payload, - options, - ); - return { - ...relayPayload, - errors, - }; -} - -module.exports = normalizeRelayPayload; diff --git a/packages/relay-runtime/store/normalizeResponse.js b/packages/relay-runtime/store/normalizeResponse.js new file mode 100644 
index 0000000000000..4f89a47278797 --- /dev/null +++ b/packages/relay-runtime/store/normalizeResponse.js @@ -0,0 +1,48 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict-local + * @format + * @oncall relay + */ + +'use strict'; + +import type {GraphQLResponseWithData} from '../network/RelayNetworkTypes'; +import type {NormalizationOptions} from './RelayResponseNormalizer'; +import type { + NormalizationSelector, + RelayResponsePayload, +} from './RelayStoreTypes'; + +import RelayModernRecord from './RelayModernRecord'; +import RelayRecordSource from './RelayRecordSource'; +import RelayResponseNormalizer from './RelayResponseNormalizer'; + +function normalizeResponse( + response: GraphQLResponseWithData, + selector: NormalizationSelector, + typeName: string, + options: NormalizationOptions, +): RelayResponsePayload { + const {data, errors} = response; + const source = RelayRecordSource.create(); + const record = RelayModernRecord.create(selector.dataID, typeName); + source.set(selector.dataID, record); + const relayPayload = RelayResponseNormalizer.normalize( + source, + selector, + data, + options, + errors, + ); + return { + ...relayPayload, + isFinal: response.extensions?.is_final === true, + }; +} + +module.exports = normalizeResponse; diff --git a/packages/relay-runtime/store/readInlineData.js b/packages/relay-runtime/store/readInlineData.js index 260c0343b3e02..764f209e03753 100644 --- a/packages/relay-runtime/store/readInlineData.js +++ b/packages/relay-runtime/store/readInlineData.js @@ -33,8 +33,8 @@ declare function readInlineData( ): TData; declare function readInlineData( fragment: InlineFragment, - key: null | void, -): null | void; + key: ?HasSpread, +): ?TData; function readInlineData( fragment: GraphQLTaggedNode, diff --git 
a/packages/relay-runtime/subscription/__tests__/__generated__/requestSubscriptionTestSubscription.graphql.js b/packages/relay-runtime/subscription/__tests__/__generated__/requestSubscriptionTestSubscription.graphql.js index f2332c8a5a3ed..f9ab23b58fe9e 100644 --- a/packages/relay-runtime/subscription/__tests__/__generated__/requestSubscriptionTestSubscription.graphql.js +++ b/packages/relay-runtime/subscription/__tests__/__generated__/requestSubscriptionTestSubscription.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<8d7a88db11028c9923cf70cd19641ea2>> * @flow * @lightSyntaxTransform * @nogrep @@ -74,9 +74,7 @@ v3 = [ { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer" - ] + "value": "1AwQS7" } ], v4 = { @@ -149,7 +147,7 @@ return { "selections": [ (v4/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" } ], "storageKey": null @@ -207,7 +205,7 @@ return { (v5/*: any*/), (v4/*: any*/) ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"1AwQS7\")" }, (v6/*: any*/) ], diff --git a/packages/relay-runtime/subscription/__tests__/requestSubscription-test.js b/packages/relay-runtime/subscription/__tests__/requestSubscription-test.js index 174f644c9da12..3aed57af6c441 100644 --- a/packages/relay-runtime/subscription/__tests__/requestSubscription-test.js +++ b/packages/relay-runtime/subscription/__tests__/requestSubscription-test.js @@ -380,10 +380,13 @@ describe('requestSubscription-test', () => { config: { name: 'Mark', __id: expect.any(String), - __fragments: {requestSubscriptionTestExtraFragment: {}}, + __fragments: { + requestSubscriptionTestExtraFragment: { + // TODO T96653810: Correctly detect reading from root of mutation/subscription + $isWithinUnmatchedTypeRefinement: true, // should be false + }, + }, __fragmentOwner: 
expect.any(Object), - // TODO T96653810: Correctly detect reading from root of mutation/subscription - __isWithinUnmatchedTypeRefinement: true, // should be false }, }, }); @@ -421,9 +424,12 @@ describe('requestSubscription-test', () => { config: { name: 'Zuck', __id: expect.any(String), - __fragments: {requestSubscriptionTestExtraFragment: {}}, + __fragments: { + requestSubscriptionTestExtraFragment: { + $isWithinUnmatchedTypeRefinement: true, + }, + }, __fragmentOwner: expect.any(Object), - __isWithinUnmatchedTypeRefinement: true, }, }, }); diff --git a/packages/relay-runtime/util/NormalizationNode.js b/packages/relay-runtime/util/NormalizationNode.js index 9cec34e328ac5..c2f190bde9136 100644 --- a/packages/relay-runtime/util/NormalizationNode.js +++ b/packages/relay-runtime/util/NormalizationNode.js @@ -11,6 +11,7 @@ 'use strict'; +import type {ResolverFunction, ResolverModule} from './ReaderNode'; import type {ConcreteRequest} from './RelayConcreteNode'; import type {JSResourceReference} from 'JSResourceReference'; @@ -78,7 +79,7 @@ export type NormalizationClientExtension = { export type NormalizationField = | NormalizationResolverField - | NormalizationFlightField + | NormalizationLiveResolverField | NormalizationScalarField | NormalizationLinkedField; @@ -164,27 +165,45 @@ export type NormalizationScalarField = { +storageKey?: ?string, }; -export type NormalizationFlightField = { - +kind: 'FlightField', - +alias: ?string, - +name: string, - +args: ?$ReadOnlyArray, - +storageKey: ?string, +export type ResolverReference = { + +fieldType: string, + +resolverFunctionName: string, +}; + +export type ResolverInfo = { + +resolverFunction: ResolverFunction, + +rootFragment?: ?NormalizationSplitOperation, }; +type ResolverData = + | {+resolverModule?: ResolverModule} + | {+resolverReference?: ResolverReference} + | {+resolverInfo?: ResolverInfo}; + export type NormalizationResolverField = { +kind: 'RelayResolver', +name: string, - +args: ?$ReadOnlyArray, - +fragment: 
?NormalizationInlineFragment, - +storageKey: ?string, + +args?: ?$ReadOnlyArray, + +fragment?: ?NormalizationInlineFragment, + +storageKey?: ?string, + +isOutputType: boolean, + ...ResolverData, +}; + +export type NormalizationLiveResolverField = { + +kind: 'RelayLiveResolver', + +name: string, + +args?: ?$ReadOnlyArray, + +fragment?: ?NormalizationInlineFragment, + +storageKey?: ?string, +isOutputType: boolean, + ...ResolverData, }; export type NormalizationClientEdgeToClientObject = { +kind: 'ClientEdgeToClientObject', +linkedField: NormalizationLinkedField, - +backingField: NormalizationResolverField, + +backingField: NormalizationResolverField | NormalizationLiveResolverField, }; export type NormalizationClientComponent = { @@ -205,7 +224,6 @@ export type NormalizationSelection = | NormalizationClientEdgeToClientObject | NormalizationDefer | NormalizationField - | NormalizationFlightField | NormalizationFragmentSpread | NormalizationHandle | NormalizationInlineFragment @@ -218,7 +236,7 @@ export type NormalizationSplitOperation = { +argumentDefinitions?: $ReadOnlyArray, +kind: 'SplitOperation', +name: string, - +metadata: ?{+[key: string]: mixed, ...}, + +metadata?: ?{+[key: string]: mixed, ...}, +selections: $ReadOnlyArray, }; diff --git a/packages/relay-runtime/util/ReaderNode.js b/packages/relay-runtime/util/ReaderNode.js index c625c25b1a783..3b6c625e1bb92 100644 --- a/packages/relay-runtime/util/ReaderNode.js +++ b/packages/relay-runtime/util/ReaderNode.js @@ -33,7 +33,7 @@ export type ReaderInlineDataFragmentSpread = { +kind: 'InlineDataFragmentSpread', +name: string, +selections: $ReadOnlyArray, - +args: ?$ReadOnlyArray, + +args?: ?$ReadOnlyArray, +argumentDefinitions: $ReadOnlyArray, }; @@ -44,6 +44,7 @@ export type ReaderFragment = { +abstractKey?: ?string, +metadata?: ?{ +connection?: $ReadOnlyArray, + +throwOnFieldError?: boolean, +hasClientEdges?: boolean, +mask?: boolean, +plural?: boolean, @@ -76,11 +77,16 @@ export type ReaderPaginationFragment = 
{ }, }; +export type RefetchableIdentifierInfo = { + +identifierField: string, + +identifierQueryVariableName: string, +}; + export type ReaderRefetchMetadata = { +connection?: ?ReaderPaginationMetadata, +operation: string | ConcreteRequest, +fragmentPathInResult: Array, - +identifierField?: ?string, + +identifierInfo?: ?RefetchableIdentifierInfo, }; // Stricter form of ConnectionMetadata @@ -135,7 +141,7 @@ export type ReaderRootArgument = { export type ReaderInlineFragment = { +kind: 'InlineFragment', +selections: $ReadOnlyArray, - +type: string, + +type: ?string, +abstractKey?: ?string, }; @@ -212,14 +218,6 @@ export type ReaderScalarField = { +storageKey?: ?string, }; -export type ReaderFlightField = { - +kind: 'FlightField', - +alias: ?string, - +name: string, - +args: ?$ReadOnlyArray, - +storageKey: ?string, -}; - export type ReaderDefer = { +kind: 'Defer', +selections: $ReadOnlyArray, @@ -234,30 +232,47 @@ export type RequiredFieldAction = 'NONE' | 'LOG' | 'THROW'; export type ReaderRequiredField = { +kind: 'RequiredField', - +field: - | ReaderField - | ReaderClientEdgeToClientObject - | ReaderClientEdgeToServerObject, + +field: ReaderField | ReaderClientEdge, +action: RequiredFieldAction, +path: string, }; -type ResolverFunction = (...args: Array) => mixed; // flowlint-line unclear-type:off +export type CatchFieldTo = 'RESULT' | 'NULL'; + +export type ReaderCatchField = { + +kind: 'CatchField', + +field: ReaderField | ReaderClientEdge, + +to: CatchFieldTo, + +path: string, +}; + +export type ResolverFunction = (...args: Array) => mixed; // flowlint-line unclear-type:off // With ES6 imports, a resolver function might be exported under the `default` key. 
-type ResolverModule = ResolverFunction | {default: ResolverFunction}; +export type ResolverModule = ResolverFunction | {default: ResolverFunction}; + +export type ResolverNormalizationInfo = + | ResolverOutputTypeNormalizationInfo + | ResolverWeakModelNormalizationInfo; -export type ResolverNormalizationInfo = { +export type ResolverOutputTypeNormalizationInfo = { + +kind: 'OutputType', +concreteType: string | null, +plural: boolean, +normalizationNode: NormalizationSelectableNode, }; +export type ResolverWeakModelNormalizationInfo = { + +kind: 'WeakModel', + +concreteType: string | null, + +plural: boolean, +}; + export type ReaderRelayResolver = { +kind: 'RelayResolver', - +alias: ?string, + +alias?: ?string, +name: string, - +args: ?$ReadOnlyArray, - +fragment: ?ReaderFragmentSpread, + +args?: ?$ReadOnlyArray, + +fragment?: ?ReaderFragmentSpread, +path: string, +resolverModule: ResolverModule, +normalizationInfo?: ResolverNormalizationInfo, @@ -265,10 +280,10 @@ export type ReaderRelayResolver = { export type ReaderRelayLiveResolver = { +kind: 'RelayLiveResolver', - +alias: ?string, + +alias?: ?string, +name: string, - +args: ?$ReadOnlyArray, - +fragment: ?ReaderFragmentSpread, + +args?: ?$ReadOnlyArray, + +fragment?: ?ReaderFragmentSpread, +path: string, +resolverModule: ResolverModule, +normalizationInfo?: ResolverNormalizationInfo, @@ -277,6 +292,9 @@ export type ReaderRelayLiveResolver = { export type ReaderClientEdgeToClientObject = { +kind: 'ClientEdgeToClientObject', +concreteType: string | null, + +modelResolvers: { + [string]: ReaderRelayResolver | ReaderRelayLiveResolver, + } | null, +linkedField: ReaderLinkedField, +backingField: | ReaderRelayResolver @@ -294,15 +312,17 @@ export type ReaderClientEdgeToServerObject = { | ReaderClientExtension, }; +export type ReaderClientEdge = + | ReaderClientEdgeToClientObject + | ReaderClientEdgeToServerObject; + export type ReaderSelection = | ReaderCondition - | ReaderClientEdgeToClientObject - | 
ReaderClientEdgeToServerObject + | ReaderClientEdge | ReaderClientExtension | ReaderDefer | ReaderField | ReaderActorChange - | ReaderFlightField | ReaderFragmentSpread | ReaderAliasedFragmentSpread | ReaderInlineDataFragmentSpread @@ -310,6 +330,7 @@ export type ReaderSelection = | ReaderInlineFragment | ReaderModuleImport | ReaderStream + | ReaderCatchField | ReaderRequiredField | ReaderRelayResolver; diff --git a/packages/relay-runtime/util/RelayConcreteNode.js b/packages/relay-runtime/util/RelayConcreteNode.js index 97e25f8edb1d7..a6d900f3989c6 100644 --- a/packages/relay-runtime/util/RelayConcreteNode.js +++ b/packages/relay-runtime/util/RelayConcreteNode.js @@ -43,14 +43,14 @@ export type ProvidedVariablesType = {+[key: string]: {get(): mixed}}; /** * Contains the parameters required for executing a GraphQL request. - * The operation can either be provided as a persisted `id` or `text`. If given - * in `text` format, a `cacheID` as a hash of the text should be set to be used - * for local caching. + * The operation can either be provided as a persisted `id` or `text` or both. + * If `text` format is provided, a `cacheID` as a hash of the text should be set + * to be used for local caching. 
*/ export type RequestParameters = | { +id: string, - +text: null, + +text: string | null, // common fields +name: string, +operationKind: 'mutation' | 'query' | 'subscription', @@ -74,7 +74,7 @@ export type ClientRequestParameters = { +text: null, // common fields +name: string, - +operationKind: 'query', + +operationKind: 'query' | 'mutation', +providedVariables?: ProvidedVariablesType, +metadata: {[key: string]: mixed, ...}, }; @@ -95,6 +95,7 @@ export type GeneratedNode = const RelayConcreteNode = { ACTOR_CHANGE: 'ActorChange', + CATCH_FIELD: 'CatchField', CONDITION: 'Condition', CLIENT_COMPONENT: 'ClientComponent', CLIENT_EDGE_TO_SERVER_OBJECT: 'ClientEdgeToServerObject', @@ -102,7 +103,6 @@ const RelayConcreteNode = { CLIENT_EXTENSION: 'ClientExtension', DEFER: 'Defer', CONNECTION: 'Connection', - FLIGHT_FIELD: 'FlightField', FRAGMENT: 'Fragment', FRAGMENT_SPREAD: 'FragmentSpread', INLINE_DATA_FRAGMENT_SPREAD: 'InlineDataFragmentSpread', diff --git a/packages/relay-runtime/util/RelayFeatureFlags.js b/packages/relay-runtime/util/RelayFeatureFlags.js index 28c15e2dd02fc..32e9bc6e92596 100644 --- a/packages/relay-runtime/util/RelayFeatureFlags.js +++ b/packages/relay-runtime/util/RelayFeatureFlags.js @@ -14,10 +14,7 @@ import type {Disposable} from '../util/RelayRuntimeTypes'; export type FeatureFlags = { - ENABLE_CLIENT_EDGES: boolean, ENABLE_VARIABLE_CONNECTION_KEY: boolean, - ENABLE_PARTIAL_RENDERING_DEFAULT: boolean, - ENABLE_REACT_FLIGHT_COMPONENT_FIELD: boolean, ENABLE_RELAY_RESOLVERS: boolean, ENABLE_GETFRAGMENTIDENTIFIER_OPTIMIZATION: boolean, ENABLE_FRIENDLY_QUERY_NAME_GQL_URL: boolean, @@ -28,16 +25,35 @@ export type FeatureFlags = { ENABLE_CONTAINERS_SUBSCRIBE_ON_COMMIT: boolean, MAX_DATA_ID_LENGTH: ?number, STRING_INTERN_LEVEL: number, - USE_REACT_CACHE: boolean, - USE_REACT_CACHE_LEGACY_TIMEOUTS: boolean, - ENABLE_QUERY_RENDERER_SET_STATE_PREVENTION: boolean, + LOG_MISSING_RECORDS_IN_PROD: boolean, + ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE: boolean, 
+ + // Configure RelayStoreSubscriptions to mark a subscription as affected by an + // update if there are any overlapping IDs other than ROOT_ID or VIEWER_ID, + // even if none of the read fields were affected. The strict behavior (current + // default) requires eagerly reading fragments as they change which is + // incompatible with lazily notifying React of updates using `setState(() => + // read())`, so we are experimenting with this loose behavior which should be + // more compatible. + ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION: boolean, + ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES: boolean, + + // Configure whether Relay should handle any field errors that it encounters + // in a partial response. + // @see https://spec.graphql.org/October2021/#sec-Handling-Field-Errors + ENABLE_FIELD_ERROR_HANDLING: boolean, + + ENABLE_FIELD_ERROR_HANDLING_THROW_BY_DEFAULT: boolean, + ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE: boolean, + + PROCESS_OPTIMISTIC_UPDATE_BEFORE_SUBSCRIPTION: boolean, + + // Temporary flag to enable a gradual rollout of the fix for T185969900 + MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD: boolean, }; const RelayFeatureFlags: FeatureFlags = { - ENABLE_CLIENT_EDGES: false, ENABLE_VARIABLE_CONNECTION_KEY: false, - ENABLE_PARTIAL_RENDERING_DEFAULT: true, - ENABLE_REACT_FLIGHT_COMPONENT_FIELD: false, ENABLE_RELAY_RESOLVERS: false, ENABLE_GETFRAGMENTIDENTIFIER_OPTIMIZATION: false, ENABLE_FRIENDLY_QUERY_NAME_GQL_URL: false, @@ -48,9 +64,15 @@ const RelayFeatureFlags: FeatureFlags = { ENABLE_CONTAINERS_SUBSCRIBE_ON_COMMIT: false, MAX_DATA_ID_LENGTH: null, STRING_INTERN_LEVEL: 0, - USE_REACT_CACHE: false, - USE_REACT_CACHE_LEGACY_TIMEOUTS: true, - ENABLE_QUERY_RENDERER_SET_STATE_PREVENTION: false, + LOG_MISSING_RECORDS_IN_PROD: false, + ENABLE_LOOSE_SUBSCRIPTION_ATTRIBUTION: false, + ENABLE_OPERATION_TRACKER_OPTIMISTIC_UPDATES: false, + ENABLE_RELAY_OPERATION_TRACKER_SUSPENSE: false, + ENABLE_FIELD_ERROR_HANDLING: false, + 
ENABLE_FIELD_ERROR_HANDLING_THROW_BY_DEFAULT: false, + ENABLE_FIELD_ERROR_HANDLING_CATCH_DIRECTIVE: false, + PROCESS_OPTIMISTIC_UPDATE_BEFORE_SUBSCRIPTION: false, + MARK_RESOLVER_VALUES_AS_CLEAN_AFTER_FRAGMENT_REREAD: false, }; module.exports = RelayFeatureFlags; diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest1Query.graphql.js b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest1Query.graphql.js index e930aa5f9f0e4..ab2803ec94c86 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest1Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest1Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<41e083a018213043ae8d87d99a15ce1d>> + * @generated SignedSource<<6703998458a25903621ee13227354a96>> * @flow * @lightSyntaxTransform * @nogrep @@ -29,17 +29,15 @@ export type withProvidedVariablesTest1Query = {| response: withProvidedVariablesTest1Query$data, variables: withProvidedVariablesTest1Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') +}: {| +__relay_internal__pv__provideNumberOfFriendsrelayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = [ { @@ -152,7 +150,9 @@ return { "name": "withProvidedVariablesTest1Query", "operationKind": "query", "text": "query withProvidedVariablesTest1Query(\n $__relay_internal__pv__provideNumberOfFriendsrelayprovider: Int!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest1Fragment\n id\n }\n}\n\nfragment withProvidedVariablesTest1Fragment on User {\n friends(first: 
$__relay_internal__pv__provideNumberOfFriendsrelayprovider) {\n count\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest2Query.graphql.js b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest2Query.graphql.js index 2c88a772302b6..7b51233673ed9 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest2Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest2Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<930f7fd0c533887865d2503fc6e48d02>> + * @generated SignedSource<<28b3eba1bf961281a612ddc50ead4b1f>> * @flow * @lightSyntaxTransform * @nogrep @@ -31,17 +31,15 @@ export type withProvidedVariablesTest2Query = {| response: withProvidedVariablesTest2Query$data, variables: withProvidedVariablesTest2Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') +}: {| +__relay_internal__pv__provideNumberOfFriendsrelayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = { "defaultValue": null, @@ -175,7 +173,9 @@ return { "name": "withProvidedVariablesTest2Query", "operationKind": "query", "text": "query withProvidedVariablesTest2Query(\n $includeFriendsCount: Boolean!\n $__relay_internal__pv__provideNumberOfFriendsrelayprovider: Int!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest2Fragment_47ZY3u\n id\n }\n}\n\nfragment 
withProvidedVariablesTest2Fragment_47ZY3u on User {\n friends(first: $__relay_internal__pv__provideNumberOfFriendsrelayprovider) {\n count @include(if: $includeFriendsCount)\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest3Query.graphql.js b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest3Query.graphql.js index daac06c698ac6..eb7961d1bd5cc 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest3Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest3Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<18a39aa5973ba29b7169324c0037ec4c>> + * @generated SignedSource<<5815185ccc4d1a85d36ed17b4f10ec0d>> * @flow * @lightSyntaxTransform * @nogrep @@ -29,21 +29,19 @@ export type withProvidedVariablesTest3Query = {| response: withProvidedVariablesTest3Query$data, variables: withProvidedVariablesTest3Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), + "__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') +}: {| +__relay_internal__pv__provideIncludeUserNamesrelayprovider: {| +get: () => boolean, |}, +__relay_internal__pv__provideNumberOfFriendsrelayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), - "__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') -}; - var 
node/*: ConcreteRequest*/ = (function(){ var v0 = [ { @@ -175,7 +173,10 @@ return { "name": "withProvidedVariablesTest3Query", "operationKind": "query", "text": "query withProvidedVariablesTest3Query(\n $__relay_internal__pv__provideNumberOfFriendsrelayprovider: Int!\n $__relay_internal__pv__provideIncludeUserNamesrelayprovider: Boolean!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest3Fragment\n id\n }\n}\n\nfragment withProvidedVariablesTest3Fragment on User {\n name @include(if: $__relay_internal__pv__provideIncludeUserNamesrelayprovider)\n friends(first: $__relay_internal__pv__provideNumberOfFriendsrelayprovider) {\n count\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), + "__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest4Query.graphql.js b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest4Query.graphql.js index d741b0bde1c11..6dd528e418ba3 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest4Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest4Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<738f0bb0bc84818c73d75e001c580e88>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -30,21 +30,19 @@ export type withProvidedVariablesTest4Query = {| response: withProvidedVariablesTest4Query$data, variables: withProvidedVariablesTest4Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), + 
"__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') +}: {| +__relay_internal__pv__provideIncludeUserNamesrelayprovider: {| +get: () => boolean, |}, +__relay_internal__pv__provideNumberOfFriendsrelayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), - "__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = [ { @@ -207,7 +205,10 @@ return { "name": "withProvidedVariablesTest4Query", "operationKind": "query", "text": "query withProvidedVariablesTest4Query(\n $__relay_internal__pv__provideNumberOfFriendsrelayprovider: Int!\n $__relay_internal__pv__provideIncludeUserNamesrelayprovider: Boolean!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest4Fragment1\n ...withProvidedVariablesTest4Fragment2\n id\n }\n}\n\nfragment withProvidedVariablesTest4Fragment1 on User {\n friends(first: $__relay_internal__pv__provideNumberOfFriendsrelayprovider) {\n count\n edges {\n node {\n name @include(if: $__relay_internal__pv__provideIncludeUserNamesrelayprovider)\n id\n }\n }\n }\n}\n\nfragment withProvidedVariablesTest4Fragment2 on User {\n name @include(if: $__relay_internal__pv__provideIncludeUserNamesrelayprovider)\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideNumberOfFriendsrelayprovider": require('./../provideNumberOfFriends.relayprovider'), + "__relay_internal__pv__provideIncludeUserNamesrelayprovider": require('./../provideIncludeUserNames.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest5Query.graphql.js 
b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest5Query.graphql.js index b8f0f50e7f08d..d2216f8272afa 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest5Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest5Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<4d4d9044120228582912eca1d5a3b57b>> + * @generated SignedSource<<9f59b2fcd357e043b5d5fa94238f3fe2>> * @flow * @lightSyntaxTransform * @nogrep @@ -29,21 +29,19 @@ export type withProvidedVariablesTest5Query = {| response: withProvidedVariablesTest5Query$data, variables: withProvidedVariablesTest5Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider'), + "__relay_internal__pv__provideRandomNumber_invalid2relayprovider": require('./../provideRandomNumber_invalid2.relayprovider') +}: {| +__relay_internal__pv__provideRandomNumber_invalid1relayprovider: {| +get: () => number, |}, +__relay_internal__pv__provideRandomNumber_invalid2relayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider'), - "__relay_internal__pv__provideRandomNumber_invalid2relayprovider": require('./../provideRandomNumber_invalid2.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = [ { @@ -178,7 +176,10 @@ return { "name": "withProvidedVariablesTest5Query", "operationKind": "query", "text": "query withProvidedVariablesTest5Query(\n $__relay_internal__pv__provideRandomNumber_invalid1relayprovider: Float!\n $__relay_internal__pv__provideRandomNumber_invalid2relayprovider: Float!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest5Fragment\n id\n 
}\n}\n\nfragment withProvidedVariablesTest5Fragment on User {\n profile_picture(scale: $__relay_internal__pv__provideRandomNumber_invalid1relayprovider) {\n uri\n }\n other_picture: profile_picture(scale: $__relay_internal__pv__provideRandomNumber_invalid2relayprovider) {\n uri\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider'), + "__relay_internal__pv__provideRandomNumber_invalid2relayprovider": require('./../provideRandomNumber_invalid2.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest6Query.graphql.js b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest6Query.graphql.js index 92161c7d6f3de..6b3d98efd465a 100644 --- a/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest6Query.graphql.js +++ b/packages/relay-runtime/util/__tests__/__generated__/withProvidedVariablesTest6Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<2dc5f8f1fb7f57f4fbd9ed95c754cf4f>> + * @generated SignedSource<<33a7c304a92c212c1292fd71495c8cb8>> * @flow * @lightSyntaxTransform * @nogrep @@ -29,17 +29,15 @@ export type withProvidedVariablesTest6Query = {| response: withProvidedVariablesTest6Query$data, variables: withProvidedVariablesTest6Query$variables, |}; -type ProvidedVariablesType = {| +({ + "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider') +}: {| +__relay_internal__pv__provideRandomNumber_invalid1relayprovider: {| +get: () => number, |}, -|}; +|}); */ -var providedVariablesDefinition/*: ProvidedVariablesType*/ = { - "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider') -}; - var node/*: ConcreteRequest*/ = (function(){ var v0 = 
[ { @@ -152,7 +150,9 @@ return { "name": "withProvidedVariablesTest6Query", "operationKind": "query", "text": "query withProvidedVariablesTest6Query(\n $__relay_internal__pv__provideRandomNumber_invalid1relayprovider: Float!\n) {\n node(id: 4) {\n __typename\n ...withProvidedVariablesTest6Fragment\n id\n }\n}\n\nfragment withProvidedVariablesTest6Fragment on User {\n profile_picture(scale: $__relay_internal__pv__provideRandomNumber_invalid1relayprovider) {\n uri\n }\n}\n", - "providedVariables": providedVariablesDefinition + "providedVariables": { + "__relay_internal__pv__provideRandomNumber_invalid1relayprovider": require('./../provideRandomNumber_invalid1.relayprovider') + } } }; })(); diff --git a/packages/relay-runtime/util/__tests__/deepFreeze-test.js b/packages/relay-runtime/util/__tests__/deepFreeze-test.js index 64173eb1fa676..caec7b6fa9296 100644 --- a/packages/relay-runtime/util/__tests__/deepFreeze-test.js +++ b/packages/relay-runtime/util/__tests__/deepFreeze-test.js @@ -36,6 +36,13 @@ describe('deepFreeze()', () => { expect(object.b).toBeFrozen(); }); + it('freezes nested arrays', () => { + const array = deepFreeze([[[1]]]); + expect(array).toBeFrozen(); + expect(array[0]).toBeFrozen(); + expect(array[0][0]).toBeFrozen(); + }); + it('short-circuits given a circular reference', () => { const object = {a: 1, b: {c: 2}}; object.b.d = object; @@ -53,4 +60,18 @@ describe('deepFreeze()', () => { it('copes with null values', () => { expect(deepFreeze({a: null})).toBeFrozen(); }); + + it('does not throw on array buffers', () => { + const x = new Uint16Array([21, 31]); + expect(() => deepFreeze(x)).not.toThrow(); + expect(() => deepFreeze({x})).not.toThrow(); + }); + + it('does not freeze class instances', () => { + class Foo { + val = 3; + } + const f = deepFreeze(new Foo()); + expect(f).not.toBeFrozen(); + }); }); diff --git a/packages/relay-runtime/util/__tests__/recycleNodesInto-test.js b/packages/relay-runtime/util/__tests__/recycleNodesInto-test.js index 
7198bf22a5aca..9aa7e0ec64d85 100644 --- a/packages/relay-runtime/util/__tests__/recycleNodesInto-test.js +++ b/packages/relay-runtime/util/__tests__/recycleNodesInto-test.js @@ -10,6 +10,7 @@ 'use strict'; +const deepFreeze = require('../deepFreeze'); const recycleNodesInto = require('../recycleNodesInto'); describe('recycleNodesInto', () => { @@ -137,6 +138,7 @@ describe('recycleNodesInto', () => { expect(recycled).not.toBe(prevData); expect(recycled.foo).not.toBe(prevData.foo); + expect(recycled).toBe(nextData); }); it('does not recycle arrays as objects', () => { @@ -176,6 +178,12 @@ describe('recycleNodesInto', () => { expect(recycleNodesInto(prevData, nextData)).toBe(prevData); }); + it('does not recycle different `nextData`', () => { + const prevData = [{x: 1}, 2, 3]; + const nextData = [{x: 1}, 2, 4]; + expect(recycleNodesInto(prevData, nextData)).toBe(nextData); + }); + it('recycles arrays without mutating `prevData`', () => { const prevItem = {foo: 1}; const prevData = [prevItem]; @@ -325,4 +333,128 @@ describe('recycleNodesInto', () => { expect(recycleNodesInto(prevData, nextData)).toBe(nextData); }); }); + + describe('errors', () => { + it('does not recycle errors with equal values', () => { + const a = new Error('test 1'); + const b = new Error('test 1'); + expect(recycleNodesInto(a, b)).toBe(b); + }); + + it('does not recycle errors with unequal values', () => { + const a = new Error('test 1'); + const b = new Error('test 2'); + expect(recycleNodesInto(a, b)).toBe(b); + }); + }); + + describe('freeze', () => { + it('does not mutate deeply frozen array in `nextData`', () => { + const prevData = [[{x: 1}], 1]; + const nextData = [[{x: 1}], 2]; + deepFreeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + expect(recycled).toBe(nextData); + expect(recycled[0]).toBe(nextData[0]); + expect(recycled[0][0]).toBe(nextData[0][0]); + }); + + it('does not mutate deeply frozen object in `nextData`', () => { + const nextItem = { + c: 1, + }; + 
const nextObject = { + b: nextItem, + }; + const nextData = { + a: nextObject, + }; + const prevData = {a: {b: {c: 1}}, d: 1}; + + deepFreeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + expect(recycled).toBe(nextData); + expect(recycled.a).toBe(nextObject); + expect(recycled.a.b).toBe(nextItem); + }); + + it('does not mutate into frozen object in `nextData`', () => { + const nextItem = { + c: 1, + }; + const nextObject = { + b: nextItem, + }; + const nextData = { + a: nextObject, + }; + const prevData = {a: {b: {c: 1}}, d: 1}; + + Object.freeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + expect(recycled).toBe(nextData); + expect(recycled.a).toBe(nextObject); + expect(recycled.a.b).toBe(nextItem); + }); + + it('reuse prevData and does not mutate deeply frozen array in `nextData`', () => { + const nextItem = {x: 1}; + const nextArray = [nextItem]; + const prevData = [[{x: 1}], 1]; + const nextData = [nextArray, 1]; + deepFreeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + expect(recycled).toBe(prevData); + expect(nextData[0]).toBe(nextArray); + expect(nextData[0][0]).toBe(nextItem); + }); + + it('reuse prevData and does not mutate deeply frozen object in `nextData`', () => { + const nextItem = { + c: 1, + }; + const nextObject = { + b: nextItem, + }; + const nextData = { + a: nextObject, + }; + const prevData = { + a: { + b: { + c: 1, + }, + }, + }; + deepFreeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + expect(recycled).toBe(prevData); + expect(nextData.a).toBe(nextObject); + expect(nextData.a.b).toBe(nextItem); + }); + }); + + it('reuse prevData and does not mutate frozen object in `nextData`', () => { + const nextItem = { + c: 1, + }; + const nextObject = { + b: nextItem, + }; + const nextData = { + a: nextObject, + }; + const prevData = { + a: { + b: { + c: 1, + }, + }, + }; + Object.freeze(nextData); + const recycled = recycleNodesInto(prevData, nextData); + 
expect(recycled).toBe(prevData); + expect(nextData.a).toBe(nextObject); + expect(nextData.a.b).toBe(nextItem); + }); }); diff --git a/packages/relay-runtime/util/createPayloadFor3DField.js b/packages/relay-runtime/util/createPayloadFor3DField.js index ae91d5dbccab5..866b0fd86a9c5 100644 --- a/packages/relay-runtime/util/createPayloadFor3DField.js +++ b/packages/relay-runtime/util/createPayloadFor3DField.js @@ -25,6 +25,7 @@ export opaque type Local3DPayload< +Response: {...}, > = Response; +// $FlowFixMe[unsupported-variance-annotation] function createPayloadFor3DField<+DocumentName: string, +Response: {...}>( name: DocumentName, operation: JSResourceReference, diff --git a/packages/relay-runtime/util/deepFreeze.js b/packages/relay-runtime/util/deepFreeze.js index 6fcd355e02a38..cbbb25f7fd81c 100644 --- a/packages/relay-runtime/util/deepFreeze.js +++ b/packages/relay-runtime/util/deepFreeze.js @@ -18,6 +18,9 @@ * returns the now-frozen original object. */ function deepFreeze(object: T): T { + if (!shouldBeFrozen(object)) { + return object; + } Object.freeze(object); Object.getOwnPropertyNames(object).forEach(name => { const property = object[name]; @@ -32,4 +35,13 @@ function deepFreeze(object: T): T { return object; } +function shouldBeFrozen(value: mixed): boolean { + // Only freeze plain JS arrays and objects + return ( + value != null && + (Array.isArray(value) || + (typeof value === 'object' && value.constructor === Object)) + ); +} + module.exports = deepFreeze; diff --git a/packages/relay-runtime/util/getFragmentIdentifier.js b/packages/relay-runtime/util/getFragmentIdentifier.js index 54c7a6fcca301..16440c2841222 100644 --- a/packages/relay-runtime/util/getFragmentIdentifier.js +++ b/packages/relay-runtime/util/getFragmentIdentifier.js @@ -32,10 +32,10 @@ function getFragmentIdentifier( selector == null ? 'null' : selector.kind === 'SingularReaderSelector' - ? 
selector.owner.identifier - : '[' + - selector.selectors.map(sel => sel.owner.identifier).join(',') + - ']'; + ? selector.owner.identifier + : '[' + + selector.selectors.map(sel => sel.owner.identifier).join(',') + + ']'; const fragmentVariables = getVariablesFromFragment(fragmentNode, fragmentRef); const dataIDs = getDataIDsFromFragment(fragmentNode, fragmentRef); @@ -44,10 +44,10 @@ function getFragmentIdentifier( typeof dataIDs === 'undefined' ? 'missing' : dataIDs == null - ? 'null' - : Array.isArray(dataIDs) - ? '[' + dataIDs.join(',') + ']' - : dataIDs; + ? 'null' + : Array.isArray(dataIDs) + ? '[' + dataIDs.join(',') + ']' + : dataIDs; ids = RelayFeatureFlags.STRING_INTERN_LEVEL <= 1 ? ids diff --git a/packages/relay-runtime/util/getPaginationMetadata.js b/packages/relay-runtime/util/getPaginationMetadata.js index c5577ec76dede..7ea9a35b7609d 100644 --- a/packages/relay-runtime/util/getPaginationMetadata.js +++ b/packages/relay-runtime/util/getPaginationMetadata.js @@ -50,14 +50,15 @@ function getPaginationMetadata( componentDisplayName, fragmentNode.name, ); - const identifierField = refetchMetadata.identifierField; + const identifierInfo = refetchMetadata.identifierInfo; invariant( - identifierField == null || typeof identifierField === 'string', + identifierInfo?.identifierField == null || + typeof identifierInfo.identifierField === 'string', 'Relay: getRefetchMetadata(): Expected `identifierField` to be a string.', ); return { connectionPathInFragmentData, - identifierField, + identifierField: identifierInfo?.identifierField, paginationRequest, paginationMetadata, stream: connectionMetadata.stream === true, diff --git a/packages/relay-runtime/util/getRefetchMetadata.js b/packages/relay-runtime/util/getRefetchMetadata.js index 60f3c61d69099..86935623c3023 100644 --- a/packages/relay-runtime/util/getRefetchMetadata.js +++ b/packages/relay-runtime/util/getRefetchMetadata.js @@ -11,7 +11,11 @@ 'use strict'; -import type {ReaderFragment, 
ReaderRefetchMetadata} from './ReaderNode'; +import type { + ReaderFragment, + ReaderRefetchMetadata, + RefetchableIdentifierInfo, +} from './ReaderNode'; import type {ConcreteRequest} from './RelayConcreteNode'; const invariant = require('invariant'); @@ -21,7 +25,7 @@ function getRefetchMetadata( componentDisplayName: string, ): { fragmentRefPathInResponse: $ReadOnlyArray, - identifierField: ?string, + identifierInfo: ?RefetchableIdentifierInfo, refetchableRequest: ConcreteRequest, refetchMetadata: ReaderRefetchMetadata, } { @@ -58,14 +62,22 @@ function getRefetchMetadata( 'this is likely a bug in Relay.', componentDisplayName, ); - const identifierField = refetchMetadata.identifierField; - invariant( - identifierField == null || typeof identifierField === 'string', - 'Relay: getRefetchMetadata(): Expected `identifierField` to be a string.', - ); + const identifierInfo = refetchMetadata.identifierInfo; + if (identifierInfo != null) { + invariant( + identifierInfo.identifierField == null || + typeof identifierInfo.identifierField === 'string', + 'Relay: getRefetchMetadata(): Expected `identifierField` to be a string.', + ); + invariant( + identifierInfo.identifierQueryVariableName == null || + typeof identifierInfo.identifierQueryVariableName === 'string', + 'Relay: getRefetchMetadata(): Expected `identifierQueryVariableName` to be a string.', + ); + } return { fragmentRefPathInResponse, - identifierField, + identifierInfo, refetchableRequest, refetchMetadata, }; diff --git a/packages/relay-runtime/util/handlePotentialSnapshotErrors.js b/packages/relay-runtime/util/handlePotentialSnapshotErrors.js index db1549f31fca6..af40bbd571fb8 100644 --- a/packages/relay-runtime/util/handlePotentialSnapshotErrors.js +++ b/packages/relay-runtime/util/handlePotentialSnapshotErrors.js @@ -12,30 +12,76 @@ 'use strict'; import type { + ErrorResponseFields, IEnvironment, MissingRequiredFields, RelayResolverErrors, } from '../store/RelayStoreTypes'; +import {RelayFieldError} from 
'../store/RelayErrorTrie'; +import RelayFeatureFlags from './RelayFeatureFlags'; + function handlePotentialSnapshotErrors( environment: IEnvironment, missingRequiredFields: ?MissingRequiredFields, relayResolverErrors: RelayResolverErrors, + errorResponseFields: ?ErrorResponseFields, + throwOnFieldError: boolean, ) { for (const resolverError of relayResolverErrors) { - environment.requiredFieldLogger({ + environment.relayFieldLogger({ kind: 'relay_resolver.error', owner: resolverError.field.owner, fieldPath: resolverError.field.path, error: resolverError.error, }); } + + if ( + relayResolverErrors.length > 0 && + (RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_THROW_BY_DEFAULT || + throwOnFieldError) + ) { + throw new RelayFieldError( + `Relay: Unexpected resolver exception`, + relayResolverErrors.map(e => ({message: e.error.message})), + ); + } + + if ( + (RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING || throwOnFieldError) && + errorResponseFields != null + ) { + if (errorResponseFields != null) { + for (const fieldError of errorResponseFields) { + const {path, owner, error} = fieldError; + + environment.relayFieldLogger({ + kind: 'relay_field_payload.error', + owner: owner, + fieldPath: path, + error, + }); + } + } + + if ( + RelayFeatureFlags.ENABLE_FIELD_ERROR_HANDLING_THROW_BY_DEFAULT || + throwOnFieldError + ) { + throw new RelayFieldError( + `Relay: Unexpected response payload - this object includes an errors property in which you can access the underlying errors`, + errorResponseFields.map(({error}) => error), + ); + } + } + if (missingRequiredFields != null) { switch (missingRequiredFields.action) { case 'THROW': { const {path, owner} = missingRequiredFields.field; // This gives the consumer the chance to throw their own error if they so wish. 
- environment.requiredFieldLogger({ + environment.relayFieldLogger({ kind: 'missing_field.throw', owner, fieldPath: path, @@ -46,7 +92,7 @@ function handlePotentialSnapshotErrors( } case 'LOG': missingRequiredFields.fields.forEach(({path, owner}) => { - environment.requiredFieldLogger({ + environment.relayFieldLogger({ kind: 'missing_field.log', owner, fieldPath: path, diff --git a/packages/relay-runtime/util/isPromise.js b/packages/relay-runtime/util/isPromise.js index fa054c398ee91..1d14833860d07 100644 --- a/packages/relay-runtime/util/isPromise.js +++ b/packages/relay-runtime/util/isPromise.js @@ -11,10 +11,10 @@ 'use strict'; -declare function isPromise(p: mixed): boolean %checks(p instanceof Promise); +declare function isPromise(p: mixed): p is Promise; -function isPromise(p: $FlowFixMe): boolean { - return !!p && typeof p.then === 'function'; +function isPromise(p: mixed) { + return p != null && typeof p === 'object' && typeof p.then === 'function'; } module.exports = isPromise; diff --git a/packages/relay-runtime/util/recycleNodesInto.js b/packages/relay-runtime/util/recycleNodesInto.js index ce718c5f1b34b..cc611e530d397 100644 --- a/packages/relay-runtime/util/recycleNodesInto.js +++ b/packages/relay-runtime/util/recycleNodesInto.js @@ -11,27 +11,27 @@ 'use strict'; -const hasWeakSetDefined = typeof WeakSet !== 'undefined'; -const hasWeakMapDefined = typeof WeakMap !== 'undefined'; - /** * Recycles subtrees from `prevData` by replacing equal subtrees in `nextData`. + * Does not mutate a frozen subtree. 
*/ function recycleNodesInto(prevData: T, nextData: T): T { + return recycleNodesIntoImpl(prevData, nextData, true); +} + +function recycleNodesIntoImpl( + prevData: T, + nextData: T, + canMutate: boolean, +): T { if ( prevData === nextData || typeof prevData !== 'object' || - prevData instanceof Set || - prevData instanceof Map || - (hasWeakSetDefined && prevData instanceof WeakSet) || - (hasWeakMapDefined && prevData instanceof WeakMap) || !prevData || + (prevData.constructor !== Object && !Array.isArray(prevData)) || typeof nextData !== 'object' || - nextData instanceof Set || - nextData instanceof Map || - (hasWeakSetDefined && nextData instanceof WeakSet) || - (hasWeakMapDefined && nextData instanceof WeakMap) || - !nextData + !nextData || + (nextData.constructor !== Object && !Array.isArray(nextData)) ) { return nextData; } @@ -41,18 +41,17 @@ function recycleNodesInto(prevData: T, nextData: T): T { const prevArray: ?Array = Array.isArray(prevData) ? prevData : null; const nextArray: ?Array = Array.isArray(nextData) ? 
nextData : null; if (prevArray && nextArray) { + const canMutateNext = canMutate && !Object.isFrozen(nextArray); canRecycle = nextArray.reduce((wasEqual, nextItem, ii) => { const prevValue = prevArray[ii]; - const nextValue = recycleNodesInto(prevValue, nextItem); - if (nextValue !== nextArray[ii]) { - if (__DEV__) { - if (!Object.isFrozen(nextArray)) { - nextArray[ii] = nextValue; - } - } else { - nextArray[ii] = nextValue; - } + const nextValue = recycleNodesIntoImpl( + prevValue, + nextItem, + canMutateNext, + ); + if (nextValue !== nextArray[ii] && canMutateNext) { + nextArray[ii] = nextValue; } return wasEqual && nextValue === prevArray[ii]; }, true) && prevArray.length === nextArray.length; @@ -62,20 +61,18 @@ function recycleNodesInto(prevData: T, nextData: T): T { const nextObject = nextData; const prevKeys = Object.keys(prevObject); const nextKeys = Object.keys(nextObject); + const canMutateNext = canMutate && !Object.isFrozen(nextObject); canRecycle = nextKeys.reduce((wasEqual, key) => { const prevValue = prevObject[key]; - const nextValue = recycleNodesInto(prevValue, nextObject[key]); - if (nextValue !== nextObject[key]) { - if (__DEV__) { - if (!Object.isFrozen(nextObject)) { - // $FlowFixMe[cannot-write] - nextObject[key] = nextValue; - } - } else { - // $FlowFixMe[cannot-write] - nextObject[key] = nextValue; - } + const nextValue = recycleNodesIntoImpl( + prevValue, + nextObject[key], + canMutateNext, + ); + if (nextValue !== nextObject[key] && canMutateNext) { + // $FlowFixMe[cannot-write] + nextObject[key] = nextValue; } return wasEqual && nextValue === prevObject[key]; }, true) && prevKeys.length === nextKeys.length; diff --git a/packages/relay-runtime/util/registerEnvironmentWithDevTools.js b/packages/relay-runtime/util/registerEnvironmentWithDevTools.js index a71568a79ae70..76a1baafcd792 100644 --- a/packages/relay-runtime/util/registerEnvironmentWithDevTools.js +++ b/packages/relay-runtime/util/registerEnvironmentWithDevTools.js @@ -20,8 +20,8 
@@ function registerEnvironmentWithDevTools(environment: IEnvironment): void { typeof global !== 'undefined' ? global : typeof window !== 'undefined' - ? window - : undefined; + ? window + : undefined; const devToolsHook = _global && _global.__RELAY_DEVTOOLS_HOOK__; if (devToolsHook) { devToolsHook.registerEnvironment(environment); diff --git a/packages/relay-runtime/util/shallowFreeze.js b/packages/relay-runtime/util/shallowFreeze.js new file mode 100644 index 0000000000000..1defb9cdffc84 --- /dev/null +++ b/packages/relay-runtime/util/shallowFreeze.js @@ -0,0 +1,23 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @flow strict + * @format + * @oncall relay + */ + +'use strict'; + +// Shallow freeze to prevent Relay from mutating the value in recycleNodesInto or deepFreezing the value +module.exports = function shallowFreeze(value: mixed) { + if ( + typeof value === 'object' && + value != null && + (Array.isArray(value) || value.constructor === Object) + ) { + Object.freeze(value); + } +}; diff --git a/packages/relay-test-utils-internal/consoleErrorsAndWarnings.js b/packages/relay-test-utils-internal/consoleErrorsAndWarnings.js index 628c46a91b6ba..6a758475df182 100644 --- a/packages/relay-test-utils-internal/consoleErrorsAndWarnings.js +++ b/packages/relay-test-utils-internal/consoleErrorsAndWarnings.js @@ -68,6 +68,7 @@ function createConsoleInterceptionSystem( if (installed) { throw new Error(`${installerName} should be called only once.`); } + installed = true; setUpMock(handleMessage); diff --git a/packages/relay-test-utils-internal/generateTestsFromFixtures.js b/packages/relay-test-utils-internal/generateTestsFromFixtures.js index 43f1e0e3d5023..53bdc9186fe6e 100644 --- a/packages/relay-test-utils-internal/generateTestsFromFixtures.js +++ b/packages/relay-test-utils-internal/generateTestsFromFixtures.js @@ 
-22,7 +22,7 @@ const path = require('path'); */ const FIXTURE_TAG = Symbol.for('FIXTURE_TAG'); expect.addSnapshotSerializer({ - print(value) { + serialize(value) { return Object.keys(value) .map(key => `~~~~~~~~~~ ${key.toUpperCase()} ~~~~~~~~~~\n${value[key]}`) .join('\n'); diff --git a/packages/relay-test-utils-internal/index.js b/packages/relay-test-utils-internal/index.js index 0734cef29b478..0111a7c86c1c0 100644 --- a/packages/relay-test-utils-internal/index.js +++ b/packages/relay-test-utils-internal/index.js @@ -48,11 +48,12 @@ const {createMockEnvironment, unwrapContainer} = require('relay-test-utils'); function cannotReadPropertyOfUndefined__DEPRECATED( propertyName: string, ): string { - if (process.version.match(/^v16\.(.+)$/)) { - return `Cannot read properties of undefined (reading '${propertyName}')`; - } else { + const matches = process.version.match(/^v(\d+)\./); + const majorVersion = matches == null ? null : parseInt(matches[1], 10); + if (majorVersion == null || majorVersion < 16) { return `Cannot read property '${propertyName}' of undefined`; } + return `Cannot read properties of undefined (reading '${propertyName}')`; } /** diff --git a/packages/relay-test-utils-internal/package.json b/packages/relay-test-utils-internal/package.json index 156c6ede34e73..879b4a84243e3 100644 --- a/packages/relay-test-utils-internal/package.json +++ b/packages/relay-test-utils-internal/package.json @@ -1,7 +1,7 @@ { "name": "relay-test-utils-internal", "description": "Internal utilities for testing Relay.", - "version": "15.0.0", + "version": "17.0.0", "keywords": [ "graphql", "relay" @@ -17,7 +17,7 @@ "dependencies": { "@babel/runtime": "^7.0.0", "fbjs": "^3.0.2", - "relay-runtime": "15.0.0" + "relay-runtime": "17.0.0" }, "directories": { "": "./" diff --git a/packages/relay-test-utils-internal/schema-extensions/IAnimal.graphql b/packages/relay-test-utils-internal/schema-extensions/IAnimal.graphql new file mode 100644 index 0000000000000..53a79e3933f28 --- 
/dev/null +++ b/packages/relay-test-utils-internal/schema-extensions/IAnimal.graphql @@ -0,0 +1,36 @@ +interface IAnimal { + id: ID! + legs: Int +} + +input AnimalRequest { + """ + Should be a valid GraphQL type that implements `IAnimal`. + """ + ofType: String! + """ + Tells the resolver to return an ID for which a model does not exist. + """ + returnValidID: Boolean! +} + +type Chicken implements IAnimal { + id: ID! + legs: Int + greeting: String +} + +extend type Query { + chicken: Chicken +} + +interface IWeakAnimal { + color: String +} + +input WeakAnimalRequest { + """ + Should be a valid GraphQL type that implements `IWeakAnimal`. + """ + ofType: String! +} diff --git a/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-DuplicateClientInterface.graphql b/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-DuplicateClientInterface.graphql deleted file mode 100644 index 30edd99d46ad6..0000000000000 --- a/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-DuplicateClientInterface.graphql +++ /dev/null @@ -1,46 +0,0 @@ -# This serves as a fixture to show that we currently do not -# catch the following invalid GraphQL: duplicated client objects, -# interfaces, unions, input objects, scalars and enums. -# Note that for interfaces, client types that implement the interface -# only need to implement the second instance of the interface. 
- -type DuplicatedObject { - field_1: String -} -type DuplicatedObject { - field_2: String -} - -interface DuplicatedInterface { - field_1: String -} -interface DuplicatedInterface { - field_2: String -} -type SomeClientType implements DuplicatedInterface { - field_2: String -} - -union DuplicatedUnion = DuplicatedObject | SomeClientType -union DuplicatedUnion = User | Query - -input DuplicatedInput { - field_1: String -} -input DuplicatedInput { - field_2: String -} - -scalar DuplicatedScalar -scalar DuplicatedScalar - -enum DuplicatedEnum { - A - B - C -} -enum DuplicatedEnum { - D - E - F -} diff --git a/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-LyingClientTypes.graphql b/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-LyingClientTypes.graphql deleted file mode 100644 index 8c63bf5fc3015..0000000000000 --- a/packages/relay-test-utils-internal/schema-extensions/InvalidSchemaExtension-LyingClientTypes.graphql +++ /dev/null @@ -1,13 +0,0 @@ -# This serves as a fixture to show that we currently do not -# catch the following invalid GraphQL: a client interface and a -# client type which claims to implement that interface, but -# does not contain all the fields. -# See [the spec](https://spec.graphql.org/October2021/#sec-Objects). - -interface GoodClientInterface { - name: String -} - -type BadClientType implements GoodClientInterface { - not_name: String -} diff --git a/packages/relay-test-utils-internal/testschema.graphql b/packages/relay-test-utils-internal/testschema.graphql index 5235559fecc6e..6eb3c5484ade7 100644 --- a/packages/relay-test-utils-internal/testschema.graphql +++ b/packages/relay-test-utils-internal/testschema.graphql @@ -1063,6 +1063,7 @@ type User implements Named & Node & Actor & HasJsField & Entity & AllConcreteTyp nearest_neighbor: User! parents: [User!]! actor_key: ID! 
+ count: Int } enum NameRendererContext { diff --git a/packages/relay-test-utils/RelayMockPayloadGenerator.js b/packages/relay-test-utils/RelayMockPayloadGenerator.js index 966c9c393bd9c..5cc61004cf4a5 100644 --- a/packages/relay-test-utils/RelayMockPayloadGenerator.js +++ b/packages/relay-test-utils/RelayMockPayloadGenerator.js @@ -23,9 +23,12 @@ import type { OperationDescriptor, Variables, } from 'relay-runtime'; +import type {GraphQLResponseWithData} from 'relay-runtime/network/RelayNetworkTypes'; +import type {GraphQLResponse} from 'relay-runtime/network/RelayNetworkTypes'; const invariant = require('invariant'); const { + __internal, RelayConcreteNode, TYPENAME_KEY, getModuleComponentKey, @@ -40,13 +43,13 @@ const { CONDITION, CONNECTION, DEFER, - FLIGHT_FIELD, FRAGMENT_SPREAD, INLINE_FRAGMENT, LINKED_FIELD, LINKED_HANDLE, MODULE_IMPORT, RELAY_RESOLVER, + RELAY_LIVE_RESOLVER, SCALAR_FIELD, SCALAR_HANDLE, STREAM, @@ -168,10 +171,10 @@ function createValueResolver(mockResolvers: ?MockResolvers): ValueResolver { function generateMockList( placeholderArray: $ReadOnlyArray, - generateListItem: (defaultValue: mixed) => T, + generateListItem: (defaultValue: mixed, index?: number) => T, ): $ReadOnlyArray { - return placeholderArray.map(possibleDefaultValue => - generateListItem(possibleDefaultValue), + return placeholderArray.map((possibleDefaultValue, index) => + generateListItem(possibleDefaultValue, index), ); } @@ -181,12 +184,15 @@ class RelayMockPayloadGenerator { _mockResolvers: MockResolvers; _selectionMetadata: SelectionMetadata; _mockClientData: boolean; + _generateDeferredPayload: boolean; + _deferredPayloads: Array; constructor(options: { +variables: Variables, +mockResolvers: MockResolvers | null, +selectionMetadata: SelectionMetadata | null, +mockClientData: ?boolean, + +generateDeferredPayload: ?boolean, }) { this._variables = options.variables; this._mockResolvers = { @@ -196,12 +202,14 @@ class RelayMockPayloadGenerator { this._selectionMetadata = 
options.selectionMetadata ?? {}; this._resolveValue = createValueResolver(this._mockResolvers); this._mockClientData = options.mockClientData ?? false; + this._generateDeferredPayload = options.generateDeferredPayload ?? false; + this._deferredPayloads = []; } generate( selections: $ReadOnlyArray, operationType: string, - ): MockData { + ): Array { const defaultValues = this._getDefaultValuesForObject( operationType, null, @@ -209,7 +217,7 @@ class RelayMockPayloadGenerator { [], // path {}, ); - return this._traverse( + const data = this._traverse( { selections, typeName: operationType, @@ -222,6 +230,8 @@ class RelayMockPayloadGenerator { null, // prevData defaultValues, ); + + return [{data}, ...this._deferredPayloads]; } _traverse( @@ -304,6 +314,26 @@ class RelayMockPayloadGenerator { // falls through case DEFER: case STREAM: { + const isDeferreable = + selection.if == null || this._variables[selection.if]; + if (this._generateDeferredPayload && isDeferreable) { + const deferredData = this._traverseSelections( + selection.selections, + typeName, + isAbstractType, + path, + {}, + defaultValues, + ); + + this._deferredPayloads.push({ + path: [...path], + label: selection.label, + data: deferredData, + }); + + break; + } mockData = this._traverseSelections( selection.selections, typeName, @@ -315,8 +345,24 @@ class RelayMockPayloadGenerator { break; } - case CLIENT_COMPONENT: + case CLIENT_COMPONENT: { + mockData = this._traverseSelections( + selection.fragment.selections, + typeName, + isAbstractType, + path, + mockData, + defaultValues, + ); + break; + } case FRAGMENT_SPREAD: { + const prevVariables = this._variables; + this._variables = __internal.getLocalVariables( + this._variables, + selection.fragment.argumentDefinitions, + selection.args, + ); mockData = this._traverseSelections( selection.fragment.selections, typeName, @@ -325,6 +371,7 @@ class RelayMockPayloadGenerator { mockData, defaultValues, ); + this._variables = prevVariables; break; } @@ -481,7 
+528,7 @@ class RelayMockPayloadGenerator { false, path, null, - null, + defaultValues, ), }; } @@ -495,10 +542,9 @@ class RelayMockPayloadGenerator { case SCALAR_HANDLE: case LINKED_HANDLE: break; - case FLIGHT_FIELD: - throw new Error('Flight fields are not yet supported.'); case ACTOR_CHANGE: throw new Error('ActorChange fields are not yet supported.'); + case RELAY_LIVE_RESOLVER: case RELAY_RESOLVER: if (selection.fragment) { mockData = this._traverseSelections( @@ -687,9 +733,7 @@ class RelayMockPayloadGenerator { // We will pass this data down to selection, so _mockScalar(...) can use // values from `defaults` const selectionPath = [...path, applicationName]; - const typeFromSelection = this._selectionMetadata[ - selectionPath.join('.') - ] ?? { + const typeFromSelection = this._getTypeDetailsForPath(selectionPath) ?? { type: DEFAULT_MOCK_TYPENAME, }; @@ -723,13 +767,19 @@ class RelayMockPayloadGenerator { const isAbstractType = field.concreteType == null && typeName === typeFromSelection.type; - const generateDataForField = (possibleDefaultValue: mixed) => { + const generateDataForField = ( + possibleDefaultValue: mixed, + index?: number, + ) => { + const fieldPath = field.plural + ? [...selectionPath, index?.toString(10) ?? '0'] + : selectionPath; const fieldDefaultValue = this._getDefaultValuesForObject( field.concreteType ?? typeFromSelection.type, field.name, field.alias, - selectionPath, + fieldPath, args, ) ?? possibleDefaultValue; @@ -745,7 +795,7 @@ class RelayMockPayloadGenerator { alias: field.alias, args, }, - [...path, applicationName], + fieldPath, typeof data[applicationName] === 'object' ? // $FlowFixMe[incompatible-variance] data[applicationName] @@ -859,7 +909,7 @@ class RelayMockPayloadGenerator { +nullable: boolean, } { return ( - this._selectionMetadata[selectionPath.join('.')] ?? { + this._getTypeDetailsForPath(selectionPath) ?? { type: field.name === 'id' ? 
'ID' : 'String', plural: false, enumValues: null, @@ -867,6 +917,21 @@ class RelayMockPayloadGenerator { } ); } + + /** + * When selecting metadata, skip the number on plural fields so that every field in the array + * gets the same metadata. + * @private + */ + _getTypeDetailsForPath( + path: $ReadOnlyArray, + ): $Values { + return this._selectionMetadata[ + // When selecting metadata, skip the number on plural fields so that every field in the array + // gets the same metadata. + path.filter(field => isNaN(parseInt(field, 10))).join('.') + ]; + } } /** @@ -877,13 +942,14 @@ function generateData( variables: Variables, mockResolvers: MockResolvers | null, selectionMetadata: SelectionMetadata | null, - options: ?{mockClientData?: boolean}, -): MockData { + options: ?{mockClientData?: boolean, generateDeferredPayload?: boolean}, +): Array { const mockGenerator = new RelayMockPayloadGenerator({ variables, mockResolvers, selectionMetadata, mockClientData: options?.mockClientData, + generateDeferredPayload: options?.generateDeferredPayload, }); let operationType; if (node.name.endsWith('Mutation')) { @@ -893,6 +959,7 @@ function generateData( } else { operationType = 'Query'; } + return mockGenerator.generate(node.selections, operationType); } @@ -940,16 +1007,41 @@ function generateDataForOperation( mockResolvers: ?MockResolvers, options: ?{mockClientData?: boolean}, ): GraphQLSingularResponse { - const data = generateData( - operation.request.node.operation, + const concreteOperation = operation.request.node.operation; + const [initialPayload] = generateData( + concreteOperation, + operation.request.variables, + mockResolvers ?? 
null, + getSelectionMetadataFromOperation(operation), + {...options, generateDeferredPayload: false}, + ); + + return initialPayload; +} + +function generateWithDefer( + operation: OperationDescriptor, + mockResolvers: ?MockResolvers, + options: ?{mockClientData?: boolean, generateDeferredPayload?: boolean}, +): GraphQLResponse { + const {generateDeferredPayload = false, ...otherOptions} = options ?? {}; + const concreteOperation = operation.request.node.operation; + const payloads = generateData( + concreteOperation, operation.request.variables, mockResolvers ?? null, getSelectionMetadataFromOperation(operation), - options, + {...otherOptions, generateDeferredPayload: generateDeferredPayload}, ); - return {data}; + + if (!generateDeferredPayload) { + return payloads[0]; + } + + return payloads; } module.exports = { generate: generateDataForOperation, + generateWithDefer, }; diff --git a/packages/relay-test-utils/RelayModernMockEnvironment.js b/packages/relay-test-utils/RelayModernMockEnvironment.js index 4d7994ff2f1b0..eda5f493a6af9 100644 --- a/packages/relay-test-utils/RelayModernMockEnvironment.js +++ b/packages/relay-test-utils/RelayModernMockEnvironment.js @@ -113,7 +113,7 @@ type MockFunctions = { +complete: (request: ConcreteRequest | OperationDescriptor) => void, +resolve: ( request: ConcreteRequest | OperationDescriptor, - payload: GraphQLSingularResponse, + payload: $ReadOnlyArray | GraphQLSingularResponse, ) => void, +getAllOperations: () => $ReadOnlyArray, +findOperation: ( @@ -400,12 +400,15 @@ function createMockEnvironment( const resolve = ( request: ConcreteRequest | OperationDescriptor, - payload: GraphQLSingularResponse, + response: $ReadOnlyArray | GraphQLSingularResponse, ): void => { getRequests(request).forEach(foundRequest => { const {sink} = foundRequest; invariant(sink !== null, 'Sink should be defined.'); - sink.next(ensureValidPayload(payload)); + const payloads = Array.isArray(response) ? 
response : [response]; + payloads.forEach(payload => { + sink.next(ensureValidPayload(payload)); + }); sink.complete(); }); }; @@ -478,6 +481,7 @@ function createMockEnvironment( environment.executeMutation, ); + // $FlowFixMe[incompatible-type] if (global?.process?.env?.NODE_ENV === 'test') { // Mock all the functions with their original behavior mockDisposableMethod(environment, 'applyUpdate'); diff --git a/packages/relay-test-utils/RelayResolverTestUtils.js b/packages/relay-test-utils/RelayResolverTestUtils.js index 1161e5b9e3c06..b6b084f19a78b 100644 --- a/packages/relay-test-utils/RelayResolverTestUtils.js +++ b/packages/relay-test-utils/RelayResolverTestUtils.js @@ -38,7 +38,7 @@ function testResolver( resolver: ({$data: D, $fragmentRefs: any, $fragmentSpreads: any}) => Ret, // indexed_access is not yet enabled for this code base. Once it is, this can // become: `Key['$data']` - fragmentData: D, + fragmentData: $Diff, ): Ret { const readFragment = ResolverFragments.readFragment; // $FlowFixMe: a test utility, so... YOLO!! diff --git a/packages/relay-test-utils/__flowtests__/RelayResolverTestUtilsFlowTest.js b/packages/relay-test-utils/__flowtests__/RelayResolverTestUtilsFlowTest.js index c4ac1e14e3a26..eeddfffd8a672 100644 --- a/packages/relay-test-utils/__flowtests__/RelayResolverTestUtilsFlowTest.js +++ b/packages/relay-test-utils/__flowtests__/RelayResolverTestUtilsFlowTest.js @@ -30,14 +30,13 @@ function myTestResolver(rootKey: RelayResolverTestUtilsFlowTest$key): string { return `Hello ${user.name ?? 
'stranger'}!`; } -testResolver(myTestResolver, {name: 'Elizabeth', $fragmentType: (null: any)}); +testResolver(myTestResolver, {name: 'Elizabeth'}); // $FlowExpectedError foo is an unexpected key testResolver(myTestResolver, { name: 'Elizabeth', foo: 'bar', - $fragmentType: (null: any), }); // $FlowExpectedError Object is not a string -testResolver(myTestResolver, {name: {}, $fragmentType: (null: any)}); +testResolver(myTestResolver, {name: {}}); diff --git a/packages/relay-test-utils/__tests__/RelayMockEnvironment-test.js b/packages/relay-test-utils/__tests__/RelayMockEnvironment-test.js index 7f70a6de35d01..a319b2b4ed7b4 100644 --- a/packages/relay-test-utils/__tests__/RelayMockEnvironment-test.js +++ b/packages/relay-test-utils/__tests__/RelayMockEnvironment-test.js @@ -11,29 +11,34 @@ 'use strict'; +import type {MockResolvers} from '../RelayMockPayloadGenerator'; +import type {RelayMockEnvironmentTestWithDeferFragment_user$key} from './__generated__/RelayMockEnvironmentTestWithDeferFragment_user.graphql'; + const preloadQuery = require('../../react-relay/relay-hooks/preloadQuery_DEPRECATED'); const RelayEnvironmentProvider = require('../../react-relay/relay-hooks/RelayEnvironmentProvider'); +const useFragment = require('../../react-relay/relay-hooks/useFragment'); +const useLazyLoadQuery = require('../../react-relay/relay-hooks/useLazyLoadQuery'); const usePreloadedQuery = require('../../react-relay/relay-hooks/usePreloadedQuery'); const React = require('react'); const TestRenderer = require('react-test-renderer'); +const {act} = require('react-test-renderer'); const {graphql} = require('relay-runtime'); const { MockPayloadGenerator, createMockEnvironment, } = require('relay-test-utils'); -const query = graphql` - query RelayMockEnvironmentTestQuery($id: ID!) { - node(id: $id) { - id - ... 
on User { - name +describe('when using queuePendingOperation, queueOperationResolver and preloadQuery in tests', () => { + const query = graphql` + query RelayMockEnvironmentTestQuery($id: ID!) { + node(id: $id) { + id + ... on User { + name + } } } - } -`; - -describe('when using queuePendingOperation, queueOperationResolver and preloadQuery in tests', () => { + `; let prefetched; let mockEnvironment; @@ -130,3 +135,88 @@ describe('when using queuePendingOperation, queueOperationResolver and preloadQu }); }); }); + +describe('when generating multiple payloads for deferred data', () => { + const query = graphql` + query RelayMockEnvironmentTestWithDeferQuery($id: ID!) { + node(id: $id) { + id + ... on User { + ...RelayMockEnvironmentTestWithDeferFragment_user @defer + } + } + } + `; + + const fragment = graphql` + fragment RelayMockEnvironmentTestWithDeferFragment_user on User { + name + } + `; + + const render = () => { + const mockEnvironment = createMockEnvironment(); + const variables = {id: '4'}; + + function Component(props: {}) { + const data = useLazyLoadQuery(query, variables); + return ( + <> + {data.node?.id} + {data.node && } + + ); + } + function DeferredComponent(props: { + user: RelayMockEnvironmentTestWithDeferFragment_user$key, + }) { + const data = useFragment(fragment, props.user); + return data?.name; + } + const renderer = TestRenderer.create( + + + + + , + ); + + const isSuspended = () => renderer.toJSON() === 'Fallback'; + + const generateData = (resolvers: MockResolvers) => { + const operation = mockEnvironment.mock.getMostRecentOperation(); + const mockData = MockPayloadGenerator.generateWithDefer( + operation, + resolvers, + {generateDeferredPayload: true}, + ); + mockEnvironment.mock.resolve(operation, mockData); + + act(() => jest.runAllTimers()); + }; + + return { + generateData, + renderer, + isSuspended, + }; + }; + + it('renders the initial and deferred payloads', () => { + const {renderer, isSuspended, generateData} = render(); + + 
expect(isSuspended()).toEqual(true); + + generateData({ + ID() { + return '4'; + }, + String() { + return 'Zuck'; + }, + }); + + expect(isSuspended()).toEqual(false); + expect(renderer.toJSON()).toEqual(['4', 'Zuck']); + }); +}); diff --git a/packages/relay-test-utils/__tests__/RelayMockEnvironmentWithComponents-test.js b/packages/relay-test-utils/__tests__/RelayMockEnvironmentWithComponents-test.js index 7acb5b2e72a77..2e0dd4a69197b 100644 --- a/packages/relay-test-utils/__tests__/RelayMockEnvironmentWithComponents-test.js +++ b/packages/relay-test-utils/__tests__/RelayMockEnvironmentWithComponents-test.js @@ -92,6 +92,10 @@ describe('ReactRelayTestMocker with Containers', () => { it('should resolve query', () => { // Should render loading state expect(() => { + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'loading'); }).not.toThrow(); @@ -108,6 +112,10 @@ describe('ReactRelayTestMocker with Containers', () => { environment.mock.rejectMostRecentOperation(new Error('Uh-oh')); const errorMessage = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'error', ); // Should render error @@ -121,6 +129,10 @@ describe('ReactRelayTestMocker with Containers', () => { ); const errorMessage = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. 
+ // $FlowFixMe[missing-local-annot] node => node.props.testID === 'error', ); // Should render error @@ -164,6 +176,9 @@ describe('ReactRelayTestMocker with Containers', () => { } `; const ProfilePicture = createFragmentContainer( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. // $FlowFixMe[missing-local-annot] Error found while enabling LTI on this file props => { return ( @@ -223,6 +238,10 @@ describe('ReactRelayTestMocker with Containers', () => { }), ); const image = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'profile_picture', ); expect(image.props.src).toBe('http://test.com/image-url'); @@ -370,6 +389,10 @@ describe('ReactRelayTestMocker with Containers', () => { ); }); const list = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'list', ); expect(list.props.children).toBeInstanceOf(Array); @@ -407,6 +430,10 @@ describe('ReactRelayTestMocker with Containers', () => { ); }); const loadMore = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. 
+ // $FlowFixMe[missing-local-annot] node => node.props.testID === 'loadMore', ); expect(loadMore.props.disabled).toBe(false); @@ -416,6 +443,10 @@ describe('ReactRelayTestMocker with Containers', () => { // Should show preloader expect(() => { testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'loadingMore', ); }).not.toThrow(); @@ -446,6 +477,10 @@ describe('ReactRelayTestMocker with Containers', () => { ); }); const list = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'list', ); expect(list.props.children).toBeInstanceOf(Array); @@ -568,11 +603,19 @@ describe('ReactRelayTestMocker with Containers', () => { ); // Make sure we're rendered correct hometown expect( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'hometown') .children, ).toEqual(['PHL']); const refetch = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. 
+ // $FlowFixMe[missing-local-annot] node => node.props.testID === 'refetch', ); ReactTestRenderer.act(() => { @@ -580,6 +623,10 @@ describe('ReactRelayTestMocker with Containers', () => { }); // Should load loading state expect(() => { + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'refetching'); }).not.toThrow(); @@ -604,6 +651,10 @@ describe('ReactRelayTestMocker with Containers', () => { }), ); expect( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'hometown') .children, ).toEqual(['SFO']); @@ -744,6 +795,10 @@ describe('ReactRelayTestMocker with Containers', () => { it('should resolve mutation', () => { const likeButton = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'likeButton', ); expect(likeButton.props.disabled).toBe(false); @@ -783,6 +838,10 @@ describe('ReactRelayTestMocker with Containers', () => { it('should reject mutation', () => { const likeButton = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. 
+ // $FlowFixMe[missing-local-annot] node => node.props.testID === 'likeButton', ); // Should apply optimistic updates @@ -866,6 +925,10 @@ describe('ReactRelayTestMocker with Containers', () => { ); expect( testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'helloMessage', ).children, ).toEqual(['Hello, CAROL!']); @@ -993,6 +1056,10 @@ describe('ReactRelayTestMocker with Containers', () => { }); const reaction = testComponentTree.root.find( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] node => node.props.testID === 'reaction', ); expect(reaction.props.reactionType).toBe('Viewer does not like it'); @@ -1115,10 +1182,18 @@ describe('ReactRelayTestMocker with Containers', () => { }), ); expect( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. + // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'user') .children, ).toEqual(['Alice']); expect( + // In www, this is differently typed (via react-test-renderer.js.flow) than in + // fbsource, so it isn't obvious (without syncing react-test-renderer.js.flow) how + // to get flow to accept this typing. 
+ // $FlowFixMe[missing-local-annot] testComponentTree.root.find(node => node.props.testID === 'page') .children, ).toEqual(['My Page']); diff --git a/packages/relay-test-utils/__tests__/RelayMockPayloadGenerator-test.js b/packages/relay-test-utils/__tests__/RelayMockPayloadGenerator-test.js index 51f976ab55c8c..01f362b5cbbd9 100644 --- a/packages/relay-test-utils/__tests__/RelayMockPayloadGenerator-test.js +++ b/packages/relay-test-utils/__tests__/RelayMockPayloadGenerator-test.js @@ -21,10 +21,11 @@ const {FIXTURE_TAG} = require('relay-test-utils-internal'); function testGeneratedData( query: Query, mockResolvers: ?MockResolvers, - options: ?{mockClientData?: boolean}, + options: ?{mockClientData?: boolean, generateDeferredPayload?: boolean}, + variables: Variables = {}, ): void { - const operation = createOperationDescriptor(query, {}); - const payload = RelayMockPayloadGenerator.generate( + const operation = createOperationDescriptor(query, variables); + const payload = RelayMockPayloadGenerator.generateWithDefer( operation, mockResolvers, options, @@ -1689,3 +1690,268 @@ describe('with @relay_test_operation', () => { }); }); }); + +test('Query with @no_inline fragment spread with literal argument', () => { + const query = graphql` + query RelayMockPayloadGeneratorTest58Query { + node(id: "4") { + ...RelayMockPayloadGeneratorTest_fragment59 @arguments(cond: true) + } + } + `; + graphql` + fragment RelayMockPayloadGeneratorTest_fragment59 on User + @argumentDefinitions(cond: {type: "Boolean", defaultValue: false}) + @no_inline(raw_response_type: true) { + id + name @include(if: $cond) + } + `; + testGeneratedData(query, undefined, { + mockClientData: false, + }); +}); + +test('Query with @no_inline fragment spread with variable argument', () => { + const query = graphql` + query RelayMockPayloadGeneratorTest60Query($cond: Boolean!) 
{ + node(id: "4") { + ...RelayMockPayloadGeneratorTest_fragment61 @arguments(cond: $cond) + } + } + `; + graphql` + fragment RelayMockPayloadGeneratorTest_fragment61 on User + @argumentDefinitions(cond: {type: "Boolean", defaultValue: false}) + @no_inline(raw_response_type: true) { + id + name @include(if: $cond) + } + `; + testGeneratedData( + query, + undefined, + { + mockClientData: false, + }, + { + cond: true, + }, + ); +}); + +test('generate mock for deferred fragments', () => { + graphql` + fragment RelayMockPayloadGeneratorTest61SubFragment on User { + id + name + } + `; + + graphql` + fragment RelayMockPayloadGeneratorTest61Fragment on User { + name + ... on User { + id + friends { + edges { + node { + ...RelayMockPayloadGeneratorTest61SubFragment @defer + } + } + } + } + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest61Query { + node(id: "my-id") { + id + ...RelayMockPayloadGeneratorTest61Fragment @defer + } + } + `, + { + FriendsConnection() { + return { + edges: Array(5).fill(), + }; + }, + }, + {generateDeferredPayload: true}, + ); +}); + +test('generate mock for deferred fragments with if condition true', () => { + graphql` + fragment RelayMockPayloadGeneratorTest62Fragment on User { + name + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest62Query { + node(id: "my-id") { + id + ...RelayMockPayloadGeneratorTest62Fragment @defer(if: true) + } + } + `, + null, + {generateDeferredPayload: true}, + ); +}); + +test('generate mock for deferred fragments with if condition false', () => { + graphql` + fragment RelayMockPayloadGeneratorTest63Fragment on User { + name + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest63Query { + node(id: "my-id") { + id + ...RelayMockPayloadGeneratorTest63Fragment @defer(if: false) + } + } + `, + null, + {generateDeferredPayload: true}, + ); +}); + +test('generate mock for streamed fragments', () => { + graphql` + fragment 
RelayMockPayloadGeneratorTest64Fragment on User { + id + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest64Query { + me { + ... on User { + friends(first: 10) + @stream_connection(initial_count: 4, key: "test-64__friends") { + edges { + node { + ...RelayMockPayloadGeneratorTest64Fragment + } + } + } + } + } + } + `, + null, + {generateDeferredPayload: true}, + ); +}); + +test('generate mock for streamed fragments with if condition true', () => { + graphql` + fragment RelayMockPayloadGeneratorTest65Fragment on User { + id + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest65Query { + me { + ... on User { + friends(first: 10) + @stream_connection( + initial_count: 4 + key: "test-65__friends" + if: true + ) { + edges { + node { + ...RelayMockPayloadGeneratorTest65Fragment + } + } + } + } + } + } + `, + null, + {generateDeferredPayload: true}, + ); +}); + +test('generate mock for streamed fragments with if condition false', () => { + graphql` + fragment RelayMockPayloadGeneratorTest66Fragment on User { + id + } + `; + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest66Query { + me { + ... 
on User { + friends(first: 10) + @stream_connection( + initial_count: 4 + key: "test-66__friends" + if: false + ) { + edges { + node { + ...RelayMockPayloadGeneratorTest66Fragment + } + } + } + } + } + } + `, + null, + {generateDeferredPayload: true}, + ); +}); + +test('should generate data for @match with PlainUserNameRenderer_name and use defaults from mock resolvers', () => { + graphql` + fragment RelayMockPayloadGeneratorTest67Fragment on User { + id + nameRenderer { + ...RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name + @module(name: "PlainUserNameRenderer.react") + } + } + `; + graphql` + fragment RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + } + } + `; + + testGeneratedData( + graphql` + query RelayMockPayloadGeneratorTest67Query @relay_test_operation { + node(id: "my-id") { + ...RelayMockPayloadGeneratorTest67Fragment + } + } + `, + { + UserNameRenderer() { + return { + __typename: 'PlainUserNameRenderer', + __module_operation: require('./__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql'), + data: { + text: 'hello world', + }, + }; + }, + }, + ); +}); diff --git a/packages/relay-test-utils/__tests__/RelayResolverTestUtils-test.js b/packages/relay-test-utils/__tests__/RelayResolverTestUtils-test.js index c7ad07baf1e36..fb80d7c97739e 100644 --- a/packages/relay-test-utils/__tests__/RelayResolverTestUtils-test.js +++ b/packages/relay-test-utils/__tests__/RelayResolverTestUtils-test.js @@ -35,7 +35,6 @@ function myTestResolver(rootKey: RelayResolverTestUtilsTest$key): string { test('testResolver', () => { const input = { name: 'Elizabeth', - $fragmentType: (null: any), }; const actual = testResolver(myTestResolver, input); expect(actual).toBe('Hello Elizabeth!'); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferFragment_user.graphql.js 
b/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferFragment_user.graphql.js new file mode 100644 index 0000000000000..01c82a28c8965 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferFragment_user.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockEnvironmentTestWithDeferFragment_user$fragmentType: FragmentType; +export type RelayMockEnvironmentTestWithDeferFragment_user$data = {| + +name: ?string, + +$fragmentType: RelayMockEnvironmentTestWithDeferFragment_user$fragmentType, +|}; +export type RelayMockEnvironmentTestWithDeferFragment_user$key = { + +$data?: RelayMockEnvironmentTestWithDeferFragment_user$data, + +$fragmentSpreads: RelayMockEnvironmentTestWithDeferFragment_user$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockEnvironmentTestWithDeferFragment_user", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "8dc1299ed092ea4af23f5e39ab2f345a"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockEnvironmentTestWithDeferFragment_user$fragmentType, + RelayMockEnvironmentTestWithDeferFragment_user$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferQuery.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferQuery.graphql.js new file mode 100644 index 0000000000000..0f847d2674021 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockEnvironmentTestWithDeferQuery.graphql.js @@ -0,0 +1,165 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<6d6f8ed199277b8878602e183463f5f0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockEnvironmentTestWithDeferFragment_user$fragmentType } from "./RelayMockEnvironmentTestWithDeferFragment_user.graphql"; +export type RelayMockEnvironmentTestWithDeferQuery$variables = {| + id: string, +|}; +export type RelayMockEnvironmentTestWithDeferQuery$data = {| + +node: ?{| + +id: string, + +$fragmentSpreads: RelayMockEnvironmentTestWithDeferFragment_user$fragmentType, + |}, +|}; +export type RelayMockEnvironmentTestWithDeferQuery = {| + response: RelayMockEnvironmentTestWithDeferQuery$data, + variables: RelayMockEnvironmentTestWithDeferQuery$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "id" + } +], +v1 = [ + { + "kind": "Variable", + "name": "id", + "variableName": "id" + } +], +v2 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayMockEnvironmentTestWithDeferQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "kind": "Defer", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockEnvironmentTestWithDeferFragment_user" + } + ] + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": 
"RelayMockEnvironmentTestWithDeferQuery", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v2/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "if": null, + "kind": "Defer", + "label": "RelayMockEnvironmentTestWithDeferQuery$defer$RelayMockEnvironmentTestWithDeferFragment_user", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "5f15611723acb35ba3ae949224bdb31a", + "id": null, + "metadata": {}, + "name": "RelayMockEnvironmentTestWithDeferQuery", + "operationKind": "query", + "text": "query RelayMockEnvironmentTestWithDeferQuery(\n $id: ID!\n) {\n node(id: $id) {\n __typename\n id\n ... 
on User {\n ...RelayMockEnvironmentTestWithDeferFragment_user @defer(label: \"RelayMockEnvironmentTestWithDeferQuery$defer$RelayMockEnvironmentTestWithDeferFragment_user\")\n }\n }\n}\n\nfragment RelayMockEnvironmentTestWithDeferFragment_user on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "b81417cb4d8ea298c71a3c957694cc4e"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockEnvironmentTestWithDeferQuery$variables, + RelayMockEnvironmentTestWithDeferQuery$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest31Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest31Fragment.graphql.js index 4fdabc724f775..935f3d139ca3d 100644 --- a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest31Fragment.graphql.js +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest31Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<31cf30ca598b2e861f4bf03d02f5b91d>> + * @generated SignedSource<<921f4e90cb9a6c06cf4372d5a44facfc>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest32Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest32Fragment.graphql.js index e20cbb58f96fd..bd1fb39710d35 100644 --- 
a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest32Fragment.graphql.js +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest32Fragment.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<> + * @generated SignedSource<<7393f3d9c615092dcb869ca43ab236e2>> * @flow * @lightSyntaxTransform * @nogrep @@ -59,10 +59,7 @@ var node/*: ReaderFragment*/ = { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -99,7 +96,7 @@ var node/*: ReaderFragment*/ = { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest45Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest45Query.graphql.js index 05a1002a14440..9527acc769714 100644 --- a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest45Query.graphql.js +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest45Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<57a273dda5f8dd12a0900c539d229e92>> + * @generated SignedSource<<6f05906ea903dfc7ef5634d397123f57>> * @flow * @lightSyntaxTransform * @nogrep @@ -124,10 +124,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -165,7 +162,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git 
a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest46Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest46Query.graphql.js index 1fd86787298b0..4768ecf70f609 100644 --- a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest46Query.graphql.js +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest46Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<6a0086abc9a3a76bf312b25eef497c8a>> + * @generated SignedSource<<50ffc09355fbc07ab26d9a062474b9da>> * @flow * @lightSyntaxTransform * @nogrep @@ -124,10 +124,7 @@ return { { "kind": "Literal", "name": "supported", - "value": [ - "PlainUserNameRenderer", - "MarkdownUserNameRenderer" - ] + "value": "34hjiS" } ], "concreteType": null, @@ -165,7 +162,7 @@ return { "abstractKey": null } ], - "storageKey": "nameRenderer(supported:[\"PlainUserNameRenderer\",\"MarkdownUserNameRenderer\"])" + "storageKey": "nameRenderer(supported:\"34hjiS\")" } ], "type": "User", diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest57Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest57Query.graphql.js index b1f514bc407f9..f14302dd16dbd 100644 --- a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest57Query.graphql.js +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest57Query.graphql.js @@ -6,7 +6,7 @@ * * @oncall relay * - * @generated SignedSource<<7d6db75848959f5880f1e508c5d0255e>> + * @generated SignedSource<> * @flow * @lightSyntaxTransform * @nogrep @@ -24,11 +24,11 @@ import {name_passthrough as userNamePassthroughResolverType} from "../../../rela // A type error here indicates that the type signature of the resolver module is incorrect. 
(userNamePassthroughResolverType: ( rootKey: UserNamePassthroughResolver$key, -) => mixed); +) => ?string); export type RelayMockPayloadGeneratorTest57Query$variables = {||}; export type RelayMockPayloadGeneratorTest57Query$data = {| +me: ?{| - +name_passthrough: ?$Call<((...empty[]) => R) => R, typeof userNamePassthroughResolverType>, + +name_passthrough: ?string, |}, |}; export type RelayMockPayloadGeneratorTest57Query = {| @@ -105,7 +105,7 @@ var node/*: ConcreteRequest*/ = { }, "kind": "RelayResolver", "storageKey": null, - "isOutputType": false + "isOutputType": true }, { "alias": null, diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest58Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest58Query.graphql.js new file mode 100644 index 0000000000000..e6dcfce994bd7 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest58Query.graphql.js @@ -0,0 +1,137 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<5be38ced0f7b5fc865350942614026f0>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest_fragment59$fragmentType } from "./RelayMockPayloadGeneratorTest_fragment59.graphql"; +export type RelayMockPayloadGeneratorTest58Query$variables = {||}; +export type RelayMockPayloadGeneratorTest58Query$data = {| + +node: ?{| + +$fragmentSpreads: RelayMockPayloadGeneratorTest_fragment59$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest58Query = {| + response: RelayMockPayloadGeneratorTest58Query$data, + variables: RelayMockPayloadGeneratorTest58Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "id", + "value": "4" + } +]; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest58Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": [ + { + "kind": "Literal", + "name": "cond", + "value": true + } + ], + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest_fragment59" + } + ], + "storageKey": "node(id:\"4\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest58Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + { + "args": [ + { + "kind": "Literal", + "name": 
"RelayMockPayloadGeneratorTest_fragment59$cond", + "value": true + } + ], + "fragment": require('./RelayMockPayloadGeneratorTest_fragment59$normalization.graphql'), + "kind": "FragmentSpread" + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": "node(id:\"4\")" + } + ] + }, + "params": { + "cacheID": "5a6272973d0967c473047a457505a252", + "id": null, + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest58Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest58Query {\n node(id: \"4\") {\n __typename\n ...RelayMockPayloadGeneratorTest_fragment59_22eGLd\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest_fragment59_22eGLd on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "7207fa27437b4334d2950de9e7043322"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest58Query$variables, + RelayMockPayloadGeneratorTest58Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest60Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest60Query.graphql.js new file mode 100644 index 0000000000000..cf94aeca80f08 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest60Query.graphql.js @@ -0,0 +1,146 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest_fragment61$fragmentType } from "./RelayMockPayloadGeneratorTest_fragment61.graphql"; +export type RelayMockPayloadGeneratorTest60Query$variables = {| + cond: boolean, +|}; +export type RelayMockPayloadGeneratorTest60Query$data = {| + +node: ?{| + +$fragmentSpreads: RelayMockPayloadGeneratorTest_fragment61$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest60Query = {| + response: RelayMockPayloadGeneratorTest60Query$data, + variables: RelayMockPayloadGeneratorTest60Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "defaultValue": null, + "kind": "LocalArgument", + "name": "cond" + } +], +v1 = [ + { + "kind": "Literal", + "name": "id", + "value": "4" + } +]; +return { + "fragment": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest60Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": [ + { + "kind": "Variable", + "name": "cond", + "variableName": "cond" + } + ], + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest_fragment61" + } + ], + "storageKey": "node(id:\"4\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": (v0/*: any*/), + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest60Query", + "selections": [ + { + "alias": null, + "args": (v1/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + 
"storageKey": null + }, + { + "args": [ + { + "kind": "Variable", + "name": "RelayMockPayloadGeneratorTest_fragment61$cond", + "variableName": "cond" + } + ], + "fragment": require('./RelayMockPayloadGeneratorTest_fragment61$normalization.graphql'), + "kind": "FragmentSpread" + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": "node(id:\"4\")" + } + ] + }, + "params": { + "cacheID": "fea43ee9ba842a5f60a79b84f0bcb503", + "id": null, + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest60Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest60Query(\n $cond: Boolean!\n) {\n node(id: \"4\") {\n __typename\n ...RelayMockPayloadGeneratorTest_fragment61_yuQoQ\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest_fragment61_yuQoQ on User {\n id\n name @include(if: $cond)\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "3deee5972aa6d5f988a035cc01194d24"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest60Query$variables, + RelayMockPayloadGeneratorTest60Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Fragment.graphql.js new file mode 100644 index 0000000000000..b6088284842ef --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Fragment.graphql.js @@ -0,0 +1,118 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<2950a4af508db05f20089599ee9bf66a>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest61SubFragment$fragmentType } from "./RelayMockPayloadGeneratorTest61SubFragment.graphql"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest61Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest61Fragment$data = {| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + +id: string, + +name: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest61Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest61Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest61Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest61Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest61Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "kind": "Defer", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest61SubFragment" + } + ] + } + ], + "storageKey": null + } + ], + 
"storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "853420f56943a6c9654b9c942463c857"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest61Fragment$fragmentType, + RelayMockPayloadGeneratorTest61Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Query.graphql.js new file mode 100644 index 0000000000000..16298de8d3430 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61Query.graphql.js @@ -0,0 +1,194 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest61Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest61Fragment.graphql"; +export type RelayMockPayloadGeneratorTest61Query$variables = {||}; +export type RelayMockPayloadGeneratorTest61Query$data = {| + +node: ?{| + +id: string, + +$fragmentSpreads: RelayMockPayloadGeneratorTest61Fragment$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest61Query = {| + response: RelayMockPayloadGeneratorTest61Query$data, + variables: RelayMockPayloadGeneratorTest61Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "id", + "value": "my-id" + } +], +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}, +v2 = { + "alias": null, + "args": null, + "kind": 
"ScalarField", + "name": "name", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest61Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v1/*: any*/), + { + "kind": "Defer", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest61Fragment" + } + ] + } + ], + "storageKey": "node(id:\"my-id\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest61Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v1/*: any*/), + { + "if": null, + "kind": "Defer", + "label": "RelayMockPayloadGeneratorTest61Query$defer$RelayMockPayloadGeneratorTest61Fragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + (v2/*: any*/), + (v1/*: any*/), + { + "alias": null, + "args": null, + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "if": null, + "kind": "Defer", + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "selections": [ + (v1/*: any*/), + (v2/*: any*/) + ] + }, + (v1/*: any*/) + ], + 
"storageKey": null + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": "node(id:\"my-id\")" + } + ] + }, + "params": { + "cacheID": "aafc055b85b18a9924f0cb18b78d581d", + "id": null, + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest61Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest61Query {\n node(id: \"my-id\") {\n __typename\n id\n ...RelayMockPayloadGeneratorTest61Fragment @defer(label: \"RelayMockPayloadGeneratorTest61Query$defer$RelayMockPayloadGeneratorTest61Fragment\")\n }\n}\n\nfragment RelayMockPayloadGeneratorTest61Fragment on User {\n name\n id\n friends {\n edges {\n node {\n ...RelayMockPayloadGeneratorTest61SubFragment @defer(label: \"RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment\")\n id\n }\n }\n }\n}\n\nfragment RelayMockPayloadGeneratorTest61SubFragment on User {\n id\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "2f6426e08b7bf1bc520a2e62d89805ff"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest61Query$variables, + RelayMockPayloadGeneratorTest61Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61SubFragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61SubFragment.graphql.js new file mode 100644 index 0000000000000..89d9e446234bd --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest61SubFragment.graphql.js @@ -0,0 +1,67 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<3b49c6ad600457731a71a9f8cb5dd0c2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest61SubFragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest61SubFragment$data = {| + +id: string, + +name: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest61SubFragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest61SubFragment$key = { + +$data?: RelayMockPayloadGeneratorTest61SubFragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest61SubFragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest61SubFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "3c22efb98076b78b92f80b216c9ebe16"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest61SubFragment$fragmentType, + RelayMockPayloadGeneratorTest61SubFragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Fragment.graphql.js new file mode 100644 index 0000000000000..581f5927247a1 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<25752eb32ba8c5af150b9a9dd4fb6e02>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest62Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest62Fragment$data = {| + +name: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest62Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest62Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest62Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest62Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest62Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "87cd4b83c43a1d7b629878f7ecdec518"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest62Fragment$fragmentType, + RelayMockPayloadGeneratorTest62Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Query.graphql.js new file mode 100644 index 0000000000000..925596d761aa5 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest62Query.graphql.js @@ -0,0 +1,149 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<2a20a6b2373df9fd85348e0e9aa406c5>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest62Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest62Fragment.graphql"; +export type RelayMockPayloadGeneratorTest62Query$variables = {||}; +export type RelayMockPayloadGeneratorTest62Query$data = {| + +node: ?{| + +id: string, + +$fragmentSpreads: RelayMockPayloadGeneratorTest62Fragment$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest62Query = {| + response: RelayMockPayloadGeneratorTest62Query$data, + variables: RelayMockPayloadGeneratorTest62Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "id", + "value": "my-id" + } +], +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest62Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v1/*: any*/), + { + "kind": "Defer", + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest62Fragment" + } + ] + } + ], + "storageKey": "node(id:\"my-id\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest62Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": 
"LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + (v1/*: any*/), + { + "if": null, + "kind": "Defer", + "label": "RelayMockPayloadGeneratorTest62Query$defer$RelayMockPayloadGeneratorTest62Fragment", + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ] + } + ], + "storageKey": "node(id:\"my-id\")" + } + ] + }, + "params": { + "cacheID": "555b245d7d41b5e70f0378297709a7ce", + "id": null, + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest62Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest62Query {\n node(id: \"my-id\") {\n __typename\n id\n ...RelayMockPayloadGeneratorTest62Fragment @defer(label: \"RelayMockPayloadGeneratorTest62Query$defer$RelayMockPayloadGeneratorTest62Fragment\", if: true)\n }\n}\n\nfragment RelayMockPayloadGeneratorTest62Fragment on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "b01069bf80c36d8dae8d719a55af39a7"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest62Query$variables, + RelayMockPayloadGeneratorTest62Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Fragment.graphql.js new file mode 100644 index 0000000000000..aafc8d2fe69bd --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<0c5d1a69aacaec5bf6f0aa6052fb2bc2>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest63Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest63Fragment$data = {| + +name: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest63Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest63Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest63Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest63Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest63Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "d578188aec90e32d00b5f17443580737"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest63Fragment$fragmentType, + RelayMockPayloadGeneratorTest63Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Query.graphql.js new file mode 100644 index 0000000000000..80a91becddc5c --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest63Query.graphql.js @@ -0,0 +1,137 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<5aff17f172699964d5438d70948bd4f7>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest63Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest63Fragment.graphql"; +export type RelayMockPayloadGeneratorTest63Query$variables = {||}; +export type RelayMockPayloadGeneratorTest63Query$data = {| + +node: ?{| + +id: string, + +$fragmentSpreads: RelayMockPayloadGeneratorTest63Fragment$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest63Query = {| + response: RelayMockPayloadGeneratorTest63Query$data, + variables: RelayMockPayloadGeneratorTest63Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "id", + "value": "my-id" + } +], +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest63Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v1/*: any*/), + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest63Fragment" + } + ], + "storageKey": "node(id:\"my-id\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest63Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null + }, + 
(v1/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": "node(id:\"my-id\")" + } + ] + }, + "params": { + "cacheID": "6e1f88b885f100be4bd640bd33bf071c", + "id": null, + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest63Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest63Query {\n node(id: \"my-id\") {\n __typename\n id\n ...RelayMockPayloadGeneratorTest63Fragment\n }\n}\n\nfragment RelayMockPayloadGeneratorTest63Fragment on User {\n name\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "5407f8130e4f883d6d9f133941b5f0dd"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest63Query$variables, + RelayMockPayloadGeneratorTest63Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Fragment.graphql.js new file mode 100644 index 0000000000000..86a025d75048d --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<80259745704bf22d349ae4e6e55bc886>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest64Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest64Fragment$data = {| + +id: string, + +$fragmentType: RelayMockPayloadGeneratorTest64Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest64Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest64Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest64Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest64Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "9ba464667e581d8c4e01b05d763cdc0c"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest64Fragment$fragmentType, + RelayMockPayloadGeneratorTest64Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Query.graphql.js new file mode 100644 index 0000000000000..3244ce84021c9 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest64Query.graphql.js @@ -0,0 +1,276 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<701a48a755da5a935358ed06500c9b86>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest64Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest64Fragment.graphql"; +export type RelayMockPayloadGeneratorTest64Query$variables = {||}; +export type RelayMockPayloadGeneratorTest64Query$data = {| + +me: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, +|}; +export type RelayMockPayloadGeneratorTest64Query = {| + response: RelayMockPayloadGeneratorTest64Query$data, + variables: RelayMockPayloadGeneratorTest64Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = [ + { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null + } +], +v3 = [ + { + "kind": "Literal", + "name": "first", + "value": 10 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest64Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + 
"concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "__test-64__friends_connection", + "plural": false, + "selections": [ + { + "kind": "Stream", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest64Fragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + } + ] + }, + { + "kind": "Defer", + "selections": (v2/*: any*/) + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest64Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "if": null, + "kind": "Stream", + "label": "RelayMockPayloadGeneratorTest64Query$stream$test-64__friends", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + } + ] + }, + { + "if": null, + "kind": "Defer", + "label": "RelayMockPayloadGeneratorTest64Query$defer$test-64__friends$pageInfo", + "selections": 
(v2/*: any*/) + } + ], + "storageKey": "friends(first:10)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "test-64__friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "eabbf98536e253e43e87fba3960e5d6b", + "id": null, + "metadata": { + "connection": [ + { + "count": null, + "cursor": null, + "direction": "forward", + "path": [ + "me", + "friends" + ], + "stream": true + } + ] + }, + "name": "RelayMockPayloadGeneratorTest64Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest64Query {\n me {\n friends(first: 10) {\n edges @stream(label: \"RelayMockPayloadGeneratorTest64Query$stream$test-64__friends\", initial_count: 4) {\n node {\n ...RelayMockPayloadGeneratorTest64Fragment\n id\n __typename\n }\n cursor\n }\n ... @defer(label: \"RelayMockPayloadGeneratorTest64Query$defer$test-64__friends$pageInfo\") {\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest64Fragment on User {\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "d6bbd2f2c4ce30f659a885858e9729f4"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest64Query$variables, + RelayMockPayloadGeneratorTest64Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Fragment.graphql.js new file mode 100644 index 0000000000000..24e795eeb99c0 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<84e8920f16ea348e5b84ef7d02c76afb>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest65Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest65Fragment$data = {| + +id: string, + +$fragmentType: RelayMockPayloadGeneratorTest65Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest65Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest65Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest65Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest65Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "3831685a691916b28305c9d55694bb7d"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest65Fragment$fragmentType, + RelayMockPayloadGeneratorTest65Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Query.graphql.js new file mode 100644 index 0000000000000..5a546d980b927 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest65Query.graphql.js @@ -0,0 +1,276 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<080a1211044f9ff3052b965393cde290>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest65Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest65Fragment.graphql"; +export type RelayMockPayloadGeneratorTest65Query$variables = {||}; +export type RelayMockPayloadGeneratorTest65Query$data = {| + +me: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, +|}; +export type RelayMockPayloadGeneratorTest65Query = {| + response: RelayMockPayloadGeneratorTest65Query$data, + variables: RelayMockPayloadGeneratorTest65Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = [ + { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null + } +], +v3 = [ + { + "kind": "Literal", + "name": "first", + "value": 10 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest65Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + 
"concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "__test-65__friends_connection", + "plural": false, + "selections": [ + { + "kind": "Stream", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest65Fragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + } + ] + }, + { + "kind": "Defer", + "selections": (v2/*: any*/) + } + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest65Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "if": null, + "kind": "Stream", + "label": "RelayMockPayloadGeneratorTest65Query$stream$test-65__friends", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + } + ] + }, + { + "if": null, + "kind": "Defer", + "label": "RelayMockPayloadGeneratorTest65Query$defer$test-65__friends$pageInfo", + "selections": 
(v2/*: any*/) + } + ], + "storageKey": "friends(first:10)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "test-65__friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "8b0d7d696dc7e2ee0da0a59a77edee4d", + "id": null, + "metadata": { + "connection": [ + { + "count": null, + "cursor": null, + "direction": "forward", + "path": [ + "me", + "friends" + ], + "stream": true + } + ] + }, + "name": "RelayMockPayloadGeneratorTest65Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest65Query {\n me {\n friends(first: 10) {\n edges @stream(label: \"RelayMockPayloadGeneratorTest65Query$stream$test-65__friends\", if: true, initial_count: 4) {\n node {\n ...RelayMockPayloadGeneratorTest65Fragment\n id\n __typename\n }\n cursor\n }\n ... @defer(label: \"RelayMockPayloadGeneratorTest65Query$defer$test-65__friends$pageInfo\", if: true) {\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest65Fragment on User {\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "3fb480ea57361c868a4a0f0cfbc91aab"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest65Query$variables, + RelayMockPayloadGeneratorTest65Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Fragment.graphql.js new file mode 100644 index 0000000000000..e7c0d56e13e95 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Fragment.graphql.js @@ -0,0 +1,59 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest66Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest66Fragment$data = {| + +id: string, + +$fragmentType: RelayMockPayloadGeneratorTest66Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest66Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest66Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest66Fragment$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest66Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "c46b0829f0de24d28fd4df2f2472280f"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest66Fragment$fragmentType, + RelayMockPayloadGeneratorTest66Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Query.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Query.graphql.js new file mode 100644 index 0000000000000..22e050c73505d --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest66Query.graphql.js @@ -0,0 +1,254 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest66Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest66Fragment.graphql"; +export type RelayMockPayloadGeneratorTest66Query$variables = {||}; +export type RelayMockPayloadGeneratorTest66Query$data = {| + +me: ?{| + +friends: ?{| + +edges: ?$ReadOnlyArray, + |}, + |}, +|}; +export type RelayMockPayloadGeneratorTest66Query = {| + response: RelayMockPayloadGeneratorTest66Query$data, + variables: RelayMockPayloadGeneratorTest66Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "cursor", + "storageKey": null +}, +v2 = { + "alias": null, + "args": null, + "concreteType": "PageInfo", + "kind": "LinkedField", + "name": "pageInfo", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "endCursor", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "hasNextPage", + "storageKey": null + } + ], + "storageKey": null +}, +v3 = [ + { + "kind": "Literal", + "name": "first", + "value": 10 + } +], +v4 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest66Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": "friends", + "args": null, + "concreteType": "FriendsConnection", + "kind": 
"LinkedField", + "name": "__test-66__friends_connection", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest66Fragment" + }, + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest66Query", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "me", + "plural": false, + "selections": [ + { + "alias": null, + "args": (v3/*: any*/), + "concreteType": "FriendsConnection", + "kind": "LinkedField", + "name": "friends", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "FriendsEdge", + "kind": "LinkedField", + "name": "edges", + "plural": true, + "selections": [ + { + "alias": null, + "args": null, + "concreteType": "User", + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v4/*: any*/), + (v0/*: any*/) + ], + "storageKey": null + }, + (v1/*: any*/) + ], + "storageKey": null + }, + (v2/*: any*/) + ], + "storageKey": "friends(first:10)" + }, + { + "alias": null, + "args": (v3/*: any*/), + "filters": null, + "handle": "connection", + "key": "test-66__friends", + "kind": "LinkedHandle", + "name": "friends" + }, + (v4/*: any*/) + ], + "storageKey": null + } + ] + }, + "params": { + "cacheID": "ded78218afe15784db44e997d84a41f7", + "id": null, + "metadata": { + "connection": [ + { + "count": 
null, + "cursor": null, + "direction": "forward", + "path": [ + "me", + "friends" + ], + "stream": true + } + ] + }, + "name": "RelayMockPayloadGeneratorTest66Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest66Query {\n me {\n friends(first: 10) {\n edges {\n node {\n ...RelayMockPayloadGeneratorTest66Fragment\n id\n __typename\n }\n cursor\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest66Fragment on User {\n id\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "b92d71278394a4845d13fc6206966c02"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest66Query$variables, + RelayMockPayloadGeneratorTest66Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Fragment.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Fragment.graphql.js new file mode 100644 index 0000000000000..10c7457933361 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Fragment.graphql.js @@ -0,0 +1,92 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @dataDrivenDependency RelayMockPayloadGeneratorTest67Fragment.nameRenderer {"branches":{"PlainUserNameRenderer":{"component":"PlainUserNameRenderer.react","fragment":"RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql"}},"plural":false} + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType } from "./RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name.graphql"; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest67Fragment$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest67Fragment$data = {| + +id: string, + +nameRenderer: ?{| + +__fragmentPropName?: ?string, + +__module_component?: ?string, + +$fragmentSpreads: RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType, + |}, + +$fragmentType: RelayMockPayloadGeneratorTest67Fragment$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest67Fragment$key = { + +$data?: RelayMockPayloadGeneratorTest67Fragment$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest67Fragment$fragmentType, + ... 
+}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest67Fragment", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "nameRenderer", + "plural": false, + "selections": [ + { + "kind": "InlineFragment", + "selections": [ + { + "args": null, + "documentName": "RelayMockPayloadGeneratorTest67Fragment", + "fragmentName": "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name", + "fragmentPropName": "name", + "kind": "ModuleImport" + } + ], + "type": "PlainUserNameRenderer", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "f0621398b162c8e470bd7beb63d0d78a"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest67Fragment$fragmentType, + RelayMockPayloadGeneratorTest67Fragment$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql.js new file mode 100644 index 0000000000000..984a4e2b41e28 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql.js @@ -0,0 +1,68 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "kind": "SplitOperation", + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "plaintext", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "PlainUserNameData", + "kind": "LinkedField", + "name": "data", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "text", + "storageKey": null + }, + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + } + ], + "storageKey": null + } + ] +}; + +if (__DEV__) { + (node/*: any*/).hash = "6292a26c9ba4d0e57ddcc19c901e1cbe"; +} + +module.exports = node; diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name.graphql.js new file mode 100644 index 0000000000000..7747ae034909f --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name.graphql.js @@ -0,0 +1,80 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<91e22cebe4e9def2c1d15095ed5522be>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$data = {| + +data: ?{| + +text: ?string, + |}, + +plaintext: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$key = { + +$data?: RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "plaintext", + "storageKey": null + }, + { + "alias": null, + "args": null, + "concreteType": "PlainUserNameData", + "kind": "LinkedField", + "name": "data", + "plural": false, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "text", + "storageKey": null + } + ], + "storageKey": null + } + ], + "type": "PlainUserNameRenderer", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "6292a26c9ba4d0e57ddcc19c901e1cbe"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$fragmentType, + RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Query.graphql.js 
b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Query.graphql.js new file mode 100644 index 0000000000000..d3e35f35fa3aa --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest67Query.graphql.js @@ -0,0 +1,213 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +// @indirectDataDrivenDependency RelayMockPayloadGeneratorTest67Fragment.nameRenderer {"branches":{"PlainUserNameRenderer":{"component":"PlainUserNameRenderer.react","fragment":"RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql"}},"plural":false} + +/*:: +import type { ConcreteRequest, Query } from 'relay-runtime'; +import type { RelayMockPayloadGeneratorTest67Fragment$fragmentType } from "./RelayMockPayloadGeneratorTest67Fragment.graphql"; +export type RelayMockPayloadGeneratorTest67Query$variables = {||}; +export type RelayMockPayloadGeneratorTest67Query$data = {| + +node: ?{| + +$fragmentSpreads: RelayMockPayloadGeneratorTest67Fragment$fragmentType, + |}, +|}; +export type RelayMockPayloadGeneratorTest67Query = {| + response: RelayMockPayloadGeneratorTest67Query$data, + variables: RelayMockPayloadGeneratorTest67Query$variables, +|}; +*/ + +var node/*: ConcreteRequest*/ = (function(){ +var v0 = [ + { + "kind": "Literal", + "name": "id", + "value": "my-id" + } +], +v1 = { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "__typename", + "storageKey": null +}, +v2 = { + "enumValues": null, + "nullable": false, + "plural": false, + "type": "String" +}, +v3 = { + "enumValues": null, + "nullable": true, + "plural": false, + "type": "JSDependency" +}, +v4 = { + "enumValues": null, + "nullable": 
true, + "plural": false, + "type": "String" +}; +return { + "fragment": { + "argumentDefinitions": [], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest67Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + { + "args": null, + "kind": "FragmentSpread", + "name": "RelayMockPayloadGeneratorTest67Fragment" + } + ], + "storageKey": "node(id:\"my-id\")" + } + ], + "type": "Query", + "abstractKey": null + }, + "kind": "Request", + "operation": { + "argumentDefinitions": [], + "kind": "Operation", + "name": "RelayMockPayloadGeneratorTest67Query", + "selections": [ + { + "alias": null, + "args": (v0/*: any*/), + "concreteType": null, + "kind": "LinkedField", + "name": "node", + "plural": false, + "selections": [ + (v1/*: any*/), + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "kind": "InlineFragment", + "selections": [ + { + "alias": null, + "args": null, + "concreteType": null, + "kind": "LinkedField", + "name": "nameRenderer", + "plural": false, + "selections": [ + (v1/*: any*/), + { + "kind": "InlineFragment", + "selections": [ + { + "args": null, + "documentName": "RelayMockPayloadGeneratorTest67Fragment", + "fragmentName": "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name", + "fragmentPropName": "name", + "kind": "ModuleImport" + } + ], + "type": "PlainUserNameRenderer", + "abstractKey": null + } + ], + "storageKey": null + } + ], + "type": "User", + "abstractKey": null + } + ], + "storageKey": "node(id:\"my-id\")" + } + ] + }, + "params": { + "cacheID": "0ae903db9d578aaf4dd7019f4d024645", + "id": null, + "metadata": { + "relayTestingSelectionTypeInfo": { + "node": { + "enumValues": null, + "nullable": true, + "plural": false, + "type": "Node" + }, + "node.__typename": (v2/*: any*/), + "node.id": { + "enumValues": null, + "nullable": false, + 
"plural": false, + "type": "ID" + }, + "node.nameRenderer": { + "enumValues": null, + "nullable": true, + "plural": false, + "type": "UserNameRenderer" + }, + "node.nameRenderer.__module_component_RelayMockPayloadGeneratorTest67Fragment": (v3/*: any*/), + "node.nameRenderer.__module_operation_RelayMockPayloadGeneratorTest67Fragment": (v3/*: any*/), + "node.nameRenderer.__typename": (v2/*: any*/), + "node.nameRenderer.data": { + "enumValues": null, + "nullable": true, + "plural": false, + "type": "PlainUserNameData" + }, + "node.nameRenderer.data.id": { + "enumValues": null, + "nullable": true, + "plural": false, + "type": "ID" + }, + "node.nameRenderer.data.text": (v4/*: any*/), + "node.nameRenderer.plaintext": (v4/*: any*/) + } + }, + "name": "RelayMockPayloadGeneratorTest67Query", + "operationKind": "query", + "text": "query RelayMockPayloadGeneratorTest67Query {\n node(id: \"my-id\") {\n __typename\n ...RelayMockPayloadGeneratorTest67Fragment\n id\n }\n}\n\nfragment RelayMockPayloadGeneratorTest67Fragment on User {\n id\n nameRenderer {\n __typename\n ... 
on PlainUserNameRenderer {\n ...RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name\n __module_operation_RelayMockPayloadGeneratorTest67Fragment: js(module: \"RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql\", id: \"RelayMockPayloadGeneratorTest67Fragment.nameRenderer\")\n __module_component_RelayMockPayloadGeneratorTest67Fragment: js(module: \"PlainUserNameRenderer.react\", id: \"RelayMockPayloadGeneratorTest67Fragment.nameRenderer\")\n }\n }\n}\n\nfragment RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name on PlainUserNameRenderer {\n plaintext\n data {\n text\n id\n }\n}\n" + } +}; +})(); + +if (__DEV__) { + (node/*: any*/).hash = "09729a7c122a5fedbea96ed60b70ee3b"; +} + +module.exports = ((node/*: any*/)/*: Query< + RelayMockPayloadGeneratorTest67Query$variables, + RelayMockPayloadGeneratorTest67Query$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59$normalization.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59$normalization.graphql.js new file mode 100644 index 0000000000000..c7b2a6453db73 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59$normalization.graphql.js @@ -0,0 +1,69 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +export type RelayMockPayloadGeneratorTest_fragment59$normalization = {| + +id: string, + +name: ?string, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "argumentDefinitions": [ + { + "defaultValue": false, + "kind": "LocalArgument", + "name": "RelayMockPayloadGeneratorTest_fragment59$cond" + } + ], + "kind": "SplitOperation", + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest_fragment59$normalization", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "RelayMockPayloadGeneratorTest_fragment59$cond", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ] +}; + +if (__DEV__) { + (node/*: any*/).hash = "efeafd8b46bc5e1d5e9deb6e69637ccc"; +} + +module.exports = node; diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59.graphql.js new file mode 100644 index 0000000000000..ee19a8722579f --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment59.graphql.js @@ -0,0 +1,80 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @oncall relay + * + * @generated SignedSource<<05d635030c8cf95ac5f389fc1fa09a0c>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest_fragment59$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest_fragment59$data = {| + +id: string, + +name?: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest_fragment59$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest_fragment59$key = { + +$data?: RelayMockPayloadGeneratorTest_fragment59$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest_fragment59$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [ + { + "defaultValue": false, + "kind": "LocalArgument", + "name": "cond" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest_fragment59", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "cond", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "efeafd8b46bc5e1d5e9deb6e69637ccc"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest_fragment59$fragmentType, + RelayMockPayloadGeneratorTest_fragment59$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61$normalization.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61$normalization.graphql.js new file mode 100644 index 0000000000000..f74056539c116 --- /dev/null +++ 
b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61$normalization.graphql.js @@ -0,0 +1,69 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { NormalizationSplitOperation } from 'relay-runtime'; + +export type RelayMockPayloadGeneratorTest_fragment61$normalization = {| + +id: string, + +name: ?string, +|}; + +*/ + +var node/*: NormalizationSplitOperation*/ = { + "argumentDefinitions": [ + { + "defaultValue": false, + "kind": "LocalArgument", + "name": "RelayMockPayloadGeneratorTest_fragment61$cond" + } + ], + "kind": "SplitOperation", + "metadata": {}, + "name": "RelayMockPayloadGeneratorTest_fragment61$normalization", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "RelayMockPayloadGeneratorTest_fragment61$cond", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ] +}; + +if (__DEV__) { + (node/*: any*/).hash = "25e64d959ac400af76cce1c64d022f38"; +} + +module.exports = node; diff --git a/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61.graphql.js b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61.graphql.js new file mode 100644 index 0000000000000..5d4ac3e872253 --- /dev/null +++ b/packages/relay-test-utils/__tests__/__generated__/RelayMockPayloadGeneratorTest_fragment61.graphql.js @@ -0,0 +1,80 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @oncall relay + * + * @generated SignedSource<<699a048863dd7792d83ed44763f446ed>> + * @flow + * @lightSyntaxTransform + * @nogrep + */ + +/* eslint-disable */ + +'use strict'; + +/*:: +import type { Fragment, ReaderFragment } from 'relay-runtime'; +import type { FragmentType } from "relay-runtime"; +declare export opaque type RelayMockPayloadGeneratorTest_fragment61$fragmentType: FragmentType; +export type RelayMockPayloadGeneratorTest_fragment61$data = {| + +id: string, + +name?: ?string, + +$fragmentType: RelayMockPayloadGeneratorTest_fragment61$fragmentType, +|}; +export type RelayMockPayloadGeneratorTest_fragment61$key = { + +$data?: RelayMockPayloadGeneratorTest_fragment61$data, + +$fragmentSpreads: RelayMockPayloadGeneratorTest_fragment61$fragmentType, + ... +}; +*/ + +var node/*: ReaderFragment*/ = { + "argumentDefinitions": [ + { + "defaultValue": false, + "kind": "LocalArgument", + "name": "cond" + } + ], + "kind": "Fragment", + "metadata": null, + "name": "RelayMockPayloadGeneratorTest_fragment61", + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "id", + "storageKey": null + }, + { + "condition": "cond", + "kind": "Condition", + "passingValue": true, + "selections": [ + { + "alias": null, + "args": null, + "kind": "ScalarField", + "name": "name", + "storageKey": null + } + ] + } + ], + "type": "User", + "abstractKey": null +}; + +if (__DEV__) { + (node/*: any*/).hash = "25e64d959ac400af76cce1c64d022f38"; +} + +module.exports = ((node/*: any*/)/*: Fragment< + RelayMockPayloadGeneratorTest_fragment61$fragmentType, + RelayMockPayloadGeneratorTest_fragment61$data, +>*/); diff --git a/packages/relay-test-utils/__tests__/__snapshots__/RelayMockPayloadGenerator-test.js.snap b/packages/relay-test-utils/__tests__/__snapshots__/RelayMockPayloadGenerator-test.js.snap index 
e54e8f1907eb5..afa05ebff0cb9 100644 --- a/packages/relay-test-utils/__tests__/__snapshots__/RelayMockPayloadGenerator-test.js.snap +++ b/packages/relay-test-utils/__tests__/__snapshots__/RelayMockPayloadGenerator-test.js.snap @@ -1,5 +1,61 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP +exports[`Query with @no_inline fragment spread with literal argument 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest58Query { + node(id: "4") { + __typename + ...RelayMockPayloadGeneratorTest_fragment59_22eGLd + id + } +} + +fragment RelayMockPayloadGeneratorTest_fragment59_22eGLd on User { + id + name +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +{ + "data": { + "node": { + "__typename": "__MockObject", + "id": "", + "name": "" + } + } +} +`; + +exports[`Query with @no_inline fragment spread with variable argument 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest60Query( + $cond: Boolean! +) { + node(id: "4") { + __typename + ...RelayMockPayloadGeneratorTest_fragment61_yuQoQ + id + } +} + +fragment RelayMockPayloadGeneratorTest_fragment61_yuQoQ on User { + id + name @include(if: $cond) +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +{ + "data": { + "node": { + "__typename": "__MockObject", + "id": "", + "name": "" + } + } +} +`; + exports[`check context in the mock resolver 1`] = ` ~~~~~~~~~~ INPUT ~~~~~~~~~~ query RelayMockPayloadGeneratorTest11Query { @@ -118,6 +174,222 @@ fragment RelayMockPayloadGeneratorTest14Fragment on User { } `; +exports[`generate mock for deferred fragments 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest61Query { + node(id: "my-id") { + __typename + id + ...RelayMockPayloadGeneratorTest61Fragment @defer(label: "RelayMockPayloadGeneratorTest61Query$defer$RelayMockPayloadGeneratorTest61Fragment") + } +} + +fragment RelayMockPayloadGeneratorTest61Fragment on User { + name + id + friends { + edges { + node { + ...RelayMockPayloadGeneratorTest61SubFragment @defer(label: 
"RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment") + id + } + } + } +} + +fragment RelayMockPayloadGeneratorTest61SubFragment on User { + id + name +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "node": { + "__typename": "__MockObject", + "id": "" + } + } + }, + { + "path": [ + "node", + "friends", + "edges", + "0", + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "data": { + "id": "", + "name": "" + } + }, + { + "path": [ + "node", + "friends", + "edges", + "1", + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "data": { + "id": "", + "name": "" + } + }, + { + "path": [ + "node", + "friends", + "edges", + "2", + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "data": { + "id": "", + "name": "" + } + }, + { + "path": [ + "node", + "friends", + "edges", + "3", + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "data": { + "id": "", + "name": "" + } + }, + { + "path": [ + "node", + "friends", + "edges", + "4", + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Fragment$defer$RelayMockPayloadGeneratorTest61SubFragment", + "data": { + "id": "", + "name": "" + } + }, + { + "path": [ + "node" + ], + "label": "RelayMockPayloadGeneratorTest61Query$defer$RelayMockPayloadGeneratorTest61Fragment", + "data": { + "__typename": "User", + "name": "", + "id": "", + "friends": { + "edges": [ + { + "node": { + "id": "" + } + }, + { + "node": { + "id": "" + } + }, + { + "node": { + "id": "" + } + }, + { + "node": { + "id": "" + } + }, + { + "node": { + "id": "" + } + } + ] + } + } + } +] +`; + +exports[`generate mock for deferred fragments with if condition false 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest63Query { + node(id: 
"my-id") { + __typename + id + ...RelayMockPayloadGeneratorTest63Fragment + } +} + +fragment RelayMockPayloadGeneratorTest63Fragment on User { + name +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "node": { + "__typename": "User", + "id": "", + "name": "" + } + } + } +] +`; + +exports[`generate mock for deferred fragments with if condition true 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest62Query { + node(id: "my-id") { + __typename + id + ...RelayMockPayloadGeneratorTest62Fragment @defer(label: "RelayMockPayloadGeneratorTest62Query$defer$RelayMockPayloadGeneratorTest62Fragment", if: true) + } +} + +fragment RelayMockPayloadGeneratorTest62Fragment on User { + name +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "node": { + "__typename": "__MockObject", + "id": "" + } + } + }, + { + "path": [ + "node" + ], + "label": "RelayMockPayloadGeneratorTest62Query$defer$RelayMockPayloadGeneratorTest62Fragment", + "data": { + "__typename": "User", + "name": "" + } + } +] +`; + exports[`generate mock for fragment with @argumentsDefinition 1`] = ` ~~~~~~~~~~ INPUT ~~~~~~~~~~ query RelayMockPayloadGeneratorTest15Query( @@ -336,6 +608,203 @@ fragment RelayMockPayloadGeneratorTestFragment on User { } `; +exports[`generate mock for streamed fragments 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest64Query { + me { + friends(first: 10) { + edges @stream(label: "RelayMockPayloadGeneratorTest64Query$stream$test-64__friends", initial_count: 4) { + node { + ...RelayMockPayloadGeneratorTest64Fragment + id + __typename + } + cursor + } + ... 
@defer(label: "RelayMockPayloadGeneratorTest64Query$defer$test-64__friends$pageInfo") { + pageInfo { + endCursor + hasNextPage + } + } + } + id + } +} + +fragment RelayMockPayloadGeneratorTest64Fragment on User { + id +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "me": { + "friends": {}, + "id": "" + } + } + }, + { + "path": [ + "me", + "friends" + ], + "label": "RelayMockPayloadGeneratorTest64Query$stream$test-64__friends", + "data": { + "edges": [ + { + "node": { + "id": "", + "__typename": "User" + }, + "cursor": "" + } + ] + } + }, + { + "path": [ + "me", + "friends" + ], + "label": "RelayMockPayloadGeneratorTest64Query$defer$test-64__friends$pageInfo", + "data": { + "pageInfo": { + "endCursor": "", + "hasNextPage": "" + } + } + } +] +`; + +exports[`generate mock for streamed fragments with if condition false 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest66Query { + me { + friends(first: 10) { + edges { + node { + ...RelayMockPayloadGeneratorTest66Fragment + id + __typename + } + cursor + } + pageInfo { + endCursor + hasNextPage + } + } + id + } +} + +fragment RelayMockPayloadGeneratorTest66Fragment on User { + id +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "me": { + "friends": { + "edges": [ + { + "node": { + "id": "", + "__typename": "User" + }, + "cursor": "" + } + ], + "pageInfo": { + "endCursor": "", + "hasNextPage": "" + } + }, + "id": "" + } + } + } +] +`; + +exports[`generate mock for streamed fragments with if condition true 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest65Query { + me { + friends(first: 10) { + edges @stream(label: "RelayMockPayloadGeneratorTest65Query$stream$test-65__friends", if: true, initial_count: 4) { + node { + ...RelayMockPayloadGeneratorTest65Fragment + id + __typename + } + cursor + } + ... 
@defer(label: "RelayMockPayloadGeneratorTest65Query$defer$test-65__friends$pageInfo", if: true) { + pageInfo { + endCursor + hasNextPage + } + } + } + id + } +} + +fragment RelayMockPayloadGeneratorTest65Fragment on User { + id +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +[ + { + "data": { + "me": { + "friends": {}, + "id": "" + } + } + }, + { + "path": [ + "me", + "friends" + ], + "label": "RelayMockPayloadGeneratorTest65Query$stream$test-65__friends", + "data": { + "edges": [ + { + "node": { + "id": "", + "__typename": "User" + }, + "cursor": "" + } + ] + } + }, + { + "path": [ + "me", + "friends" + ], + "label": "RelayMockPayloadGeneratorTest65Query$defer$test-65__friends$pageInfo", + "data": { + "pageInfo": { + "endCursor": "", + "hasNextPage": "" + } + } + } +] +`; + exports[`generate mock for with directives and handlers 1`] = ` ~~~~~~~~~~ INPUT ~~~~~~~~~~ query RelayMockPayloadGeneratorTest18Query( @@ -950,7 +1419,7 @@ fragment RelayMockPayloadGeneratorTest12Fragment on Page { "comment": { "id": "", "message": { - "text": "Comment text: node>myTown>feedback>comments>edges>comment" + "text": "Comment text: node>myTown>feedback>comments>edges>0>comment" }, "likeSentence": { "text": "" @@ -1018,6 +1487,56 @@ fragment RelayMockPayloadGeneratorTest13Fragment on Viewer { } `; +exports[`should generate data for @match with PlainUserNameRenderer_name and use defaults from mock resolvers 1`] = ` +~~~~~~~~~~ INPUT ~~~~~~~~~~ +query RelayMockPayloadGeneratorTest67Query { + node(id: "my-id") { + __typename + ...RelayMockPayloadGeneratorTest67Fragment + id + } +} + +fragment RelayMockPayloadGeneratorTest67Fragment on User { + id + nameRenderer { + __typename + ... 
on PlainUserNameRenderer { + ...RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name + __module_operation_RelayMockPayloadGeneratorTest67Fragment: js(module: "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization.graphql", id: "RelayMockPayloadGeneratorTest67Fragment.nameRenderer") + __module_component_RelayMockPayloadGeneratorTest67Fragment: js(module: "PlainUserNameRenderer.react", id: "RelayMockPayloadGeneratorTest67Fragment.nameRenderer") + } + } +} + +fragment RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name on PlainUserNameRenderer { + plaintext + data { + text + id + } +} + +~~~~~~~~~~ OUTPUT ~~~~~~~~~~ +{ + "data": { + "node": { + "__typename": "User", + "id": "", + "nameRenderer": { + "__typename": "PlainUserNameRenderer", + "__module_operation_RelayMockPayloadGeneratorTest67Fragment": "RelayMockPayloadGeneratorTest67PlainUserNameRenderer_name$normalization", + "plaintext": "", + "data": { + "text": "hello world", + "id": "" + } + } + } + } +} +`; + exports[`should return \`null\` for selection if that is specified in default values 1`] = ` ~~~~~~~~~~ INPUT ~~~~~~~~~~ query RelayMockPayloadGeneratorTest19Query { diff --git a/packages/relay-test-utils/index.js b/packages/relay-test-utils/index.js index 0428dfc5f6fe9..43625e9bf9000 100644 --- a/packages/relay-test-utils/index.js +++ b/packages/relay-test-utils/index.js @@ -17,6 +17,7 @@ const testResolver = require('./RelayResolverTestUtils'); const unwrapContainer = require('./unwrapContainer'); export type {MockResolvers} from './RelayMockPayloadGenerator'; +export type {RelayMockEnvironment} from './RelayModernMockEnvironment'; /** * The public interface to Relay Test Utils. 
diff --git a/packages/relay-test-utils/package.json b/packages/relay-test-utils/package.json index af5790f6c29cc..f0fe7c9687aec 100644 --- a/packages/relay-test-utils/package.json +++ b/packages/relay-test-utils/package.json @@ -1,7 +1,7 @@ { "name": "relay-test-utils", "description": "Utilities for testing Relay applications.", - "version": "15.0.0", + "version": "17.0.0", "keywords": [ "graphql", "relay" @@ -18,7 +18,7 @@ "@babel/runtime": "^7.0.0", "fbjs": "^3.0.2", "invariant": "^2.2.4", - "relay-runtime": "15.0.0" + "relay-runtime": "17.0.0" }, "directories": { "": "./" diff --git a/packages/relay-test-utils/unwrapContainer.js b/packages/relay-test-utils/unwrapContainer.js index f98d6a72ee7ac..0c5d7d4d5db94 100644 --- a/packages/relay-test-utils/unwrapContainer.js +++ b/packages/relay-test-utils/unwrapContainer.js @@ -11,7 +11,6 @@ 'use strict'; -import type React from 'React'; import type { $RelayProps, RelayPaginationProp, @@ -25,10 +24,10 @@ const invariant = require('invariant'); * Returns original component class wrapped by e.g. 
createFragmentContainer */ function unwrapContainer( - ComponentClass: React.ComponentType< + ComponentClass: React$ComponentType< $RelayProps, >, -): React.ComponentType { +): React$ComponentType { // $FlowExpectedError const unwrapped = ComponentClass.__ComponentClass; invariant( diff --git a/scripts/config.tests.json b/scripts/config.tests.json index a1705a9d7e41d..8cc37cd0996c6 100644 --- a/scripts/config.tests.json +++ b/scripts/config.tests.json @@ -26,9 +26,24 @@ "OpaqueScalarType": {"name": "OpaqueScalarType", "path": "../OpaqueScalarType"} }, "jsModuleFormat": "commonjs", + "schemaConfig": { + "nonNodeIdFields": { + "allowedIdTypes": { + "IDFieldTests": "IDFieldIsID", + "NonNode": "NonNodeID" + } + }, + "deferStreamInterface": { + "deferName": "defer", + "streamName": "stream", + "ifArg": "if", + "labelArg": "label", + "initialCountArg": "initial_count", + "useCustomizedBatchArg": "use_customized_batch" + } + }, "featureFlags": { "enable_relay_resolver_transform": true, - "enable_flight_transform": true, "no_inline": { "kind": "enabled" }, @@ -39,19 +54,10 @@ "kind": "enabled" }, "emit_normalization_nodes_for_client_edges": true, - "relay_resolver_enable_output_type": { - "kind": "limited", - "allowlist": [ - "example_client_object", - "many_todos", - "todo", - "todos", - "blocked_by", - "text" - ] - } + "relay_resolver_enable_interface_output_type": { "kind": "enabled" } }, - "language": "flow" + "language": "flow", + "experimentalEmitSemanticNullabilityTypes": true } }, "isDevVariableName": "__DEV__" diff --git a/scripts/jest/environment.js b/scripts/jest/environment.js index 4bb285e95a45f..870c6dcfea6ed 100644 --- a/scripts/jest/environment.js +++ b/scripts/jest/environment.js @@ -10,6 +10,7 @@ 'use strict'; +global.IS_REACT_ACT_ENVIRONMENT = true; global.__DEV__ = true; require('@babel/runtime/regenerator'); diff --git a/scripts/jest/preprocessor.js b/scripts/jest/preprocessor.js index b1b39851eee81..b7e5909367a79 100644 --- 
a/scripts/jest/preprocessor.js +++ b/scripts/jest/preprocessor.js @@ -18,7 +18,7 @@ const path = require('path'); const babelOptions = getBabelOptions({ env: 'test', - // Tests use a Promise polfill so they can use jest.runAllTimers(). + // Tests use a Promise polyfill so they can use jest.runAllTimers(). autoImport: true, plugins: [ './dist/babel-plugin-relay', @@ -28,6 +28,7 @@ const babelOptions = getBabelOptions({ '@babel/plugin-proposal-optional-catch-binding', '@babel/plugin-proposal-optional-chaining', '@babel/plugin-transform-async-to-generator', + 'babel-plugin-syntax-hermes-parser', ], }); @@ -37,7 +38,7 @@ module.exports = { filename: filename, retainLines: true, }); - return babel.transform(src, options).code; + return babel.transform(src, options); }, getCacheKey: createCacheKeyFunction([ diff --git a/scripts/release-notes.js b/scripts/release-notes.js new file mode 100644 index 0000000000000..4ffdc9592c8ff --- /dev/null +++ b/scripts/release-notes.js @@ -0,0 +1,120 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @oncall relay + */ + +'use strict'; + +const {execSync} = require('child_process'); +const {existsSync, readFileSync} = require('fs'); +const http = require('http'); + +/** + * This function will create a simple HTTP server that will show the list + * of commits to the Relay repo since the last release. 
+ * + * To run the app: + * ``` + * node ./scripts/release-notes.js + * ``` + * + * And follow the link, printed in console: http://localhost:3123 + */ +function main() { + log('Generating release notes...'); + + const server = http.createServer((request, response) => { + // Supported Static Resources + if (request.url.endsWith('.css') || request.url.endsWith('.js')) { + const path = `./scripts/release-notes/${request.url}`; + if (!existsSync(path)) { + response.writeHead(404, {'Content-Type': 'text/plain'}); + response.write('Not Found.'); + response.end(); + return; + } + + const data = readFileSync(path); + response.writeHead(200, {'Content-Type': 'text/plain'}); + response.write(data); + response.end(); + return; + } + + const [commits, lastRelease] = getData(); + + response.writeHead(200, {'Content-Type': 'text/html'}); + response.write( + ` + + + + + + Relay commits since ${lastRelease} + + + +
+ + + + + `, + ); + response.end(); + }); + + const PORT = 3123; + server.listen(PORT); + log(`Release notes App started at http://localhost:${PORT}`); +} + +function getData() { + const lastRelease = execSync('git describe --tags --abbrev=0') + .toString() + .trim(); + + const listOfCommits = execSync( + `git log --pretty=format:"%h|%ai|%aN|%ae" ${lastRelease}...`, + ).toString(); + + const summary = execSync(`git log --pretty=format:"%s" ${lastRelease}...`) + .toString() + .split('\n'); + + const body = execSync( + `git log --pretty=format:"%b" ${lastRelease}...`, + ) + .toString() + .split('\n'); + const commits = listOfCommits.split('\n').map((commitMessage, index) => { + const [hash, date, name, _email] = commitMessage.split('|'); + return { + hash, + summary: summary[index], + message: body[index], + author: name, + date, + }; + }); + + return [commits, lastRelease]; +} + +function log(message) { + // eslint-disable-next-line no-console + console.log(message); +} + +main(); diff --git a/scripts/release-notes/App.js b/scripts/release-notes/App.js new file mode 100644 index 0000000000000..c166d862159d4 --- /dev/null +++ b/scripts/release-notes/App.js @@ -0,0 +1,242 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ * + * @format + * @oncall relay + */ + +'use strict'; + +/* eslint-disable react/react-in-jsx-scope*/ + +const {useEffect, useState} = React; + +const CATEGORIES = [ + ['BUGFIX', 3], + ['IMPROVEMENTS', 4], + ['DOCS', 5], + ['NEW_API', 2], + ['BREAKING', 1], + ['MISC', 6], + ['EXPERIMENTAL', 7], + ['SKIP', 8], +]; + +const CATEGORIES_NAMES = { + BUGFIX: 'Bug fixes', + IMPROVEMENTS: 'Improvements', + DOCS: 'Documentation Improvements', + NEW_API: 'Added', + BREAKING: 'Breaking Changes', + MISC: 'Miscellaneous', + EXPERIMENTAL: 'Experimental Changes', + SKIP: 'Skipped in Release Notes', +}; + +const REPO_URL = 'https://github.com/facebook/relay'; + +const CommitCard = ({ + message, + summary, + author, + date, + selectedCategory, + onCategoryChange, +}) => { + const handleClick = event => { + const category = CATEGORIES.findIndex(cat => cat[0] === selectedCategory); + let nextCategory; + if (event.type === 'contextmenu') { + event.preventDefault(); + nextCategory = -1; + } else { + nextCategory = category + 1; + if (nextCategory == CATEGORIES.length) { + nextCategory = -1; // Reset selected category + } + } + onCategoryChange(nextCategory > -1 ? CATEGORIES[nextCategory][0] : null); + }; + + return ( + + ); +}; + +// eslint-disable-next-line no-unused-vars +function App({commits, lastRelease}) { + let initialState = localStorage.getItem('selectedCategories'); + if (initialState != null) { + initialState = JSON.parse(initialState); + } else { + initialState = {}; + } + const [selectedCategories, setSelectedCategories] = useState(initialState); + useEffect(() => { + localStorage.setItem( + 'selectedCategories', + JSON.stringify(selectedCategories), + ); + }, [selectedCategories]); + + return ( + <> +

Relay commits since {lastRelease}

+
+

+ Click on the commit card to change its category. Possible categories + are:{' '} 

+ +
+
+
+ {commits.map((commit, index) => { + return ( + { + setSelectedCategories({ + ...selectedCategories, + [commit.hash]: category, + }); + }} + /> + ); + })} +
+
+ +
+
+ + ); +} + +function Categories() { + return ( +
    + {CATEGORIES.map(([category]) => { + return ( +
  • + {category} +
  • + ); + })} +
+ ); +} + +function GeneratedReleaseNotes({commits, selectedCategories, lastRelease}) { + const categorizedCommits = new Map(); + const categories = Array.from(CATEGORIES); + categories + .sort(([, orderA], [, orderB]) => orderA - orderB) + .forEach(([category]) => { + categorizedCommits.set(category, []); + }); + + let hasBreakingChanges = false; + let hasNewApi = false; + + const nonCategorizedCommits = []; + + commits.forEach(commit => { + const commitCategory = selectedCategories[commit.hash]; + if (commitCategory != null) { + const categoryCommits = categorizedCommits.get(commitCategory); + if (categoryCommits != null) { + categoryCommits.push(commit); + if (commitCategory === 'BREAKING') { + hasBreakingChanges = true; + } + if (commitCategory === 'NEW_API') { + hasNewApi = true; + } + } + } else { + nonCategorizedCommits.push(commit); + } + }); + + return ( +
+

+ Version {nextReleaseVersion(lastRelease, hasBreakingChanges, hasNewApi)}{' '} + Release Notes +

+
+ {Array.from(categorizedCommits).map(([category, commits]) => { + if (commits.length) { + return ( +
+

{CATEGORIES_NAMES[category]}

+ +
+ ); + } else { + return null; + } + })} +

Non-categorized commits

+ +
+
+ ); +} + +function CommitList({commits}) { + return ( +
+ ); +} + +function nextReleaseVersion(lastRelease, hasBreakingChanges, hasNewApi) { + const [major, minor, patch] = lastRelease.replace('v', '').split('.'); + if (hasBreakingChanges) { + return `${next(major)}.0.0`; + } else if (hasNewApi) { + return `${major}.${next(minor)}.0`; + } else { + return `${major}.${minor}.${next(patch)}`; + } +} + +function next(versionStr) { + return parseInt(versionStr, 10) + 1; +} + +function capitalize(string) { + return string.charAt(0).toUpperCase() + string.slice(1); +} diff --git a/scripts/release-notes/style.css b/scripts/release-notes/style.css new file mode 100644 index 0000000000000..387393f0f3e43 --- /dev/null +++ b/scripts/release-notes/style.css @@ -0,0 +1,120 @@ +/** + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + * + * @format + * @oncall relay + */ + +body { + font-family: Arial; +} +h1 { + font-size: 3rem; + font-weight: 300; + margin-bottom: 1rem; +} +h2 { + font-size: 1.2rem; + font-weight: 500; +} +.commits { + display: flex; + flex-direction: row; + flex-wrap: wrap; + justify-content: space-between; + align-items: flex-start; +} +.commit { + border-radius: 8px; + box-shadow: 0 1px 3px rgba(0,0,0,0.2); + padding: 16px; + box-sizing: border-box; + margin: 8px auto; + width: 320px; + overflow: hidden; + border: none; + text-align: left; +} +.commit .summary { + font-size: 1.1rem; +} +.commit .author { + font-weight: 600; +} +.instructions { + padding: 8px; + position: sticky; + top: 0; + background: rgba(255,255,255,0.6); +} +.categories { + display: flex; + margin: 0; + padding: 0; +} + +.category { + margin: 0; + padding: 0; + list-style-type: none; + box-sizing: border-box; + margin-left: 8px; + padding: 4px; + border: 1px solid rgba(0,0,0,0.2); +} + +.BREAKING { + background-color: tomato; +} + +.NEW_API { + background-color:forestgreen; + color: white; +} + +.BUGFIX { + 
background-color:orange; +} + +.IMPROVEMENTS { + background-color: cornflowerblue; +} + +.DOCS { + background-color:gold +} + +.MISC { + background-color: darkgray; + color: lightblue; +} + +.SKIP { + background-color: lightgray; + color: darkgray; + opacity: 0.3; +} + +.EXPERIMENTAL { + background-color:chartreuse; + color: blueviolet; +} + +.layout { + display: grid; + grid-template-columns: 60% 40% +} + +.release_notes { + position: relative; +} + +.release_notes_content { + position: sticky; + top: 0; + overflow-y: scroll; + font-family: monospace; +} diff --git a/scripts/testDependencies.js b/scripts/testDependencies.js index 6333943df539d..4897b296a8715 100644 --- a/scripts/testDependencies.js +++ b/scripts/testDependencies.js @@ -41,10 +41,9 @@ function testDependencies(topLevelPackagePath, packagePaths) { function testPackageDependencies(topLevelPackagePath, packagePath) { const errors = []; - const topLevelPackageJson = require(path.join( - topLevelPackagePath, - 'package.json', - )); + const topLevelPackageJson = require( + path.join(topLevelPackagePath, 'package.json'), + ); const packageJson = require(path.join(packagePath, 'package.json')); const packageName = path.basename(packagePath); diff --git a/vscode-extension/.eslintrc.js b/vscode-extension/.eslintrc.js index ba27aa4114af9..11cf525d26339 100644 --- a/vscode-extension/.eslintrc.js +++ b/vscode-extension/.eslintrc.js @@ -1,5 +1,8 @@ /** - * (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary. + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
* * @format */ @@ -33,6 +36,7 @@ module.exports = { 'error', {functions: false}, ], + 'class-methods-use-this': 'off', }, }, ], diff --git a/vscode-extension/.prettierignore b/vscode-extension/.prettierignore new file mode 100644 index 0000000000000..d0176c3c1ecfe --- /dev/null +++ b/vscode-extension/.prettierignore @@ -0,0 +1,2 @@ +out/** +**.vsix diff --git a/vscode-extension/.vscodeignore b/vscode-extension/.vscodeignore new file mode 100644 index 0000000000000..d424430c68c49 --- /dev/null +++ b/vscode-extension/.vscodeignore @@ -0,0 +1,7 @@ +.vscode/** +src/** +.gitignore +**/tsconfig.json +**/.eslintrc.json +**/*.map +**/*.ts \ No newline at end of file diff --git a/vscode-extension/README.md b/vscode-extension/README.md index 554c0a1151035..aac7ec0874f7b 100644 --- a/vscode-extension/README.md +++ b/vscode-extension/README.md @@ -70,6 +70,15 @@ Path to a Relay config relative to the `rootDirectory`. Without this, the compil An array of project configuration in the form `{name: string, rootDirectory: string, pathToConfig: string}`. If omitted, it is assumed your workspace uses a single Relay config and the compiler will search for your config file. But you can also use this configuration if your Relay config is in a nested directory. This configuration must be used if your workspace has multiple Relay projects, each with their own config file. +#### `relay.pathToLocateCommand` (default: `null`) + +Path to a script to look up the actual definition for a GraphQL entity for implementation-first GraphQL schemas. This script will be called for "goto definition" requests to the LSP instead of opening the schema. +The script will be called with 2 arguments. The first will be the relay project name, the second will be either "Type" or "Type.field" (a type or the field of a type, respectively). 
+The script must respond with a single line of output matching "/absolute/file/path:1:2" where "1" is the line number in the file and "2" is the character on that line that the definition starts with. If it fails +to match this pattern (or the script fails to execute for some reason) the GraphQL schema will be opened as a fallback. + +This option requires >15.0.0 of the Relay compiler to function. + ## Features - IntelliSense diff --git a/vscode-extension/package.json b/vscode-extension/package.json index 33bc64f4c7fdc..132b624aa279a 100644 --- a/vscode-extension/package.json +++ b/vscode-extension/package.json @@ -1,7 +1,7 @@ { "name": "relay", "displayName": "Relay GraphQL", - "version": "2.0.0", + "version": "2.3.0", "description": "Relay-powered IDE experience", "repository": { "type": "git", @@ -20,7 +20,8 @@ "onLanguage:javascript", "onLanguage:javascriptreact", "onLanguage:typescript", - "onLanguage:typescriptreact" + "onLanguage:typescriptreact", + "onLanguage:graphql" ], "extensionDependencies": [ "GraphQL.vscode-graphql-syntax" @@ -68,6 +69,15 @@ ], "description": "Controls what is logged to the Output Channel for the Relay language server." }, + "relay.pathToLocateCommand": { + "scope": "workspace", + "default": null, + "type": [ + "string", + "null" + ], + "description": "Path to an optional script to look up the actual definition for a GraphQL entity for implementation-first GraphQL schemas." 
+ }, "relay.pathToRelay": { "scope": "workspace", "default": null, @@ -115,15 +125,18 @@ "typecheck": "tsc --noEmit", "prettier-check": "prettier -c .", "lint": "eslint --max-warnings 0 .", - "vscode:prepublish": "tsc", - "build-local": "vsce package" + "vscode:prepublish": "rm -f tsconfig.tsbuildinfo && rm -rf out && yarn run esbuild-base -- --minify", + "build-local": "vsce package", + "esbuild-base": "esbuild ./src/extension.ts --bundle --outfile=out/extension.js --external:vscode --format=cjs --platform=node", + "esbuild": "yarn run esbuild-base --sourcemap", + "esbuild-watch": "yarn run esbuild-base --sourcemap --watch" }, "engines": { "vscode": "^1.60.0" }, "dependencies": { "semver": "^7.3.7", - "vscode-languageclient": "^7.0.0" + "vscode-languageclient": "^9.0.1" }, "devDependencies": { "@types/node": "^17.0.23", @@ -131,12 +144,13 @@ "@types/vscode": "^1.60.0", "@typescript-eslint/eslint-plugin": "^5.13.0", "@typescript-eslint/parser": "^5.0.0", - "eslint": "^8.12.0", + "@vscode/vsce": "^2.18.0", + "esbuild": "^0.17.12", + "eslint": "^8.19.0", "eslint-config-airbnb-base": "^15.0.0", "eslint-config-airbnb-typescript": "^17.0.0", "eslint-plugin-import": "^2.26.0", "prettier": "^2.6.2", - "typescript": "^4.6.3", - "vsce": "^2.7.0" + "typescript": "^4.6.3" } } diff --git a/vscode-extension/src/compiler.ts b/vscode-extension/src/compiler.ts index a8bc7b4fa14f3..5e5a9c8ef389d 100644 --- a/vscode-extension/src/compiler.ts +++ b/vscode-extension/src/compiler.ts @@ -25,6 +25,7 @@ export function createAndStartCompiler(context: RelayExtensionContext) { const terminal = window.createTerminal({ name: 'Relay Compiler', cwd: context.relayBinaryExecutionOptions.rootPath, + isTransient: true, }); terminal.sendText( diff --git a/vscode-extension/src/config.ts b/vscode-extension/src/config.ts index 9097f315961d0..15d15ce044530 100644 --- a/vscode-extension/src/config.ts +++ b/vscode-extension/src/config.ts @@ -11,6 +11,7 @@ export type Config = { rootDirectory: string | 
null; pathToRelay: string | null; pathToConfig: string | null; + pathToLocateCommand: string | null; lspOutputLevel: string; compilerOutpuLevel: string; autoStartCompiler: boolean; @@ -22,7 +23,8 @@ export function getConfig(scope?: ConfigurationScope): Config { return { pathToRelay: configuration.get('pathToRelay') ?? null, pathToConfig: configuration.get('pathToConfig') ?? null, - lspOutputLevel: configuration.get('lspOutputLevel') ?? 'quiet-with-errros', + pathToLocateCommand: configuration.get('pathToLocateCommand') ?? null, + lspOutputLevel: configuration.get('lspOutputLevel') ?? 'quiet-with-errors', compilerOutpuLevel: configuration.get('compilerOutputLevel') ?? 'info', rootDirectory: configuration.get('rootDirectory') ?? null, autoStartCompiler: configuration.get('autoStartCompiler') ?? false, diff --git a/vscode-extension/src/errorHandler.ts b/vscode-extension/src/errorHandler.ts index b99a4598ebc08..65ca7cf4aaa6e 100644 --- a/vscode-extension/src/errorHandler.ts +++ b/vscode-extension/src/errorHandler.ts @@ -41,7 +41,9 @@ export function createErrorHandler( } }); - return CloseAction.DoNotRestart; + return { + action: CloseAction.DoNotRestart, + }; }, // This `error` callback should probably never happen. 
🙏 error() { @@ -57,7 +59,9 @@ export function createErrorHandler( } }); - return ErrorAction.Continue; + return { + action: ErrorAction.Continue, + }; }, }; } diff --git a/vscode-extension/src/languageClient.ts b/vscode-extension/src/languageClient.ts index 89028d70dee8a..80a3851f8b8ae 100644 --- a/vscode-extension/src/languageClient.ts +++ b/vscode-extension/src/languageClient.ts @@ -30,6 +30,10 @@ export function createAndStartLanguageClient(context: RelayExtensionContext) { args.push(config.pathToConfig); } + if (config.pathToLocateCommand) { + args.push(`--locateCommand=${config.pathToLocateCommand}`); + } + const serverOptions: ServerOptions = { options: { cwd: context.relayBinaryExecutionOptions.rootPath, @@ -51,6 +55,7 @@ export function createAndStartLanguageClient(context: RelayExtensionContext) { {scheme: 'file', language: 'typescript'}, {scheme: 'file', language: 'typescriptreact'}, {scheme: 'file', language: 'javascriptreact'}, + {scheme: 'file', language: 'graphql'}, ], outputChannel: context.lspOutputChannel, diff --git a/vscode-extension/src/lspStatusBarFeature.ts b/vscode-extension/src/lspStatusBarFeature.ts index 362f9544a0598..c15822c1948b3 100644 --- a/vscode-extension/src/lspStatusBarFeature.ts +++ b/vscode-extension/src/lspStatusBarFeature.ts @@ -10,6 +10,7 @@ import { StaticFeature, InitializeParams, RequestType, + FeatureState, } from 'vscode-languageclient'; import {RelayExtensionContext} from './context'; @@ -116,13 +117,12 @@ export function handleShowStatusMethod( } } -// This StaticFeature is solely responsible for intercepting -// window/showStatus commands from the LSP Server and displaying -// those messages on the client status bar. +// This StaticFeature is solely responsible for intercepting window/showStatus +// commands from the LSP Server and displaying those messages on the client +// status bar. 
// -// The StatusBarItem creation does not happen here since we may -// want to use the status bar to display messages before we -// get messages from the LSP server. +// The StatusBarItem creation does not happen here since we may want to use the +// status bar to display messages before we get messages from the LSP server. // e.g. Looking for Relay binary... export class LSPStatusBarFeature implements StaticFeature { private context: RelayExtensionContext; @@ -133,6 +133,15 @@ export class LSPStatusBarFeature implements StaticFeature { this.context = context; } + getState(): FeatureState { + return {kind: 'static'}; + } + + clear(): void { + this.context.statusBar.hide(); + this.disposable?.dispose(); + } + fillInitializeParams?: ((params: InitializeParams) => void) | undefined; // eslint-disable-next-line class-methods-use-this @@ -146,8 +155,4 @@ export class LSPStatusBarFeature implements StaticFeature { }, ); } - - dispose(): void { - this.disposable?.dispose(); - } } diff --git a/vscode-extension/tsconfig.json b/vscode-extension/tsconfig.json index 63563ce4a1829..0b36a3e0e344c 100644 --- a/vscode-extension/tsconfig.json +++ b/vscode-extension/tsconfig.json @@ -5,7 +5,7 @@ "lib": ["es2020"], "outDir": "out", "rootDir": "src", - "sourceMap": true, + "sourceMap": false, "incremental": true, "strict": true }, diff --git a/vscode-extension/yarn.lock b/vscode-extension/yarn.lock new file mode 100644 index 0000000000000..d154e6fa5ac6e --- /dev/null +++ b/vscode-extension/yarn.lock @@ -0,0 +1,2276 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@esbuild/android-arm64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.15.tgz#893ad71f3920ccb919e1757c387756a9bca2ef42" + integrity sha512-0kOB6Y7Br3KDVgHeg8PRcvfLkq+AccreK///B4Z6fNZGr/tNHX0z2VywCc7PTeWp+bPvjA5WMvNXltHw5QjAIA== + +"@esbuild/android-arm@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.15.tgz#143e0d4e4c08c786ea410b9a7739779a9a1315d8" + integrity sha512-sRSOVlLawAktpMvDyJIkdLI/c/kdRTOqo8t6ImVxg8yT7LQDUYV5Rp2FKeEosLr6ZCja9UjYAzyRSxGteSJPYg== + +"@esbuild/android-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.15.tgz#d2d12a7676b2589864281b2274355200916540bc" + integrity sha512-MzDqnNajQZ63YkaUWVl9uuhcWyEyh69HGpMIrf+acR4otMkfLJ4sUCxqwbCyPGicE9dVlrysI3lMcDBjGiBBcQ== + +"@esbuild/darwin-arm64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.15.tgz#2e88e79f1d327a2a7d9d06397e5232eb0a473d61" + integrity sha512-7siLjBc88Z4+6qkMDxPT2juf2e8SJxmsbNVKFY2ifWCDT72v5YJz9arlvBw5oB4W/e61H1+HDB/jnu8nNg0rLA== + +"@esbuild/darwin-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.15.tgz#9384e64c0be91388c57be6d3a5eaf1c32a99c91d" + integrity sha512-NbImBas2rXwYI52BOKTW342Tm3LTeVlaOQ4QPZ7XuWNKiO226DisFk/RyPk3T0CKZkKMuU69yOvlapJEmax7cg== + +"@esbuild/freebsd-arm64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.15.tgz#2ad5a35bc52ebd9ca6b845dbc59ba39647a93c1a" + integrity sha512-Xk9xMDjBVG6CfgoqlVczHAdJnCs0/oeFOspFap5NkYAmRCT2qTn1vJWA2f419iMtsHSLm+O8B6SLV/HlY5cYKg== + +"@esbuild/freebsd-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.15.tgz#b513a48446f96c75fda5bef470e64d342d4379cd" + integrity 
sha512-3TWAnnEOdclvb2pnfsTWtdwthPfOz7qAfcwDLcfZyGJwm1SRZIMOeB5FODVhnM93mFSPsHB9b/PmxNNbSnd0RQ== + +"@esbuild/linux-arm64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.15.tgz#9697b168175bfd41fa9cc4a72dd0d48f24715f31" + integrity sha512-T0MVnYw9KT6b83/SqyznTs/3Jg2ODWrZfNccg11XjDehIved2oQfrX/wVuev9N936BpMRaTR9I1J0tdGgUgpJA== + +"@esbuild/linux-arm@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.15.tgz#5b22062c54f48cd92fab9ffd993732a52db70cd3" + integrity sha512-MLTgiXWEMAMr8nmS9Gigx43zPRmEfeBfGCwxFQEMgJ5MC53QKajaclW6XDPjwJvhbebv+RzK05TQjvH3/aM4Xw== + +"@esbuild/linux-ia32@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.15.tgz#eb28a13f9b60b5189fcc9e98e1024f6b657ba54c" + integrity sha512-wp02sHs015T23zsQtU4Cj57WiteiuASHlD7rXjKUyAGYzlOKDAjqK6bk5dMi2QEl/KVOcsjwL36kD+WW7vJt8Q== + +"@esbuild/linux-loong64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.15.tgz#32454bdfe144cf74b77895a8ad21a15cb81cfbe5" + integrity sha512-k7FsUJjGGSxwnBmMh8d7IbObWu+sF/qbwc+xKZkBe/lTAF16RqxRCnNHA7QTd3oS2AfGBAnHlXL67shV5bBThQ== + +"@esbuild/linux-mips64el@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.15.tgz#af12bde0d775a318fad90eb13a0455229a63987c" + integrity sha512-ZLWk6czDdog+Q9kE/Jfbilu24vEe/iW/Sj2d8EVsmiixQ1rM2RKH2n36qfxK4e8tVcaXkvuV3mU5zTZviE+NVQ== + +"@esbuild/linux-ppc64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.15.tgz#34c5ed145b2dfc493d3e652abac8bd3baa3865a5" + integrity sha512-mY6dPkIRAiFHRsGfOYZC8Q9rmr8vOBZBme0/j15zFUKM99d4ILY4WpOC7i/LqoY+RE7KaMaSfvY8CqjJtuO4xg== + +"@esbuild/linux-riscv64@0.17.15": + version "0.17.15" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.15.tgz#87bd515e837f2eb004b45f9e6a94dc5b93f22b92" + integrity sha512-EcyUtxffdDtWjjwIH8sKzpDRLcVtqANooMNASO59y+xmqqRYBBM7xVLQhqF7nksIbm2yHABptoioS9RAbVMWVA== + +"@esbuild/linux-s390x@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.15.tgz#20bf7947197f199ddac2ec412029a414ceae3aa3" + integrity sha512-BuS6Jx/ezxFuHxgsfvz7T4g4YlVrmCmg7UAwboeyNNg0OzNzKsIZXpr3Sb/ZREDXWgt48RO4UQRDBxJN3B9Rbg== + +"@esbuild/linux-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.15.tgz#31b93f9c94c195e852c20cd3d1914a68aa619124" + integrity sha512-JsdS0EgEViwuKsw5tiJQo9UdQdUJYuB+Mf6HxtJSPN35vez1hlrNb1KajvKWF5Sa35j17+rW1ECEO9iNrIXbNg== + +"@esbuild/netbsd-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.15.tgz#8da299b3ac6875836ca8cdc1925826498069ac65" + integrity sha512-R6fKjtUysYGym6uXf6qyNephVUQAGtf3n2RCsOST/neIwPqRWcnc3ogcielOd6pT+J0RDR1RGcy0ZY7d3uHVLA== + +"@esbuild/openbsd-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.15.tgz#04a1ec3d4e919714dba68dcf09eeb1228ad0d20c" + integrity sha512-mVD4PGc26b8PI60QaPUltYKeSX0wxuy0AltC+WCTFwvKCq2+OgLP4+fFd+hZXzO2xW1HPKcytZBdjqL6FQFa7w== + +"@esbuild/sunos-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.15.tgz#6694ebe4e16e5cd7dab6505ff7c28f9c1c695ce5" + integrity sha512-U6tYPovOkw3459t2CBwGcFYfFRjivcJJc1WC8Q3funIwX8x4fP+R6xL/QuTPNGOblbq/EUDxj9GU+dWKX0oWlQ== + +"@esbuild/win32-arm64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.15.tgz#1f95b2564193c8d1fee8f8129a0609728171d500" + integrity sha512-W+Z5F++wgKAleDABemiyXVnzXgvRFs+GVKThSI+mGgleLWluv0D7Diz4oQpgdpNzh4i2nNDzQtWbjJiqutRp6Q== + 
+"@esbuild/win32-ia32@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.15.tgz#c362b88b3df21916ed7bcf75c6d09c6bf3ae354a" + integrity sha512-Muz/+uGgheShKGqSVS1KsHtCyEzcdOn/W/Xbh6H91Etm+wiIfwZaBn1W58MeGtfI8WA961YMHFYTthBdQs4t+w== + +"@esbuild/win32-x64@0.17.15": + version "0.17.15" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.15.tgz#c2e737f3a201ebff8e2ac2b8e9f246b397ad19b8" + integrity sha512-DjDa9ywLUUmjhV2Y9wUTIF+1XsmuFGvZoCmOWkli1XcNAh5t25cc7fgsCx4Zi/Uurep3TTLyDiKATgGEg61pkA== + +"@eslint-community/eslint-utils@^4.2.0": + version "4.4.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== + dependencies: + eslint-visitor-keys "^3.3.0" + +"@eslint-community/regexpp@^4.4.0": + version "4.5.0" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.5.0.tgz#f6f729b02feee2c749f57e334b7a1b5f40a81724" + integrity sha512-vITaYzIcNmjn5tF5uxcZ/ft7/RXGrMUIS9HalWckEOF6ESiwXKoMzAQf2UW0aVd6rnOeExTJVd5hmWXucBKGXQ== + +"@eslint/eslintrc@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.0.2.tgz#01575e38707add677cf73ca1589abba8da899a02" + integrity sha512-3W4f5tDUra+pA+FzgugqL2pRimUTDJWKr7BINqOpkZrC0uYI0NIc0/JFgBROCU07HR6GieA5m3/rsPIhDmCXTQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.5.1" + globals "^13.19.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@eslint/js@8.37.0": + version "8.37.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.37.0.tgz#cf1b5fa24217fe007f6487a26d765274925efa7d" + integrity sha512-x5vzdtOOGgFVDCUs81QRB2+liax8rFg3+7hqM+QhBG0/G3F1ZsoYl97UrqgHgQ9KKT7G6c4V+aTUCgu/n22v1A== + 
+"@humanwhocodes/config-array@^0.11.8": + version "0.11.8" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.8.tgz#03595ac2075a4dc0f191cc2131de14fbd7d410b9" + integrity sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.5" + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + 
"@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/node@^17.0.23": + version "17.0.45" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190" + integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== + +"@types/semver@^7.3.12", "@types/semver@^7.3.9": + version "7.3.13" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.13.tgz#da4bfd73f49bd541d28920ab0e2bf0ee80f71c91" + integrity sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw== + +"@types/vscode@^1.60.0": + version "1.77.0" + resolved "https://registry.yarnpkg.com/@types/vscode/-/vscode-1.77.0.tgz#f92f15a636abc9ef562f44dd8af6766aefedb445" + integrity sha512-MWFN5R7a33n8eJZJmdVlifjig3LWUNRrPeO1xemIcZ0ae0TEQuRc7G2xV0LUX78RZFECY1plYBn+dP/Acc3L0Q== + +"@typescript-eslint/eslint-plugin@^5.13.0": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.57.1.tgz#d1ab162a3cd2671b8a1c9ddf6e2db73b14439735" + integrity sha512-1MeobQkQ9tztuleT3v72XmY0XuKXVXusAhryoLuU5YZ+mXoYKZP9SQ7Flulh1NX4DTjpGTc2b/eMu4u7M7dhnQ== + dependencies: + "@eslint-community/regexpp" "^4.4.0" + "@typescript-eslint/scope-manager" "5.57.1" + "@typescript-eslint/type-utils" "5.57.1" + "@typescript-eslint/utils" "5.57.1" + debug "^4.3.4" + grapheme-splitter "^1.0.4" + ignore 
"^5.2.0" + natural-compare-lite "^1.4.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/parser@^5.0.0": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.57.1.tgz#af911234bd4401d09668c5faf708a0570a17a748" + integrity sha512-hlA0BLeVSA/wBPKdPGxoVr9Pp6GutGoY380FEhbVi0Ph4WNe8kLvqIRx76RSQt1lynZKfrXKs0/XeEk4zZycuA== + dependencies: + "@typescript-eslint/scope-manager" "5.57.1" + "@typescript-eslint/types" "5.57.1" + "@typescript-eslint/typescript-estree" "5.57.1" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.57.1.tgz#5d28799c0fc8b501a29ba1749d827800ef22d710" + integrity sha512-N/RrBwEUKMIYxSKl0oDK5sFVHd6VI7p9K5MyUlVYAY6dyNb/wHUqndkTd3XhpGlXgnQsBkRZuu4f9kAHghvgPw== + dependencies: + "@typescript-eslint/types" "5.57.1" + "@typescript-eslint/visitor-keys" "5.57.1" + +"@typescript-eslint/type-utils@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.57.1.tgz#235daba621d3f882b8488040597b33777c74bbe9" + integrity sha512-/RIPQyx60Pt6ga86hKXesXkJ2WOS4UemFrmmq/7eOyiYjYv/MUSHPlkhU6k9T9W1ytnTJueqASW+wOmW4KrViw== + dependencies: + "@typescript-eslint/typescript-estree" "5.57.1" + "@typescript-eslint/utils" "5.57.1" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.57.1.tgz#d9989c7a9025897ea6f0550b7036027f69e8a603" + integrity sha512-bSs4LOgyV3bJ08F5RDqO2KXqg3WAdwHCu06zOqcQ6vqbTJizyBhuh1o1ImC69X4bV2g1OJxbH71PJqiO7Y1RuA== + +"@typescript-eslint/typescript-estree@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.57.1.tgz#10d9643e503afc1ca4f5553d9bbe672ea4050b71" + integrity 
sha512-A2MZqD8gNT0qHKbk2wRspg7cHbCDCk2tcqt6ScCFLr5Ru8cn+TCfM786DjPhqwseiS+PrYwcXht5ztpEQ6TFTw== + dependencies: + "@typescript-eslint/types" "5.57.1" + "@typescript-eslint/visitor-keys" "5.57.1" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.57.1.tgz#0f97b0bbd88c2d5e2036869f26466be5f4c69475" + integrity sha512-kN6vzzf9NkEtawECqze6v99LtmDiUJCVpvieTFA1uL7/jDghiJGubGZ5csicYHU1Xoqb3oH/R5cN5df6W41Nfg== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@types/json-schema" "^7.0.9" + "@types/semver" "^7.3.12" + "@typescript-eslint/scope-manager" "5.57.1" + "@typescript-eslint/types" "5.57.1" + "@typescript-eslint/typescript-estree" "5.57.1" + eslint-scope "^5.1.1" + semver "^7.3.7" + +"@typescript-eslint/visitor-keys@5.57.1": + version "5.57.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.57.1.tgz#585e5fa42a9bbcd9065f334fd7c8a4ddfa7d905e" + integrity sha512-RjQrAniDU0CEk5r7iphkm731zKlFiUjvcBS2yHAg8WWqFMCaCrD0rKEVOMUyMMcbGPZ0bPp56srkGWrgfZqLRA== + dependencies: + "@typescript-eslint/types" "5.57.1" + eslint-visitor-keys "^3.3.0" + +"@vscode/vsce@^2.18.0": + version "2.18.0" + resolved "https://registry.yarnpkg.com/@vscode/vsce/-/vsce-2.18.0.tgz#9f40bf8e7df084a36844b9dadf5c277265c9fbd6" + integrity sha512-tUA3XoKx5xjoi3EDcngk0VUYMhvfXLhS4s7CntpLPh1qtLYtgSCexTIMUHkCy6MqyozRW98bdW3a2yHPEADRnQ== + dependencies: + azure-devops-node-api "^11.0.1" + chalk "^2.4.2" + cheerio "^1.0.0-rc.9" + commander "^6.1.0" + glob "^7.0.6" + hosted-git-info "^4.0.2" + jsonc-parser "^3.2.0" + leven "^3.1.0" + markdown-it "^12.3.2" + mime "^1.3.4" + minimatch "^3.0.3" + parse-semver "^1.1.1" + read "^1.0.7" + semver "^5.1.0" + tmp "^0.2.1" + typed-rest-client "^1.8.4" + url-join "^4.0.1" + xml2js "^0.4.23" + yauzl "^2.3.1" + yazl "^2.2.2" + optionalDependencies: + 
keytar "^7.7.0" + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^8.8.0: + version "8.8.2" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" + integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-buffer-byte-length@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" + integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== + dependencies: + call-bind "^1.0.2" + is-array-buffer "^3.0.1" + +array-includes@^3.1.6: + version "3.1.6" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.6.tgz#9e9e720e194f198266ba9e18c29e6a9b0e4b225f" + integrity sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + get-intrinsic "^1.1.3" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" + integrity sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz#1aae7903c2100433cb8261cd4ed310aab5c4a183" + integrity sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" + 
+available-typed-arrays@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== + +azure-devops-node-api@^11.0.1: + version "11.2.0" + resolved "https://registry.yarnpkg.com/azure-devops-node-api/-/azure-devops-node-api-11.2.0.tgz#bf04edbef60313117a0507415eed4790a420ad6b" + integrity sha512-XdiGPhrpaT5J8wdERRKs5g8E0Zy1pvOYTli7z9E8nmOn3YGp4FhtjhrOyFmX/8veWCwdI69mCHKJw6l+4J/bHA== + dependencies: + tunnel "0.0.6" + typed-rest-client "^1.8.4" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +bl@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +boolbase@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity 
sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +buffer-crc32@~0.2.3: + version "0.2.13" + resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" + integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +chalk@^2.4.2: + version "2.4.2" + 
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +cheerio-select@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4" + integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g== + dependencies: + boolbase "^1.0.0" + css-select "^5.1.0" + css-what "^6.1.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + +cheerio@^1.0.0-rc.9: + version "1.0.0-rc.12" + resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + +chownr@^1.1.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity 
sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +commander@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +confusing-browser-globals@^1.0.10: + version "1.0.11" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +css-select@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6" + integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg== + dependencies: + boolbase "^1.0.0" + css-what "^6.1.0" + domhandler "^5.0.2" + domutils "^3.0.1" + nth-check "^2.0.1" + +css-what@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== + dependencies: + mimic-response "^3.1.0" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity 
sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.2.0" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" + integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +detect-libc@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" + integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-serializer@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.0.1.tgz#696b3875238338cb186b6c0612bd4901c89a4f1c" + integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.1" + +end-of-stream@^1.1.0, end-of-stream@^1.4.1: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +entities@^4.2.0, entities@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.4.0.tgz#97bdaba170339446495e653cfd2db78962900174" + integrity sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA== + +entities@~2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5" + integrity sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w== + +es-abstract@^1.19.0, es-abstract@^1.20.4: + version "1.21.2" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.21.2.tgz#a56b9695322c8a185dc25975aa3b8ec31d0e7eff" + integrity sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg== + dependencies: + array-buffer-byte-length "^1.0.0" + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + es-set-tostringtag "^2.0.1" + es-to-primitive "^1.2.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.2.0" + get-symbol-description "^1.0.0" + globalthis "^1.0.3" + gopd "^1.0.1" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-proto "^1.0.1" + has-symbols "^1.0.3" + internal-slot "^1.0.5" + is-array-buffer "^3.0.2" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-typed-array "^1.1.10" + is-weakref "^1.0.2" + object-inspect "^1.12.3" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trim "^1.2.7" + string.prototype.trimend "^1.0.6" + string.prototype.trimstart "^1.0.6" + typed-array-length "^1.0.4" + unbox-primitive "^1.0.2" + which-typed-array "^1.1.9" + +es-set-tostringtag@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" + integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== + dependencies: + get-intrinsic "^1.1.3" + has "^1.0.3" + has-tostringtag "^1.0.0" + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +esbuild@^0.17.12: + version "0.17.15" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.17.15.tgz#209ebc87cb671ffb79574db93494b10ffaf43cbc" + integrity sha512-LBUV2VsUIc/iD9ME75qhT4aJj0r75abCVS0jakhFzOtR7TQsqQA5w0tZ+KTKnwl3kXE0MhskNdHDh/I5aCR1Zw== + optionalDependencies: + "@esbuild/android-arm" "0.17.15" + "@esbuild/android-arm64" "0.17.15" + "@esbuild/android-x64" "0.17.15" + "@esbuild/darwin-arm64" "0.17.15" + "@esbuild/darwin-x64" "0.17.15" + "@esbuild/freebsd-arm64" "0.17.15" + "@esbuild/freebsd-x64" "0.17.15" + "@esbuild/linux-arm" "0.17.15" + "@esbuild/linux-arm64" "0.17.15" + "@esbuild/linux-ia32" "0.17.15" + "@esbuild/linux-loong64" "0.17.15" + "@esbuild/linux-mips64el" "0.17.15" + "@esbuild/linux-ppc64" "0.17.15" + "@esbuild/linux-riscv64" "0.17.15" + "@esbuild/linux-s390x" "0.17.15" + "@esbuild/linux-x64" "0.17.15" + "@esbuild/netbsd-x64" "0.17.15" + "@esbuild/openbsd-x64" "0.17.15" + "@esbuild/sunos-x64" "0.17.15" + "@esbuild/win32-arm64" "0.17.15" + "@esbuild/win32-ia32" "0.17.15" + "@esbuild/win32-x64" "0.17.15" + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + 
+escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-airbnb-base@^15.0.0: + version "15.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236" + integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig== + dependencies: + confusing-browser-globals "^1.0.10" + object.assign "^4.1.2" + object.entries "^1.1.5" + semver "^6.3.0" + +eslint-config-airbnb-typescript@^17.0.0: + version "17.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-17.0.0.tgz#360dbcf810b26bbcf2ff716198465775f1c49a07" + integrity sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g== + dependencies: + eslint-config-airbnb-base "^15.0.0" + +eslint-import-resolver-node@^0.3.7: + version "0.3.7" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz#83b375187d412324a1963d84fa664377a23eb4d7" + integrity sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA== + dependencies: + debug "^3.2.7" + is-core-module "^2.11.0" + resolve "^1.22.1" + +eslint-module-utils@^2.7.4: + version "2.7.4" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + +eslint-plugin-import@^2.26.0: + version "2.27.5" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz#876a6d03f52608a3e5bb439c2550588e51dd6c65" + integrity sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow== + dependencies: + array-includes "^3.1.6" + array.prototype.flat "^1.3.1" + array.prototype.flatmap "^1.3.1" + debug "^3.2.7" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.7" + eslint-module-utils "^2.7.4" + has "^1.0.3" + is-core-module "^2.11.0" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.6" + resolve "^1.22.1" + semver "^6.3.0" + tsconfig-paths "^3.14.1" + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.0.tgz#c7f0f956124ce677047ddbc192a68f999454dedc" + integrity sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ== + +eslint@^8.19.0: + version "8.37.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.37.0.tgz#1f660ef2ce49a0bfdec0b0d698e0b8b627287412" + integrity sha512-NU3Ps9nI05GUoVMxcZx1J8CNR6xOvUT4jAUMH5+z8lpp3aEdPVCImKw6PWG4PY+Vfkpr+jvMpxs/qoE7wq0sPw== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.4.0" + "@eslint/eslintrc" "^2.0.2" + 
"@eslint/js" "8.37.0" + "@humanwhocodes/config-array" "^0.11.8" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-visitor-keys "^3.4.0" + espree "^9.5.1" + esquery "^1.4.2" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + is-path-inside "^3.0.3" + js-sdsl "^4.1.4" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.5.1: + version "9.5.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.5.1.tgz#4f26a4d5f18905bf4f2e0bd99002aab807e96dd4" + integrity sha512-5yxtHSZXRSW5pvv3hAlXM5+/Oswi1AUFqBmbibKb5s6bp3rGIDkyXU6xCoyuuLhijr4SFwPrXRoZjz0AZDN9tg== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.4.0" + +esquery@^1.4.2: + version "1.5.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + 
integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +expand-template@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" + integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + 
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + dependencies: + reusify "^1.0.4" + +fd-slicer@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" + integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== + dependencies: + pend "~1.2.0" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity 
sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +for-each@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" + +fs-constants@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" + integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functions-have-names@^1.2.2: + version "1.2.3" + 
resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f" + integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +github-from-package@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" + integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== + +glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@^7.0.6, glob@^7.1.3: + version "7.2.3" + resolved 
"https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^13.19.0: + version "13.20.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82" + integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ== + dependencies: + type-fest "^0.20.2" + +globalthis@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" + integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== + dependencies: + define-properties "^1.1.3" + +globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + +has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hosted-git-info@^4.0.2: + version "4.1.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" + integrity sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA== + dependencies: + lru-cache "^6.0.0" + +htmlparser2@^8.0.1: + version "8.0.2" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.2.tgz#f002151705b383e62433b5cf466f5b716edaec21" + integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + entities "^4.4.0" + +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +ignore@^5.2.0: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity 
sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.3, inherits@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ini@~1.3.0: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +internal-slot@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" + integrity sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== + dependencies: + get-intrinsic "^1.2.0" + has "^1.0.3" + side-channel "^1.0.4" + +is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" + integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.2.0" + is-typed-array "^1.1.10" + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + 
+is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-core-module@^2.11.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-path-inside@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity 
sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typed-array@^1.1.10, is-typed-array@^1.1.9: + version "1.1.10" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.10.tgz#36a5b5cb4189b575d1a3e4b08536bfb485801e3f" + integrity sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +js-sdsl@^4.1.4: + version "4.4.0" + resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.4.0.tgz#8b437dbe642daa95760400b602378ed8ffea8430" + integrity sha512-FfVSdx6pJ41Oa+CF7RDaFmTnCaFhua+SNYQX74riGOpl96x+2jQCqEfQ2bnXu/5DPCqlRuiqyvTJM0Qjz26IVg== + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + 
+json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== + dependencies: + minimist "^1.2.0" + +jsonc-parser@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" + integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== + +keytar@^7.7.0: + version "7.9.0" + resolved "https://registry.yarnpkg.com/keytar/-/keytar-7.9.0.tgz#4c6225708f51b50cbf77c5aae81721964c2918cb" + integrity sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ== + dependencies: + node-addon-api "^4.3.0" + prebuild-install "^7.0.1" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity 
sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +linkify-it@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-3.0.3.tgz#a98baf44ce45a550efb4d49c769d07524cc2fa2e" + integrity sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ== + dependencies: + uc.micro "^1.0.1" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +markdown-it@^12.3.2: + version "12.3.2" + resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-12.3.2.tgz#bf92ac92283fe983fe4de8ff8abfb5ad72cd0c90" + integrity sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg== + dependencies: + argparse "^2.0.1" + entities "~2.1.0" + linkify-it "^3.0.1" + mdurl "^1.0.1" + uc.micro "^1.0.5" + +mdurl@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" + integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g== + +merge2@^1.3.0, merge2@^1.4.1: + 
version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime@^1.3.4: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + +minimatch@^3.0.3, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.1.0: + version "5.1.6" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.6: + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity 
sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mute-stream@~0.0.4: + version "0.0.8" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== + +napi-build-utils@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806" + integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg== + +natural-compare-lite@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" + integrity sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity 
sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +node-abi@^3.3.0: + version "3.35.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.35.0.tgz#c7f171af995cc5286fa95612c4ca97b3910eaa79" + integrity sha512-jAlSOFR1Bls963NmFwxeQkNTzqjUF0NThm8Le7eRIRGzFUVJuMOFZDLv5Y30W/Oaw+KEebEJLAigwO9gQHoEmw== + dependencies: + semver "^7.3.5" + +node-addon-api@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-4.3.0.tgz#52a1a0b475193e0928e98e0426a0d1254782b77f" + integrity sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ== + +nth-check@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +object-inspect@^1.12.3, object-inspect@^1.9.0: + version "1.12.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" + integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2, object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.6" + resolved 
"https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.6.tgz#9737d0e5b8291edd340a3e3264bb8a3b00d5fa23" + integrity sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +object.values@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.6.tgz#4abbaa71eba47d63589d402856f908243eea9b1d" + integrity sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + 
+parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-semver@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/parse-semver/-/parse-semver-1.1.1.tgz#9a4afd6df063dc4826f93fba4a99cf223f666cb8" + integrity sha512-Eg1OuNntBMH0ojvEKSrvDSnwLmvVuUOSdylH/pSCPNMIspLlweJyIWXCE+k/5hm3cj/EBUYwmWkjhBALNP4LXQ== + dependencies: + semver "^5.1.0" + +parse5-htmlparser2-tree-adapter@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz#23c2cc233bcf09bb7beba8b8a69d46b08c62c2f1" + integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g== + dependencies: + domhandler "^5.0.2" + parse5 "^7.0.0" + +parse5@^7.0.0: + version "7.1.2" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" + integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== + dependencies: + entities "^4.4.0" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + 
integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +pend@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" + integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== + +picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +prebuild-install@^7.0.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.1.tgz#de97d5b34a70a0c81334fd24641f2a1702352e45" + integrity sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw== + dependencies: + detect-libc "^2.0.0" + expand-template "^2.0.3" + github-from-package "0.0.0" + minimist "^1.2.3" + mkdirp-classic "^0.5.3" + napi-build-utils "^1.0.1" + node-abi "^3.3.0" + pump "^3.0.0" + rc "^1.2.7" + simple-get "^4.0.0" + tar-fs "^2.0.0" + tunnel-agent "^0.6.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prettier@^2.6.2: + 
version "2.8.7" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.7.tgz#bb79fc8729308549d28fe3a98fce73d2c0656450" + integrity sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw== + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== + +qs@^6.9.1: + version "6.11.1" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.1.tgz#6c29dff97f0c0060765911ba65cbc9764186109f" + integrity sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ== + dependencies: + side-channel "^1.0.4" + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +rc@^1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +read@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" + integrity sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ== + dependencies: + mute-stream "~0.0.4" + 
+readable-stream@^3.1.1, readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^1.22.1: + version "1.22.2" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" + integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== + dependencies: + is-core-module "^2.11.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + 
+run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@^5.0.1, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +sax@>=0.6.0: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +semver@^5.1.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.5, semver@^7.3.7: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + 
lru-cache "^6.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +simple-concat@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" + integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== + +simple-get@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" + integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== + dependencies: + decompress-response "^6.0.0" + once "^1.3.1" + simple-concat "^1.0.0" + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +string.prototype.trim@^1.2.7: + version "1.2.7" + resolved 
"https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" + integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string.prototype.trimend@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" + integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string.prototype.trimstart@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" + integrity sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity 
sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +tar-fs@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" + integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + 
tar-stream "^2.1.4" + +tar-stream@^2.1.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.1.1" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +tmp@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14" + integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ== + dependencies: + rimraf "^3.0.0" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tsconfig-paths@^3.14.1: + version "3.14.2" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" + integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.2" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tsutils@^3.21.0: + version "3.21.0" + resolved 
"https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== + dependencies: + safe-buffer "^5.0.1" + +tunnel@0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" + integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +typed-array-length@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" + integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== + dependencies: + call-bind "^1.0.2" + for-each "^0.3.3" + is-typed-array "^1.1.9" + +typed-rest-client@^1.8.4: + version "1.8.9" + resolved "https://registry.yarnpkg.com/typed-rest-client/-/typed-rest-client-1.8.9.tgz#e560226bcadfe71b0fb5c416b587f8da3b8f92d8" + integrity 
sha512-uSmjE38B80wjL85UFX3sTYEUlvZ1JgCRhsWj/fJ4rZ0FqDUFoIuodtiVeE+cUqiVTOKPdKrp/sdftD15MDek6g== + dependencies: + qs "^6.9.1" + tunnel "0.0.6" + underscore "^1.12.1" + +typescript@^4.6.3: + version "4.9.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" + integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== + +uc.micro@^1.0.1, uc.micro@^1.0.5: + version "1.0.6" + resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac" + integrity sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +underscore@^1.12.1: + version "1.13.6" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441" + integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-join@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7" + integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA== + +util-deprecate@^1.0.1: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +vscode-jsonrpc@8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz#f43dfa35fb51e763d17cd94dcca0c9458f35abf9" + integrity sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA== + +vscode-languageclient@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-9.0.1.tgz#cdfe20267726c8d4db839dc1e9d1816e1296e854" + integrity sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA== + dependencies: + minimatch "^5.1.0" + semver "^7.3.7" + vscode-languageserver-protocol "3.17.5" + +vscode-languageserver-protocol@3.17.5: + version "3.17.5" + resolved "https://registry.yarnpkg.com/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz#864a8b8f390835572f4e13bd9f8313d0e3ac4bea" + integrity sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg== + dependencies: + vscode-jsonrpc "8.2.0" + vscode-languageserver-types "3.17.5" + +vscode-languageserver-types@3.17.5: + version "3.17.5" + resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz#3273676f0cf2eab40b3f44d085acbb7f08a39d8a" + integrity sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg== + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + 
is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-typed-array@^1.1.9: + version "1.1.9" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.9.tgz#307cf898025848cf995e795e8423c7f337efbde6" + integrity sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + is-typed-array "^1.1.10" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +xml2js@^0.4.23: + version "0.4.23" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66" + integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + +xmlbuilder@~11.0.0: + version "11.0.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + +yallist@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yauzl@^2.3.1: + version "2.10.0" + resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" + integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== + dependencies: + buffer-crc32 "~0.2.3" + fd-slicer "~1.1.0" + +yazl@^2.2.2: + version "2.5.1" + resolved "https://registry.yarnpkg.com/yazl/-/yazl-2.5.1.tgz#a3d65d3dd659a5b0937850e8609f22fffa2b5c35" + integrity sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw== + dependencies: + buffer-crc32 "~0.2.3" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/website/blog/2023-03-30-relay-15.mdx b/website/blog/2023-03-30-relay-15.mdx new file mode 100644 index 0000000000000..8073afbdbd10c --- /dev/null +++ b/website/blog/2023-03-30-relay-15.mdx @@ -0,0 +1,135 @@ +--- +title: Relay v15.0 +author: The Relay Team +hide_table_of_contents: false +--- + +The Relay team is happy to announce the release of Relay v15. While this release is a major version bump and includes a couple of breaking changes, we expect that most users will be unaffected and will experience a seamless upgrade. You can find the full list of changes in the [v15 Release Notes](https://github.com/facebook/relay/releases/tag/v15.0.0). + +## What's new in Relay 15? + +### Support for `@refetchable` on interfaces + +Previously it wasn't possible to add the `@refetchable` directive on fragment definitions on server interface types. 
+ +``` +// schema.graphql + +interface RefetchableInterfaceFoo @fetchable(field_name: "id") { + id: ID! +} + +extend type Query { + fetch__RefetchableInterfaceFoo(id: ID!): RefetchableInterfaceFoo +} + +// fragment + +fragment RefetchableFragmentFoo on RefetchableInterfaceFoo + @refetchable(queryName: "RefetchableFragmentFooQuery") { + id +} +``` + +### Persisted query improvements + +If you use URL-based persisted queries, you can now specify custom headers to send with the request that persists the query. For example, this can be used to send auth headers to your query persistence URL endpoint. + +```js +persistConfig: { + url: 'example.com/persist', + headers: { + Authorization: 'bearer TOKEN' + } +} +``` + +For file-based persisted queries, we added a new feature flag, `compact_query_text`, that removes all whitespace from the persisted query text. This can make the file more than 60% smaller. This new feature flag can be enabled within your Relay config file. + +```js +persistConfig: { + file: 'path/to/file.json', + algorithm: 'SHA256' +}, +featureFlags: { + compact_query_text: true +} +``` + +### Typesafe updates now support missing field handlers + +Typesafe updaters now support missing field handlers. Previously, if you selected `node(id: 4) { ... on User { name, __typename } }` in a typesafe updater, but that user was fetched in a different way (e.g. with `best_friend { name }`), you would not be able to access and mutate that user using the typesafe updater. + +In this release, we add support for missing field handlers in typesafe updaters, meaning that if a missing field handler is set up for node (as in [this example](https://relay.dev/docs/next/guided-tour/reusing-cached-data/filling-in-missing-data/#internaldocs-banner)), you will be able to update the user's name with this missing field handler. 
+ +In order to support this, the signature of [missing field handlers](https://relay.dev/docs/guided-tour/reusing-cached-data/filling-in-missing-data) has been changed. The `record` argument to the handler used to receive a `Record` type (which is an untyped grab-bag of data). It now receives a `ReadOnlyRecordProxy`. Furthermore, the field argument of type `NormalizationLinkedField` is now `CommonLinkedField`, which is a type containing the properties found in both `ReaderLinkedField` and `NormalizationLinkedField`. + +### Flow type improvements + +Flow users will now get types inferred from `graphql` literals with more Relay APIs. No longer do Flow users need to explicitly type the return value of the `usePreloadedQuery`, `useQueryLoader`, `useRefetchableFragment`, `usePaginationFragment`, and `useBlockingPaginationFragment` API methods. + +### Relay Resolver improvements + +A significant portion of our development effort since our last release has gone into improving [Relay Resolvers](https://relay.dev/api-reference/relay-resolvers/introduction/) (a mechanism for exposing derived data in the graph). It is worth noting that Relay Resolvers are still experimental and API changes might occur in the future. + +#### Terser docblock tags + +The annotation for Relay Resolver functions has been simplified. In many scenarios you can now use the `ParentType.field_name: ReturnType` syntax to define what new field your Relay Resolver exposes. + +Before: + +```js +/** + * @RelayResolver + * @onType User + * @fieldName favorite_page + * @rootFragment myRootFragment + */ +``` + +After: + +```js +/** + * @RelayResolver User.favorite_page: Page + * @rootFragment myRootFragment + */ +``` + +In the above example, the `Page` type is a schema type. 
If your Relay Resolver doesn't return a schema type, you can use the fixed `RelayResolverValue` value as your return type. + +```js +/** + * @RelayResolver User.best_friend: RelayResolverValue + * @rootFragment myRootFragment + */ +``` + +#### Define multiple resolvers per file + +Prior to this release we only allowed a single Relay Resolver per file and required the Relay Resolver function to be the default export. In Relay 15 you're now able to define multiple Relay Resolvers per file and use named exports. + +```js +/** + * @RelayResolver User.favorite_page: Page + * @rootFragment favoritePageFragment + */ +function usersFavoritePage(){ + ... +} + +/** + * @RelayResolver User.best_friend: RelayResolverValue + * @rootFragment bestFriendFragment + */ +function usersBestFriend(){ + ... +} + +module.exports = { + usersFavoritePage, + usersBestFriend +} +``` + +Happy Querying! diff --git a/website/blog/2023-10-24-how-relay-enables-optimal-data-fetching.md b/website/blog/2023-10-24-how-relay-enables-optimal-data-fetching.md new file mode 100644 index 0000000000000..3b68300034fb3 --- /dev/null +++ b/website/blog/2023-10-24-how-relay-enables-optimal-data-fetching.md @@ -0,0 +1,112 @@ +--- +title: How Relay Enables Optimal Data Fetching +author: Jordan Eldredge +tags: [] +description: + Exploring the tradeoffs that most data fetching strategies are forced to make, + and how Relay allows you to have your cake and eat it too. +hide_table_of_contents: false +--- + +Relay’s approach to application authorship enables a unique combination of +optimal runtime performance and application maintainability. In this post I’ll +describe the tradeoffs most apps are forced to make with their data fetching and +then describe how Relay’s approach allows you to sidestep these tradeoffs and +achieve an optimal outcome across multiple tradeoff dimensions. + +--- + +In component-based UI systems such as React, one important decision to make is +where in your UI tree you fetch data. 
While data fetching can be done at any +point in the UI tree, in order to understand the tradeoffs at play, let’s +consider the two extremes: + +- Leaf node: Fetch data directly within each component that uses data +- Root node: Fetch all data at the root of your UI and thread it down to leaf + nodes using prop drilling + +Where in the UI tree you fetch data impacts multiple dimensions of the +performance and maintainability of your application. Unfortunately, with naive +data fetching, neither extreme is optimal for all dimensions. Let’s look at +these dimensions and consider which improve as you move data fetching closer to +the leaves, vs. which improve as you move data fetching closer to the root. + +### Loading experience + +- 🚫 Leaf node: If individual nodes fetch data, you will end up with request + cascades where your UI needs to make multiple request roundtrips in series + (waterfalls) since each layer of the UI is blocked on its parent layer + rendering. Additionally, if multiple components happen to use the same data, + you will end up fetching the same data multiple times +- ✅ Root node: If all your data is fetched at the root, you will make a single + request and render the whole UI without any duplicate data or cascading + requests + +### Suspense cascades + +- 🚫 Leaf node: If each individual component needs to fetch data separately, + each component will suspend on initial render. With the current implementation + of React, unsuspending results in rerendering from the nearest parent suspense + boundary. This means you will have to reevaluate product component code O(n) + times during initial load, where n is the depth of the tree. +- ✅ Root node: If all your data is fetched at the root, you will suspend a + single time and evaluate product component code only once. + +### Composability + +- ✅ Leaf node: Using an existing component in a new place is as easy as + rendering it. Removing a component is as simple as not-rendering it. 
Similarly + adding/removing data dependencies can be done fully locally. +- 🚫 Root node: Adding an existing component as a child of another component + requires updating every query that includes that component to fetch the new + data and then threading the new data through all intermediate layers. + Similarly, removing a component requires tracing those data dependencies back + to each root component and determining if the component you removed was that + data’s last remaining consumer. The same dynamics apply to adding/removing new + data to an existing component. + +### Granular updates + +- ✅ Leaf node: When data changes, each component reading that data can + individually rerender, avoiding the need to rerender unaffected components. +- 🚫 Root node: Since all data originates at the root, when any data updates it + always forces the root component to update forcing an expensive rerender of + the entire component tree. + +## Relay + +Relay leverages GraphQL fragments and a compiler build step to offer a more +optimal alternative. In an app that uses Relay, each component defines a GraphQL +fragment which declares the data that it needs. This includes both the concrete +values the component will render as well as the fragments (referenced by name) +of each direct child component it will render. + +At build time, the Relay compiler collects these fragments and builds a single +query for each root node in your application. Let’s look at how this approach +plays out for each of the dimensions described above: + +- ✅ Loading experience - The compiler generated query fetches all data needed + for the surface in a single roundtrip +- ✅ Suspense cascades - Since all data is fetched in a single request, we only + suspend once, and it’s right at the root of the tree +- ✅ Composability - Adding/removing data from a component, including the + fragment data needed to render a child component, can be done locally within a + single component. 
The compiler takes care of updating all impacted root + queries +- ✅ Granular updates - Because each component defines a fragment, Relay knows + exactly which data is consumed by each component. This lets Relay perform + optimal updates where the minimal set of components are rerendered when data + changes + +## Summary + +As you can see, Relay’s use of a declarative composable data fetching language +(GraphQL), combined with a compiler step, allows us to achieve optimal outcomes +across all of the tradeoff dimensions outlined above: + +| | Leaf node | Root node | GraphQL/Relay | +| ------------------ | --------- | --------- | ------------- | +| Loading experience | 🚫 | ✅ | ✅ | +| Suspense cascades | 🚫 | ✅ | ✅ | +| Composability | ✅ | 🚫 | ✅ | +| Granular updates | ✅ | 🚫 | ✅ | diff --git a/website/docs/api-reference/graphql/graphql-directives.md b/website/docs/api-reference/graphql/graphql-directives.md index 4d5dd12edb272..3450d789224ca 100644 --- a/website/docs/api-reference/graphql/graphql-directives.md +++ b/website/docs/api-reference/graphql/graphql-directives.md @@ -110,10 +110,28 @@ Notes: With `usePaginationFragment`, Relay expects connection fields to be annotated with a `@connection` directive. For more detailed information and an example, check out the [docs on `usePaginationFragment`](../../guided-tour/list-data/rendering-connections). -## `@refetchable(queryName: String!)` +## `@refetchable(queryName: String!, directives: [String], preferFetchable: Boolean)` With `useRefetchableFragment` and `usePaginationFragment`, Relay expects a `@refetchable` directive. The `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are declared on `Viewer` or `Query` types, or on a type that implements `Node` (i.e. a type that has an id). The `@refetchable` directive will autogenerate a query with the specified `queryName`. 
This will also generate Flow types for the query, available to import from the generated file: `.graphql.js`. For more detailed information and examples, check out the docs on [`useRefetchableFragment`](../use-refetchable-fragment/) or [`usePaginationFragment`](../use-pagination-fragment/). +Optionally, you can pass in a list of directives to add to the autogenerated query. For example, this can be used to add the `@relay_test_operation` directive for [testing](../../guides/testing-relay-components): + +[Optional] `preferFetchable: Boolean` + +This argument tells the Relay compiler to prefer generating `fetch_MyType(): MyType` queries for types that implement the `Node` interface. This is useful for schemas that have adopted the `@strong` and `@fetchable` server annotations for types. You can directly fetch concrete objects without needing to refine `Node` interface to a specific type. + +```javascript +graphql` + fragment FriendsListComponent_user on User + @refetchable( + queryName: "FriendsListFetchQuery" + directives: ["@relay_test_operation"] + ) { + ... + } +` +``` + ## `@relay(plural: Boolean)` When defining a fragment for use with a Fragment container, you can use the `@relay(plural: true)` directive to indicate that container expects the prop for that fragment to be a list of items instead of a single item. A query or parent that spreads a `@relay(plural: true)` fragment should do so within a plural field (ie a field backed by a [GraphQL list](http://graphql.org/learn/schema/#lists-and-non-null). For example: @@ -142,6 +160,22 @@ fragment TodoApp_app on App { See also [the @required guide](../../guides/required-directive/). +## `@alias` + +`@alias` is a directive that allows you to give a fragment spread or inline fragment an alias, similar to a field alias. This is useful when you want to conditionally include a fragment and check if it was fetched, or otherwise group data together. + +For fragment spreads, the alias will default to the fragment name. 
For inline fragments, the alias will default to the type name. If you wish to supply your own name, or you have an inline fragment without any type condition, you can specify the alias using the `as` argument. + +```graphql +fragment MyFragment on User { + ... on User @alias(as: "myGreatAlias") { + name + } +} +``` + +See also [the @alias guide](../../guides/alias-directive/). + ## `@inline` The hooks APIs that Relay exposes allow you to read data from the store only during the render phase. In order to read data from outside of the render phase (or from outside of React), Relay exposes the `@inline` directive. The data from a fragment annotated with `@inline` can be read using `readInlineData`. @@ -215,4 +249,20 @@ graphql` `; ``` +## `@waterfall` + +With [Relay Resolvers](../../guides/relay-resolvers/introduction.md) it's possible to create client-defined edges in the graph which point to server types. When reading these edge fields, Relay is forced to lazily fetch the server data for the edge. This will force Relay to make a second request to the server to fetch the data for the edge. + +To highlight this tradeoff both in the editor and during code review, the Relay compiler expects all reads of these fields to be annotated as `@waterfall`. + +```graphql +fragment EditPost on DraftPost { + author @waterfall { + name + } +} +``` + +See the [Return Type](../../guides/relay-resolvers/return-types.md#server-types) portion of the Relay Resolvers guide for more information. + diff --git a/website/docs/api-reference/hooks/load-query.md b/website/docs/api-reference/hooks/load-query.md index 1c58c23c019a0..33966e329d71e 100644 --- a/website/docs/api-reference/hooks/load-query.md +++ b/website/docs/api-reference/hooks/load-query.md @@ -64,11 +64,6 @@ const queryReference = loadQuery( * `environmentProviderOptions`: *[Optional]* options object * Options passed to an `environmentProvider` used in `prepareSurfaceEntryPoint.js`. 
-### Flow Type Parameters - -* `TQuery`: Type parameter that should correspond to the Flow type for the specified query. This type is available to import from the the auto-generated file: `.graphql.js`. -* `TEnvironmentProviderOptions`: The type of the `environmentProviderOptions` parameter. - ### Return Value A query reference with the following properties: diff --git a/website/docs/api-reference/hooks/relay-environment-provider.md b/website/docs/api-reference/hooks/relay-environment-provider.md index ca05fd7e74018..882e457cace88 100644 --- a/website/docs/api-reference/hooks/relay-environment-provider.md +++ b/website/docs/api-reference/hooks/relay-environment-provider.md @@ -16,14 +16,53 @@ This component is used to set a Relay environment in React Context. Usually, a * ```js const React = require('React'); +const { + Store, + RecordSource, + Environment, + Network, + Observable, +} = require("relay-runtime"); const {RelayEnvironmentProvider} = require('react-relay'); -const Environment = createNewEnvironment(); +/** + * Custom fetch function to handle GraphQL requests for a Relay environment. + * + * This function is responsible for sending GraphQL requests over the network and returning + * the response data. It can be customized to integrate with different network libraries or + * to add authentication headers as needed. + * + * @param {RequestParameters} params - The GraphQL request parameters to send to the server. + * @param {Variables} variables - Variables used in the GraphQL query. + */ +function fetchFunction(params, variables) { + const response = fetch("http://my-graphql/api", { + method: "POST", + headers: [["Content-Type", "application/json"]], + body: JSON.stringify({ + query: params.text, + variables, + }), + }); + + return Observable.from(response.then((data) => data.json())); +}; + +/** + * Creates a new Relay environment instance for managing (fetching, storing) GraphQL data. 
+ */ +function createEnvironment() { + const network = Network.create(fetchFunction); + const store = new Store(new RecordSource()); + return new Environment({ store, network }); +} + +const environment = createEnvironment(); function Root() { return ( - + ); diff --git a/website/docs/api-reference/hooks/use-lazy-load-query.md b/website/docs/api-reference/hooks/use-lazy-load-query.md index 32a227e8fcd48..1a80e04b4a9a6 100644 --- a/website/docs/api-reference/hooks/use-lazy-load-query.md +++ b/website/docs/api-reference/hooks/use-lazy-load-query.md @@ -50,10 +50,6 @@ function App() { * `fetchKey`: A `fetchKey` can be passed to force a re-evaluation of the current query and variables when the component re-renders, even if the variables didn't change, or even if the component isn't remounted (similarly to how passing a different `key` to a React component will cause it to remount). If the `fetchKey` is different from the one used in the previous render, the current query will be re-evaluated against the store, and it might be refetched depending on the current `fetchPolicy` and the state of the cache. * `networkCacheConfig`: *_[Optional] _* Default value: `{force: true}`. Object containing cache config options for the *network layer*. Note that the network layer may contain an *additional* query response cache which will reuse network responses for identical queries. If you want to bypass this cache completely (which is the default behavior), pass `{force: true}` as the value for this option. -### Flow Type Parameters - -* `TQuery`: Type parameter that should correspond to the Flow type for the specified query. This type is available to import from the the auto-generated file: `.graphql.js`. - ### Return Value * `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified query. 
diff --git a/website/docs/api-reference/hooks/use-mutation.md b/website/docs/api-reference/hooks/use-mutation.md index ac811917c8134..ac87cd519cfff 100644 --- a/website/docs/api-reference/hooks/use-mutation.md +++ b/website/docs/api-reference/hooks/use-mutation.md @@ -70,11 +70,6 @@ function LikeButton() { - -### Flow Type Parameters - -* `TMutation`: Type parameter that should corresponds the Flow type for the mutation query. This type is available to import from the the auto-generated file: `.graphql.js`. - ### Return Value Tuple containing the following values: diff --git a/website/docs/api-reference/hooks/use-refetchable-fragment.md b/website/docs/api-reference/hooks/use-refetchable-fragment.md index f2d80974a672f..94ae6aea9daf6 100644 --- a/website/docs/api-reference/hooks/use-refetchable-fragment.md +++ b/website/docs/api-reference/hooks/use-refetchable-fragment.md @@ -90,8 +90,9 @@ Tuple containing the following values * Arguments: * `variables`: Object containing the new set of variable values to be used to fetch the `@refetchable` query. * These variables need to match GraphQL variables referenced inside the fragment. - * However, only the variables that are intended to change for the refetch request need to be specified; any variables referenced by the fragment that are omitted from this input will fall back to using the value specified in the original parent query. So for example, to refetch the fragment with the exact same variables as it was originally fetched, you can call `refetch({})`. - * Similarly, passing an `id` value for the `$id` variable is _*optional*_, unless the fragment wants to be refetched with a different `id`. When refetching a `@refetchable` fragment, Relay will already know the id of the rendered object. + * If the fragment key passed to `useRefetchableFragment` is optional then all non-optional variables must be passed including, potentially, the object's ID since Relay may not have any existing variables to reuse. 
+ * If the fragment key is non-optional, only the variables that are intended to change for the refetch request need to be specified; any variables referenced by the fragment that are omitted from this input will fall back to using the value specified in the original parent query. So for example, to refetch the fragment with the exact same variables as it was originally fetched, you can call `refetch({})`. + * Similarly, if the fragment key is non-optional, passing an `id` value for the `$id` variable is _*optional*_, unless the fragment wants to be refetched with a different `id`. When refetching a non-nullable `@refetchable` fragment, Relay will already know the id of the rendered object. * `options`: *_[Optional]_* options object * `fetchPolicy`: Determines if cached data should be used, and when to send a network request based on cached data that is available. See the [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies/) section for full specification. * `onComplete`: Function that will be called whenever the refetch request has completed, including any incremental data payloads. diff --git a/website/docs/api-reference/hooks/use-subscription.md b/website/docs/api-reference/hooks/use-subscription.md index 2afecf588bb00..01a4822232488 100644 --- a/website/docs/api-reference/hooks/use-subscription.md +++ b/website/docs/api-reference/hooks/use-subscription.md @@ -44,13 +44,8 @@ function UserComponent({ id }) { * `config`: a config of type [`GraphQLSubscriptionConfig`](#type-graphqlsubscriptionconfigtsubscriptionpayload) passed to [`requestSubscription`](../request-subscription/) * `requestSubscriptionFn`: `?(IEnvironment, GraphQLSubscriptionConfig) => Disposable`. An optional function with the same signature as [`requestSubscription`](../request-subscription/), which will be called in its stead. Defaults to `requestSubscription`. - -### Flow Type Parameters - -* `TSubscriptionPayload`: The type of the payloads vended by the subscription. 
You should pass the flow type imported from the auto-generated `.graphql` file corresponding to the subscription, e.g. use `UserDataSubscription` as the type parameter, from `import type {UserDataSubscription} from './__generated__/UserDataSubscription.graphql'`; - ### Behavior * This is only a thin wrapper around the `requestSubscription` API. It will: diff --git a/website/docs/api-reference/relay-resolvers/docblock-format.md b/website/docs/api-reference/relay-resolvers/docblock-format.md new file mode 100644 index 0000000000000..1747b2c676b8d --- /dev/null +++ b/website/docs/api-reference/relay-resolvers/docblock-format.md @@ -0,0 +1,160 @@ +--- +id: docblock-format +title: 'Docblock Format' +slug: /api-reference/relay-resolvers/docblock-format/ +description: Docblock format for Relay Resolvers +--- + +Relay Resolvers allow you to define additional types and fields in your GraphQL schema that are backed by client-side data. To achieve this, the Relay compiler looks for special `@RelayResolver` docblocks in your code. These docblocks define the types and fields in your schema and also tell Relay where to find the resolver functions that implement them. + +For an overview of Relay Resolvers and how to think about them, see the [Relay Resolvers](../../guides/relay-resolvers/introduction.md) guide. This page documents the different docblock tags that the Relay compiler looks for, and how to use them. + +:::note The Relay compiler only looks at docblocks which include the +`@RelayResolver` tag. Any other docblocks will be ignored. +::: + +## `@RelayResolver TypeName` + +The `@RelayResolver` tag followed by a single name defines a new GraphQL type in your schema. By default it is expected to be followed by an exported function whose name matches the type name. The function should accept an ID as its sole argument and return the JavaScript model/object which is the backing data for the type. 
See [`@weak`](#weak) for an alternative way to define the backing data for a type. + +```tsx +/** + * @RelayResolver User + */ +export function User(id): UserModel { + return UserModel.getById(id); +} +``` + +See the [Defining Types](../../guides/relay-resolvers/defining-types.md) guide for more information. + +## `@RelayResolver TypeName.fieldName: FieldTypeName` + +If the typename in a `@RelayResolver` tag is followed by a dot and then a field definition, it defines a new field on the type. The portion following the `.` is expected to follow GraphQL's +[schema definition language](https://spec.graphql.org/June2018/#FieldDefinition). + +Field definitions are expected to be followed by an exported function whose name matches the field name. The function should accept the model/object returned by the type resolver as its sole argument and return the value of the field. + +```tsx +/** + * @RelayResolver User.name: String + */ +export function name(user: UserModel): string { + return user.name; +} +``` + +See the [Defining Fields](../../guides/relay-resolvers/defining-fields.md) guide for more information. + +## `@rootFragment` + +Relay Resolvers may also be used to model data that is derived from other data in the graph. These fields will be automatically recomputed by Relay when the data they depend on changes. + +To define a derived field, use the `@rootFragment` tag on an existing field +definition, and follow it with the name of a fragment that defines the data that the field depends on. The resolver function for the field will be passed a fragment key which can be used to read the fragment data using `readFragment()`. 
+ +```tsx +import {readFragment} from 'relay-runtime'; + +/** + * @RelayResolver User.fullName: String + * @rootFragment UserFullNameFragment + */ +export function fullName(key: UserFullNameFragment$key): string { + const user = readFragment( + graphql` + fragment UserFullNameFragment on User { + firstName + lastName + } + `, + key, + ); + return `${user.firstName} ${user.lastName}`; +} +``` + +See [Derived Fields](../../guides/relay-resolvers/derived-fields.md) for more information. + +## `@live` + +When modeling client state that can change over time, a resolver function which returns a single value is not sufficient. To accommodate this, Relay Resolvers allow you to define a field that returns a stream of values over time. This is done by adding the `@live` tag to a _field or type definition_. + +`@live` resolvers must return an object with the shape of a `LiveStateValue` to allow Relay to read the current value and subscribe to changes. + +```tsx +import type {LiveState} from 'relay-runtime'; + +/** + * @RelayResolver Query.counter: Int + * @live + */ +export function counter(): LiveState { + return { + read: () => store.getState().counter, + subscribe: cb => { + return store.subscribe(cb); + }, + }; +} +``` + +See the [Live Fields](../../guides/relay-resolvers/live-fields.md) guide for +more information. + +## `@weak` + +By default, Relay Resolvers expect the backing data for a type to be returned by a resolver function. However, in some cases objects of a given type may not have identifiers. In this case, you can use the `@RelayResolver TypeName` syntax described above followed by the tag `@weak` to define a "weak" type. + +Weak type declarations are expected to be followed by an exported type +definition whose name matches the type name. 
+ +```tsx +/** + * @RelayResolver ProfilePicture + * @weak + */ +export type ProfilePicture = { + url: string; + width: number; + height: number; +}; +``` + +See the [Weak Types](../../guides/relay-resolvers/defining-types.md#defining-a-weak-type) guide for more information including how to define an edge to a weak type. + +## `@deprecated` + +Just like the GraphQL schema definition language, Relay Resolvers support the `@deprecated` tag to mark a field as deprecated. The tag may be followed by a string which will be used as the deprecation reason. Deprecated fields will +receive special treatment in the editor if you are using the +[Relay VSCode extension](../../editor-support.md). + +```tsx +/** + * @RelayResolver User.name: String + * @deprecated Use `fullName` instead. + */ +export function name(user: UserModel): string { + return user.name; +} +``` + +See the [Deprecated](../../guides/relay-resolvers/deprecated.md) guide for more information. + +## Descriptions + +Any free text in the docblock (text not following a tag) will be used as the description for the type or field. This description will be surfaced in the editor if you are using the [Relay VSCode extension](../../editor-support.md). + +```tsx +/** + * @RelayResolver User.name: String + * + * What's in a name? That which we call a rose by any other name would smell + * just as sweet. + */ +export function name(user: UserModel): string { + return user.name; +} +``` + +See the [Descriptions](../../guides/relay-resolvers/descriptions.md) guide for more information.
diff --git a/website/docs/api-reference/relay-resolvers/runtime-functions.md b/website/docs/api-reference/relay-resolvers/runtime-functions.md new file mode 100644 index 0000000000000..a2c1a29725c0c --- /dev/null +++ b/website/docs/api-reference/relay-resolvers/runtime-functions.md @@ -0,0 +1,95 @@ +--- +id: runtime-functions +title: "Runtime Functions" +slug: /api-reference/relay-resolvers/runtime-functions/ +description: Runtime functions associated with Relay Resolvers +--- + +This page documents the runtime functions associated with Relay Resolvers. For an overview of Relay Resolvers and how to think about them, see the [Relay Resolvers](../../guides/relay-resolvers/introduction.md) guide. + +## LiveResolverStore + +To use Relay Resolvers you must use our experimental Relay Store implementation `LiveResolverStore` imported from `relay-runtime/lib/store/experimental-live-resolvers/LiveResolverStore`. It behaves identically to the default Relay Store but also supports Relay Resolvers. + +It exposes one additional user-facing method `batchLiveStateUpdates()`. See [Live Fields](../../guides/relay-resolvers/live-fields.md#batching) for more details of how to use this method. + +## `readFragment()` + +Derived resolver fields model data that is derived from other data in the graph. To read the data that a derived field depends on, they must use the `readFragment()` function which is exported from `relay-runtime`. This function accepts a GraphQL fragment and a fragment key, and returns the data for the fragment. + +:::warning +`readFragment()` may only be used in Relay Resolvers. It will throw an error if used in any other context. 
+::: + +```tsx +import {readFragment} from "relay-runtime"; + +/** + * @RelayResolver User.fullName: String + * @rootFragment UserFullNameFragment + */ +export function fullName(key: UserFullNameFragment$key): string { + const user = readFragment(graphql` + fragment UserFullNameFragment on User { + firstName + lastName + } + `, key); + return `${user.firstName} ${user.lastName}`; +} +``` + +Note that Relay will ensure your field resolver is recomputed any time data in that fragment changes. + +See the [Derived Fields](../../guides/relay-resolvers/derived-fields.md) guide for more information. + +## `suspenseSentinel()` + +Live resolvers model client state that can change over time. If at some point during that field's lifecycle, the data being read is in a pending state, for example if the data is being fetched from an API, the resolver may return the `suspenseSentinel()` to indicate that the data is not yet available. + +Relay expects that when the data is available, the `LiveStateValue` will notify Relay by calling the subscribe callback. + +```tsx +import {suspenseSentinel} from 'relay-runtime'; + +/** + * @RelayResolver Query.myIp: String + * @live + */ +export function myIp(): LiveState { + return { + read: () => { + const state = store.getState(); + const ipLoadObject = state.ip; + if (ipLoadObject.status === "LOADING") { + return suspenseSentinel(); + } + return state.ip; + }, + subscribe: (callback) => { + return store.subscribe(callback); + }, + }; +} +``` + +See the [Live Fields](../../guides/relay-resolvers/live-fields.md) guide for more information. + +## `useClientQuery()` + +If a query contains only client fields, it may not currently be used with hooks like `usePreloadedQuery` and `useLazyLoadQuery` since both of those hooks assume they will need to issue a network request. If you attempt to use these APIs in Flow you will get a type error. 
+ +Instead, for client-only queries, you can use the `useClientQuery` hook: + +```tsx +import {useClientQuery} from 'react-relay'; + +export function MyComponent() { + const data = useClientQuery(graphql` + query MyQuery { + myIp + } + `); + return
{data.myIp}
; +} +``` diff --git a/website/docs/api-reference/relay-runtime/commit-mutation.md b/website/docs/api-reference/relay-runtime/commit-mutation.md index e45eb1053e6b7..9a08ff5a717f0 100644 --- a/website/docs/api-reference/relay-runtime/commit-mutation.md +++ b/website/docs/api-reference/relay-runtime/commit-mutation.md @@ -53,14 +53,6 @@ function likeFeedback(environment: IEnvironment): Disposable { -### Flow Type Parameters - -* `TMutation`: Type parameter that should corresponds the Flow type for the mutation query. This type is available to import from the the auto-generated file: `.graphql.js`. - * Note that this auto-generated type will implement `MutationParameters`. - -:::caution -If you do not **explicitly** pass this type parameter, the variables, optimistic response and response passed to `onCompleted` **will not be type-checked**! -::: ### Return Value diff --git a/website/docs/api-reference/relay-runtime/request-subscription.md b/website/docs/api-reference/relay-runtime/request-subscription.md index 7d2341166fa05..407d1b2a18b0f 100644 --- a/website/docs/api-reference/relay-runtime/request-subscription.md +++ b/website/docs/api-reference/relay-runtime/request-subscription.md @@ -41,10 +41,6 @@ function createSubscription(environment: IEnvironment): Disposable { -### Flow Type Parameters - -* `TSubscriptionPayload`: The type of the payloads vended by the subscription. You should pass the flow type imported from the auto-generated `.graphql` file corresponding to the subscription, e.g. use `UserDataSubscription` as the type parameter, from `import type {UserDataSubscription} from './__generated__/UserDataSubscription.graphql'`; - ### Return Type * A [`Disposable`](#interface-disposable) that clears the subscription. 
diff --git a/website/docs/api-reference/types/MutationConfig.md b/website/docs/api-reference/types/MutationConfig.md index 4a517246ae310..a0628ccc9f5f1 100644 --- a/website/docs/api-reference/types/MutationConfig.md +++ b/website/docs/api-reference/types/MutationConfig.md @@ -10,7 +10,7 @@ import UploadableMap from './UploadableMap.md'; * `onError`: *_[Optional]_* `(Error) => void`. An optional callback executed if the mutation results in an error. * `onCompleted`: *_[Optional]_* `($ElementType) => void`. An optional callback that is executed when the mutation completes. * The value passed to `onCompleted` is the the mutation fragment, as read out from the store, **after** updaters and declarative mutation directives are applied. This means that data from within unmasked fragments will not be read, and records that were deleted (e.g. by `@deleteRecord`) may also be null. - * `onUnsubscribe`: *_[Optional]_* `() => void`. An optional callback that is executed when the mutation the mutation is unsubscribed, which occurs when the returned `Disposable` is disposed. + * `onUnsubscribe`: *_[Optional]_* `() => void`. An optional callback that is executed when the mutation is unsubscribed, which occurs when the returned `Disposable` is disposed. * `optimisticResponse`: *_[Optional]_* An object whose type matches the raw response type of the mutation. Make sure you decorate your mutation with `@raw_response_type` if you are using this field. * `optimisticUpdater`: *_[Optional]_* [`SelectorStoreUpdater`](#type-selectorstoreupdater). A callback that is executed when `commitMutation` is called, after the `optimisticResponse` has been normalized into the store. * `updater`: *_[Optional]_* [`SelectorStoreUpdater`](#type-selectorstoreupdater). A callback that is executed when a payload is received, after the payload has been written into the store. 
diff --git a/website/docs/debugging/relay-devtools.md b/website/docs/debugging/relay-devtools.md index c36f66a1b5716..de8ceda919dd3 100644 --- a/website/docs/debugging/relay-devtools.md +++ b/website/docs/debugging/relay-devtools.md @@ -23,7 +23,7 @@ import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/interna The internal version of devtools has the latest updates and the process of installation will be much faster. 1. Before downloading the new Devtools, make sure you've deleted all older versions of the extension. -2. Join [Relay DevTools Support](https://fb.workplace.com/groups/655864995271028) group and you will automatically be added to the cpe_relay_devtools_extension gatekeeper. +2. Join [Relay Support](https://fb.workplace.com/groups/relay.support) group and you will automatically be added to the `cpe_relay_devtools_extension` gatekeeper. 3. Wait 20-30 minutes, and it should be downloaded on your Chrome browser 4. Or run `sudo soloctl -i` on your machine to get the extension immediately diff --git a/website/docs/getting-started/installation-and-setup.md b/website/docs/getting-started/installation-and-setup.md index 82a9d8b84aed6..57a685e534981 100644 --- a/website/docs/getting-started/installation-and-setup.md +++ b/website/docs/getting-started/installation-and-setup.md @@ -69,7 +69,7 @@ module.exports = { src: "./src", language: "javascript", // "javascript" | "typescript" | "flow" schema: "./data/schema.graphql", - exclude: ["**/node_modules/**", "**/__mocks__/**", "**/__generated__/**"], + excludes: ["**/node_modules/**", "**/__mocks__/**", "**/__generated__/**"], } ``` diff --git a/website/docs/glossary/glossary.md b/website/docs/glossary/glossary.md index 07c3b7816cd26..711b2ffb784ab 100644 --- a/website/docs/glossary/glossary.md +++ b/website/docs/glossary/glossary.md @@ -393,7 +393,7 @@ A directive that applies to fragments which enables developers to pass masked da Normally, data is read out using `useFragment`. 
However, this function can only be called during the render phase. If store data is needed in a outside of the render phase, a developer has several options: * read that data during the render phase, and pass it to the function/have the function close over that data. (See also [#relay]) -* pass a reference to an `@inline` fragment, which can then be accessed (outside of the render phase) using the `readInlineData` directive. +* pass a reference to an `@inline` fragment, which can then be accessed (outside of the render phase) using the `readInlineData` function. This directive causes them to be read out when the parent fragment is read out, and unmasked by the call to `readInlineData`. @@ -672,8 +672,7 @@ Compare to [fragment resource](#fragment-resource). A directive added to queries which tells Relay to generate types that cover the `optimisticResponse` parameter to `commitMutation`. - -See the [documentation](../guided-tour/updating-data/local-data-updates) for more. +See the [guided tour on updating data](../guided-tour/updating-data/graphql-mutations/#optimistic-response) for more. ## Reader @@ -753,7 +752,7 @@ An older version of Relay. This version of Relay had an API that was heavily foc Relay Resolvers is an experimental Relay feature which enables modeling derived state as client-only fields in Relay’s GraphQL graph. -See also [the Relay Resolvers guide](../guides/relay-resolvers). +See also [the Relay Resolvers Introduction](../guides/relay-resolvers/introduction.md). 
## Release Buffer diff --git a/website/docs/guided-tour/introduction.md b/website/docs/guided-tour/introduction.md index ad1c0c679d360..a99a99075d522 100644 --- a/website/docs/guided-tour/introduction.md +++ b/website/docs/guided-tour/introduction.md @@ -38,6 +38,20 @@ Before getting started, bear in mind that we assume some level of familiarity wi +## On to the Tutorial + + + +* [Tutorial](https://www.internalfb.com/intern/staticdocs/relay/docs/tutorial/intro/) + + + + + +* [Tutorial](https://relay.dev/docs/tutorial/intro/) + + + diff --git a/website/docs/guided-tour/refetching/refetching-queries-with-different-data.md b/website/docs/guided-tour/refetching/refetching-queries-with-different-data.md index 11b88c1c56d1b..6640c5a9c101c 100644 --- a/website/docs/guided-tour/refetching/refetching-queries-with-different-data.md +++ b/website/docs/guided-tour/refetching/refetching-queries-with-different-data.md @@ -147,7 +147,7 @@ function App(props: Props) { // be cached, so we use the 'store-only' // fetchPolicy to avoid suspending. loadQuery({id: 'different-id'}, {fetchPolicy: 'store-only'}); - } + }, error: () => { setIsRefetching(false); } diff --git a/website/docs/guided-tour/rendering/error-states.md b/website/docs/guided-tour/rendering/error-states.md index dff919b539cc5..c07745359f189 100644 --- a/website/docs/guided-tour/rendering/error-states.md +++ b/website/docs/guided-tour/rendering/error-states.md @@ -17,9 +17,9 @@ import FbErrorBoundary from './fb/FbErrorBoundary.md'; As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. 
-We can use [Error Boundary](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. -[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static `getDerivedStateFromError` method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: ```js const React = require('React'); diff --git a/website/docs/guided-tour/reusing-cached-data/filling-in-missing-data.md b/website/docs/guided-tour/reusing-cached-data/filling-in-missing-data.md index 1a48eb06d389a..ee183a527fe5e 100644 --- a/website/docs/guided-tour/reusing-cached-data/filling-in-missing-data.md +++ b/website/docs/guided-tour/reusing-cached-data/filling-in-missing-data.md @@ -52,9 +52,18 @@ const {ROOT_TYPE, Environment} = require('relay-runtime'); const missingFieldHandlers = [ { handle(field, record, argValues): ?string { + // Make sure to add a handler for the node field if ( record != null && - record.__typename === ROOT_TYPE && + record.getType() === ROOT_TYPE && + field.name === 'node' && + 
argValues.hasOwnProperty('id') + ) { + return argValues.id + } + if ( + record != null && + record.getType() === ROOT_TYPE && field.name === 'user' && argValues.hasOwnProperty('id') ) { @@ -63,7 +72,7 @@ const missingFieldHandlers = [ } if ( record != null && - record.__typename === ROOT_TYPE && + record.getType() === ROOT_TYPE && field.name === 'story' && argValues.hasOwnProperty('story_id') ) { diff --git a/website/docs/guided-tour/updating-data/graphql-subscriptions.md b/website/docs/guided-tour/updating-data/graphql-subscriptions.md index a8699767025ce..8e36ffc74c9ff 100644 --- a/website/docs/guided-tour/updating-data/graphql-subscriptions.md +++ b/website/docs/guided-tour/updating-data/graphql-subscriptions.md @@ -100,7 +100,7 @@ const {useMemo} = require('React'); function useFeedbackSubscription( input: FeedbackLikeSubscribeData, ) { - const config = useMemo({ + const config = useMemo(() => ({ subscription: graphql` subscription FeedbackLikeSubscription( $input: FeedbackLikeSubscribeData! @@ -113,7 +113,7 @@ function useFeedbackSubscription( } `, variables: {input}, - }, [input]) + }), [input]); return useSubscription(config); } diff --git a/website/docs/guided-tour/updating-data/imperatively-modifying-linked-fields.md b/website/docs/guided-tour/updating-data/imperatively-modifying-linked-fields.md index 8ca9e7643d250..7c98894e2c030 100644 --- a/website/docs/guided-tour/updating-data/imperatively-modifying-linked-fields.md +++ b/website/docs/guided-tour/updating-data/imperatively-modifying-linked-fields.md @@ -17,16 +17,6 @@ keywords: import DocsRating from '@site/src/core/DocsRating'; import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; - - -:::caution - -Because in TypeScript, [getters and setters cannot have different types](https://github.com/microsoft/TypeScript/issues/43662), and the generated types of getters and setters is not the same, `readUpdatableQuery` is currently unusable with TypeScript. 
`readUpdatableFragment` is usable, as long as the updatable fragment contains only scalar fields. - -::: - - - :::note See also [using readUpdatableQuery to update scalar fields in the store](../imperatively-modifying-store-data). ::: diff --git a/website/docs/guided-tour/updating-data/local-data-updates.md b/website/docs/guided-tour/updating-data/local-data-updates.md index 1ef8381e2c673..90019fbdace84 100644 --- a/website/docs/guided-tour/updating-data/local-data-updates.md +++ b/website/docs/guided-tour/updating-data/local-data-updates.md @@ -59,7 +59,7 @@ const operationDescriptor = createOperationDescriptor(FooQuery, { const payload: FooQueryRawResponse = {...}; -environment.commitPayload(operation, payload); +environment.commitPayload(operationDescriptor, payload); ``` * An `OperationDescriptor` can be created by `createOperationDescriptor`; it takes the query and the query variables. diff --git a/website/docs/guided-tour/updating-data/typesafe-updaters-faq.md b/website/docs/guided-tour/updating-data/typesafe-updaters-faq.md index 3199f0e1c018b..22a2f46ca770f 100644 --- a/website/docs/guided-tour/updating-data/typesafe-updaters-faq.md +++ b/website/docs/guided-tour/updating-data/typesafe-updaters-faq.md @@ -14,16 +14,6 @@ keywords: import DocsRating from '@site/src/core/DocsRating'; import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; - - -:::caution - -Because in TypeScript, [getters and setters cannot have different types](https://github.com/microsoft/TypeScript/issues/43662), and the generated types of getters and setters is not the same, `readUpdatableQuery` is currently unusable with TypeScript. `readUpdatableFragment` is usable, as long as the updatable fragment contains only scalar fields. 
- -::: - - - # Typesafe Updaters FAQ diff --git a/website/docs/guides/alias-directive.md b/website/docs/guides/alias-directive.md new file mode 100644 index 0000000000000..8639f4a3e289f --- /dev/null +++ b/website/docs/guides/alias-directive.md @@ -0,0 +1,163 @@ +--- +id: alias-directive +title: "@alias Directive" +slug: /guides/alias-directive/ +description: Relay guide to @alias +keywords: +- alias +- directive +- fragment +--- + +:::warning +`@alias` is an experimental feature. To try it out, you will need the latest `@main` release of Relay, and to enable the `enable_fragment_aliases` compiler feature flag in your compiler config. +::: + +The `@alias` directive allows you to expose a spread fragment — either a named fragment spread or an inline fragment — as a named field within your selection. This allows Relay to provide additional type safety in the case where your fragment’s type may not match the parent selection. + +:::info +This document describes why the `@alias` directive was introduced, and how it can be used to improve type safety in your Relay applications. **To learn about it's API, see the [API Reference](../api-reference/graphql/graphql-directives.md#alias).** +::: + +Let’s look at an examples where `@alias` can be useful: + +## Abstract Types + +Imagine you have a component that renders information about a Viewer: + +```ts +function MyViewer({viewerKey}) { + const {name} = useFragment(graphql` + fragment MyViewer on Viewer { + name @required(action: THROW) + }`, viewerKey); + + return `My name is ${name}. That's ${name.length} letters long!`; +} +``` + +To use that component in a component that has a fragment on Node (which Viewer implements), you could write something like this: + +```ts +function MyNode({nodeKey}) { + const node = useFragment(graphql` + fragment MyFragment on Node { + ...MyViewer + }`, nodeKey); + + return +} +``` + +Can you spot the problem? We don’t actually know that the node we are passing to `` is actually a Viewer ``. 
If `` tries to render a Comment — which also implements Node — we will get a runtime error in `` because the field name is not present on Comment. + +``` +TypeError: Cannot read properties of undefined (reading 'length') +``` + +Not only do we not get a type letting us know that about this potential issue, but even at runtime, there is no way way to check if node implements Viewer because Viewer is an abstract type! + +## Aliased Fragments + +Aliased fragments can solve this problem. Here’s what `` would look like using them: + +```ts +function MyNode({nodeKey}) { + const node = useFragment(graphql` + fragment MyFragment on Node { + ...MyViewer @alias(as: "my_viewer") + }`, nodeKey); + + // Relay returns the fragment key as its own nullable property + if(node.my_viewer == null) { + return null; + } + + // Because `my_viewer` is typed as nullable, Flow/TypeScript will + // show an error if you try to use the `my_viewer` without first + // performing a null check. + // VVVVVVVVVVVVVV + return +} +``` + +With this approach, you can see that Relay exposes the fragment key as its own nullable property, which allows us to check that node actually implements Viewer and even allows Flow to enforce that the component handles the possibility! + +## @skip and @include + +A similar problem can occur when using `@skip` and `@include` directives on fragments. In order to safely use the spread fragment, you need to check if it was fetched. Historically this has required gaining access to the query variable that was used to determine if the fragment was skipped or included. 
+ +With `@alias`, you can now check if the fragment was fetched by simply assigning the fragment an alias, and checking if the alias is null: + +```ts +function MyUser({userKey}) { + const user = useFragment(graphql` + fragment MyFragment on User { + ...ConditionalData @skip(if: $someVar) @alias + }`, userKey); + + if(user.ConditionalData == null) { + return "No data fetched"; + } + return +} +``` + +## Enforced Safety + +We've outlined two different ways that fragments can be unsafe in Relay today without `@alias`. To help prevent runtime issues resulting from these unsafe edge cases, Relay will soon require that all conditionally fetched fragments be aliased. + +To experiment with this validation in your project today, you can enable the experimental `enforce_fragment_alias_where_ambiguous` compiler feature flag for your project. To enable incremental adoption of this enforcement, Relay exposes a directive `@dangerously_unaliased_fixme` which will suppress these enforcement errors. This will allow you to enable the enforcement for all new spreads without first needing to migrate all existing issues. + +The [Relay VSCode extension](../editor-support.md) offers quick fixes to add either `@alias` or `@dangerously_unaliased_fixme` to unsafe fragments. + +## Use with @required + +`@alias` can be used with [`@required(action: NONE)`](./required-directive.md) to group together required fields. In the following example, we group `name` and `email` together as `requiredFields`. If either is null, that null will bubble up to, the `user.requiredFields` field, making it null. This allows us to perform a single check, without impacting the `id` field. + +```ts +function MyUser({userKey}) { + const user = useFragment(graphql` + fragment MyFragment on User { + id + ... 
@alias(as: "requiredFields") { + name @required(action: NONE) + email @required(action: NONE) + } + }`, userKey); + + if(user.requiredFields == null) { + return `Missing required fields for user ${user.id}`; + } + return `Hello ${user.requiredFields.name} (${user.requiredFields.email}).!`; +} +``` + +:::note +Using `@required` on a fragment spread that has an `@alias` is not currently supported, but we may add support in the future. +::: + +## Under the Hood + +For people familiar with Relay, or curious to learn, here is a brief description of how this feature is implemented: + +Under the hood, `@alias` is implemented entirely within Relay (compiler and runtime). It does not require any server support. The Relay compiler interprets the `@alias` directive, and generates types indicating that the fragment key, or inline fragment data, will be attached to the new field, rather than directly on the parent object. In the Relay runtime artifact, it wraps the fragment node with a new node indicating the name of the alias and additional information about the type of the fragment. + +The Relay compiler also inserts an additional field into the spread which allows it to determine if the fragment has matched: + +```graphql +fragment Foo on Node { + ... on Viewer { + isViewer: __typename # <-- Relay inserts this + name + } +} +``` + +Relay can now check for the existence of the `isViewer` field in the response to know if the fragment matched. + +When Relay reads the content of your fragment out of the store using its runtime artifact, it uses this information to attach the fragment key to this new field, rather than attaching it directly to the parent object. + +### Related + +While `@alias` is a Relay-specific feature, it draws inspiration from fragment modularity as outlined in the GraphQL [RFC Fragment Modularity](https://github.com/graphql/graphql-wg/blob/main/rfcs/FragmentModularity.md). 
diff --git a/website/docs/guides/relay-resolvers.md b/website/docs/guides/relay-resolvers.md deleted file mode 100644 index 1a3ae6f1ff69c..0000000000000 --- a/website/docs/guides/relay-resolvers.md +++ /dev/null @@ -1,249 +0,0 @@ ---- -id: relay-resolvers -title: "Relay Resolvers" -slug: /guides/relay-resolvers/ -description: Relay guide to Relay Resolvers -keywords: -- resolvers -- derived -- selectors -- reactive ---- - -import DocsRating from '@site/src/core/DocsRating'; -import {FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; - -Relay Resolvers is an experimental Relay feature which enables modeling derived state as client-only fields in Relay’s GraphQL graph. Similar to server [resolvers](https://graphql.org/learn/execution/), a Relay Resolver is a function which defines how to compute the value of a GraphQL field. However, unlike server resolvers, Relay Resolvers are evaluated reactively on the client. A Relay Resolver reads fields off of its parent object and returns a derived result. If any of those fields change, Relay will automatically reevaluate the resolver. - -Relay Resolvers are particularly valuable in apps which store client state in Relay via [client schema extensions](https://relay.dev/docs/guides/client-schema-extensions/), since they allow you to compose together client data, server data — and even other Relay Resolver fields — into fields which update reactively as the underlying data changes. - -Relay Resolvers were originally conceived of as an alternative to Flux-style [selectors](https://redux.js.org/usage/deriving-data-selectors) and can be thought of as providing similar capabilities. - -Concretely, Relay Resolvers are defined as functions annotated with a special docblock syntax. The Relay compiler will automatically recognize these docblocks in any JavaScript file and use them to extend the schema that is available within your project. 
- -Let’s look at an example Relay Resolver: - -```jsx -import type {UserGreetingResolver$key} from 'UserGreetingResolver.graphql'; -import {graphql} from 'relay-runtime'; -import {readFragment} from 'relay-runtime/store/ResolverFragments'; - -/** - * @RelayResolver - * - * @onType User - * @fieldName greeting - * @rootFragment UserGreetingResolver - * - * A greeting for the user which includes their name and title. - */ -export default function userGreetingResolver(userKey: UserGreetingResolver$key): string { - const user = readFragment(graphql` - fragment UserGreetingResolver on User { - honorific - last_name - }`, userKey); - - return `Hello ${user.honorific} ${user.last_name}!`; -} -``` - -This resolver adds a new field `greeting` to the `User` object type. It reads the `honorific` and `last_name` fields off of the parent `User` and derives a greeting string. The new `greeting` field may now be used by any Relay component throughout your project which has access to a `User`. - -Consuming this new field looks identical to consuming a field defined in the server schema: - -```jsx -function MyGreeting({userKey}) { - const user = useFragment(` - fragment MyGreeting on User { - greeting - }`, userKey); - return

{user.greeting}

; -} -``` - -## Docblock Fields - -The Relay compiler looks for the following fields in any docblocks that includes `@RelayResolver`: - -- `@RelayResolver` (required) -- `@onType` or `@onInterface` (required) The GraphQL type/interface on which the new field should be exposed -- `@fieldName` (required) The name of the new field -- `@rootFragment` (required) The name of the fragment read by `readFragment` -- `@deprecated` (optional) Indicates that the field is [deprecated](https://spec.graphql.org/June2018/#sec--deprecated). May be optionally followed text giving the reason that the field is deprecated. - -The docblock may also contain free text. This free text will be used as the field’s human-readable description, which will be surfaced in Relay’s editor support on hover and in autocomplete results. - -## Relay Resolver Signature - -In order for Relay to be able to call a Relay Resolver, it must conform to a set of conventions: - -1. The resolver function must accept a single argument, which is the key for its root fragment. -2. The resolver function must be the default export of its module (only one resolver per module) -3. The resolver must read its fragment using the special `readFragment` function. -4. The resolver function must be pure -5. The resolver’s return value must be immutable - -Unlike server resolvers, Relay Resolvers may return any JavaScript value. This includes classes, functions and arrays. However, we generally encourage having Relay Resolvers return scalar values and only returning more complex JavaScript values (like functions) as an escape hatch. - - -## Lint Rule - -In many cases, the contents of the docblock can be derived from the javascript implementation. In those cases, the [`relay-resolvers`](https://www.internalfb.com/eslint/relay-resolvers) ESLint rule rule will offer auto-fixes to derive the docblock from the implementation and ensure that the two remain in sync. 
The lint rule also enforces a naming convention for resolver function and modules names. - - -## How They Work - -When parsing your project, the Relay compiler looks for `@RelayResolver` docblocks and uses them to add special fields to the GraphQL schema. If a query or fragment references one of these fields, Relay’s generated artifact for that query or fragment will automatically include an `import` of the resolver function. *Note that this can happen recursively if the Relay Resolver field you are reading itself reads one or more Relay Resolver fields.* - -When the field is first read by a component, Relay will evaluate the Relay Resolver function and cache the result. Other components that read the same field will read the same cached value. If at any point any of the fields that the resolver reads (via its root fragment) change, Relay will reevaluate the resolver. If the return value changes (determined by `===` equality) Relay will propagate that change to all components (and other Relay Resolvers) that are currently reading the field. - -## Error Handling - -In order to make product code as robust as possible, Relay Resolvers follow the GraphQL spec’s documented [best practice](https://graphql.org/learn/best-practices/#nullability) of returning null when a field resolver errors. Instead of throwing, errors thrown by Relay Resolvers will be logged to your environment's configured `requiredFieldLogger` with an event of kind `"relay_resolver.error"`. If you make use of Relay Resolves you should be sure to configure your environment with a `requiredFieldLogger` which reports those events to whatever system you use for tracking runtime errors. - -If your component requires a non-null value in order to render, and can’t provide a reasonable fallback experience, you can annotate the field access with `@required`. - -## Passing arguments to resolver fields - -For resolvers (and live resolvers) we support two ways of defining field arguments: - -1. 
GraphQL: Arguments that are defined via @argumentDefinitions on the resolver's fragment. -2. JS Runtime: Arguments that can be passed directly to the resolver function. -3. You can also combine these, and define arguments on the fragment and on the resolver's field itself, Relay will validate the naming (these arguments have to have different names), and pass GraphQL arguments to fragment, and JS arguments to the resolver's function. - - -Let’s look at the example 1: - -## Defining Resolver field with Fragment Arguments - -```js -/** -* @RelayResolver -* @fieldName **my_resolver_field** -* @onType **MyType** -* @rootFragment myResolverFragment -*/ -function myResolver(key) { - const data = readFragment(graphql` - fragment myResolverFragment on MyType - @argumentDefinitions(**my_arg**: {type: "Float!"}) { - field_with_arg(arg: $my_arg) { - __typename - } - } - `, key); - - return data.field_with_arg.__typename; -} -``` - -### Using Resolver field with arguments for Fragment - -This resolver will extend the **MyType** with the new field **my_resolver_field(my_arg: Float!)** and the fragment arguments for **myResolverFragment** can be passed directly to this field. - -```js -const data = useLazyLoadQuery(graphql` - query myQuery($id: ID, $my_arg: Float!) { - node(id: $id) { - ... on MyType { - my_resolver_field(my_arg: $my_arg) - } - } - } -`, { id: "some id", my_arg: 2.5 }); -``` - -For these fragment arguments relay will pass then all queries/fragments where the resolver field is used to the resolver’s fragment. - - -### Defining Resolver field with Runtime (JS) Arguments - -Relay resolvers also support runtime arguments that are not visible/passed to fragments, but are passed to the resolver function itself. 
- -You can define these fragments using GraphQL’s [Schema Definition Language](https://graphql.org/learn/schema/) in the **@fieldName** - -```js -/** -* @RelayResolver -* @fieldName **my_resolver_field(my_arg: String, my_other_arg: Int)** -* @onType **MyType** -* @rootFragment myResolverFragment -*/ -function myResolver(key, args) { - if (args.my_other_arg === 0) { - return "The other arg is 0"; - } - - const data = readFragment(graphql` - fragment myResolverFragment on MyType - some_field - } - `, key); - return data.some_field.concat(args.my_arg); -} -``` - -### Using Resolver field with runtime arguments - -This resolver will extend **MyType** with the new field **my_resolver_field(my_arg: String, my_other_arg: Int).** - -```js -const data = useLazyLoadQuery(graphql` - query myQuery($id: ID, $my_arg: String!) { - node(id: $id) { - ... on MyType { - my_resolver_field(my_arg: $my_arg, my_other_arg: 1) - } - } - } -`, { id: "some id", my_arg: "hello world!"}); -``` - -### Defining Resolver field with Combined Arguments - -We can also combine both of these approaches and define field arguments both on the resolver’s fragment and on the field itself: - -```js -/** -* @RelayResolver -* @fieldName **my_resolver_field(my_js_arg: String)** -* @onType **MyType** -* @rootFragment myResolverFragment -*/ -function myResolver(key, args) { - const data = readFragment(graphql` - fragment myResolverFragment on MyType - @argumentDefinitions(**my_gql_arg**: {type: "Float!"}) { - field_with_arg(arg: $my_arg) { - __typename - } - } - `, key); - - return `Hello ${args.my_js_arg}, ${data.field_with_arg.__typename}`; -} -``` - -### Using Resolver field with combined arguments - -Relay will extend the **MyType** with the new resolver field that has two arguments: **my_resolver_field(my_js_arg: String, my_gql_arg: Float!) - -** -Example query: - -```js -const data = useLazyLoadQuery(graphql` - query myQuery($id: ID, $my_arg: String!) { - node(id: $id) { - ... 
on MyType { - my_resolver_field(my_js_arg: "World", my_qql_arg: 2.5) - } - } - } -`, { id: "some id" }); -``` - -## Current Limitations - -- Relay Resolvers are still considered experimental. To use them you must ensure that the `ENABLE_RELAY_RESOLVERS` runtime feature flag is enabled, and that the `enable_relay_resolver_transform` feature flag is enabled in your project’s Relay config file. diff --git a/website/docs/guides/relay-resolvers/defining-fields.md b/website/docs/guides/relay-resolvers/defining-fields.md new file mode 100644 index 0000000000000..01f4c06901c2a --- /dev/null +++ b/website/docs/guides/relay-resolvers/defining-fields.md @@ -0,0 +1,40 @@ +--- +id: defining-fields +title: "Defining Fields" +slug: /guides/relay-resolvers/defining-fields/ +description: How to define fields for your client state schema using Relay Resolvers +--- + +Defining fields on a client type is as simple as defining a resolver function which accepts an instance of your model type as its first argument and returns the field value. Note that the exported function name must match the field name. + +## Syntax + +Relay resolvers are marked via docblocks above a resolver function. `@RelayResolver` is the tag to indicate the start of any relay resolver definition. To define a field on a GraphQL model type `TypeName`, add `TypeName` followed by a dot followed by the field definition using GraphQL's schema definition language: https://spec.graphql.org/June2018/#FieldDefinition + +```js +/** +* @RelayResolver TypeName.fieldName(arg1: ArgTypeName): FieldTypeName +*/ +``` + +A simple field might look something like this: + +```tsx +/** + * @RelayResolver User.name: String + */ +export function name(user: UserModel): string { + return user.name; +} +``` + +:::note +Relay will take care of efficiently recomputing resolvers when any of their inputs (in this case the model instance) change, so you don’t need to worry about memoizing your resolver function. 
+::: + +This is just a simple resolver that reads from the model type and returns a scalar value. To learn about the full menu of capabilities that resolver fields support see: + +* [Resolver Return Types](./return-types.md) +* [Field Arguments](./field-arguments.md) +* [Live Fields](./live-fields.md) +* [Derived Fields](./derived-fields.md) diff --git a/website/docs/guides/relay-resolvers/defining-types.md b/website/docs/guides/relay-resolvers/defining-types.md new file mode 100644 index 0000000000000..db57dd0c27916 --- /dev/null +++ b/website/docs/guides/relay-resolvers/defining-types.md @@ -0,0 +1,49 @@ +--- +id: defining-types +title: "Defining Types" +slug: /guides/relay-resolvers/defining-types/ +description: How to define types for your client state schema +--- + +You can think of client state resolvers as defining a GraphQL server that runs in the client. Just like with a server-defined GraphQL server you will need to define the _types_ that exist in your schema as well as the _fields_ on those types. Just like a GraphQL server, fields are defined as functions that compute the GraphQL value from the parent object. In Relay Resolvers we call this parent JavaScript object the "model" of the type. + +:::info +Each client state GraphQL type is backed by a JavaScript object type which these docs will refer to as its "model type". Resolvers "on" this type will be passed an instance of this type as their first argument. +::: + +Resolver types are defined using the `@RelayResolver` tag followed by the name of the type you are defining. By default Relay assumes your client types are “strong”, meaning each instance has an ID which is unique within the type. This property allows Relay to apply a number of optimizations, such as memoizing resolver computation. 
+ +### Defining a “strong” type + +Strong types are defined by a docblock followed by an exported function whose name matches the type's name, and which accepts an ID as its only argument and returns an instance of the type’s model. Resolvers that define edges to this type will simply need to return the ID of the object, rather than deriving the model themselves. + +```tsx +/** + * @RelayResolver User + */ +export function User(id: DataID): UserModel { + return UserService.getById(id); +} +``` + +:::tip +Elsewhere in the docs this function is referred to as the “model resolver” for the type. +::: + +Generally objects in your client data store will be able to change over time. To support this Relay Resolvers support resolvers that subscribe to the underlying data source. To learn about this, see the page on [Live Fields](./live-fields.md). + +### Defining a “weak” type + +If your type does not have a unique identifier, you can define it as “weak” by adding the `@weak` docblock tag. Weak types are defined by a docblock followed by an exported type definition matching the types name. Resolvers that define edges to weak types will need to return a fully populated model object matching this type. + +```tsx +/** + * @RelayResolver ProfilePicture + * @weak + */ +export type ProfilePicture = { url: string, height: number, width: number }; +``` + +:::tip +Generally weak types are used for creating a namespace for a set of fields that ultimately "belong" to a parent object. 
+::: diff --git a/website/docs/guides/relay-resolvers/deprecated.md b/website/docs/guides/relay-resolvers/deprecated.md new file mode 100644 index 0000000000000..6cb0f7f8aebad --- /dev/null +++ b/website/docs/guides/relay-resolvers/deprecated.md @@ -0,0 +1,27 @@ +--- +id: deprecated +title: "Deprecated" +slug: /guides/relay-resolvers/deprecated/ +description: Marking fields in your client state schema as @deprecated +--- + +GraphQL allows you to mark fields as `@deprecated` and provide an optional human-readable reason. Relay Resolvers bring this same convention to your client data. By marking fields in your client state schema as deprecated they will receive the same treatment as deprecated fields in your server GraphQL schema. + +Deprecated fields are surfaced as such in Relay's [VSCode extension](https://relay.dev/docs/editor-support/) in autocomplete and on hover. Additionally, they will be rendered as greyed out and ~~struck through~~ in the editor. + +:::info +GraphQL deprecation reasons are expected to be written in markdown. Relay Resolvers will render these descriptions as markdown in the VSCode extension. +::: + +You can mark a field as deprecated by adding the `@deprecated` docblock tag followed by optional text to specify the reason. 
+ +```tsx +/** + * @RelayResolver Author.fullName: String + * + * @deprecated Google "Falsehoods Programmers Believe About Names" + */ +export function fullName(author: AuthorModel): string { + return `${author.firstName} ${author.lastName}`; +} +``` diff --git a/website/docs/guides/relay-resolvers/derived-fields.md b/website/docs/guides/relay-resolvers/derived-fields.md new file mode 100644 index 0000000000000..0d9eb736d0ff3 --- /dev/null +++ b/website/docs/guides/relay-resolvers/derived-fields.md @@ -0,0 +1,85 @@ +--- +id: derived-fields +title: "Derived Fields" +slug: /guides/relay-resolvers/derived-fields/ +description: Defining field which are a pure function of other fields +--- + +In addition to modeling client state, Relay Resolvers also allow you to define fields which are a pure function of other fields. These fields are called derived fields and can be defined on any type no matter if it's defined on the server or client. + +For globally relevant data, resolvers have a few advantages of alternative solutions like [React Hooks](https://react.dev/learn/reusing-logic-with-custom-hooks): + +* **Global memoization** - Relay Resolvers automatically memoize derived fields. Unlike hooks, this cache is shared by all components in your application, so if two sibling components both read the same field, the computation will only be performed once. +* **Efficient updates** - If your derived resolver recomputes but derives the same value, Relay can avoid rerendering components that read the field. +* **Composable** - Derived fields can be composed with other derived fields, allowing you to build up complex, but explicit computation graphs. +* **Discoverable** - Values in the graph are discoverable via the GraphQL schema and thus are more likely to be discovered and reused instead of reinvented. +* **Documented** - GraphQL's field documentation and structured deprecation model make it easy to understand the purpose of a field and its intended use. 
+ +## Defining a Derived Resolver + +Derived resolvers look like any other resolver except that they read GraphQL data instead of being computed from a parent model type. Derived resolvers read GraphQL data by defining a "root fragment" which is a GraphQL fragment defined on the parent type of the field. + +The root fragment is defined using the `@rootFragment` docblock tag followed by the name of the fragment. This tells Relay to pass the resolver function a fragment key for that fragment. The fragment data may then be read using `readFragment` imported from `relay-runtime`. + +```tsx +import {readFragment} from 'relay-runtime'; + +/** + * @RelayResolver User.fullName: String + * @rootFragment UserFullNameFragment + */ +export function fullName(key: UserFullNameFragment$key): string { + const user = readFragment(graphql` + fragment UserFullNameFragment on User { + firstName + lastName + } + `, key); + return `${user.firstName} ${user.lastName}`; +} +``` + +:::info +Relay will track all the values read from the fragment and automatically recompute the resolver when any of those values change. +::: + +## Composition + +One powerful feature of derived resolvers is that they can read other Relay Resolver fields. This means you can define a derived resolver that combines server data, client data and even other derived resolvers. This allows you to build up complex, but explicit, computation graphs. 
+ +```tsx +/** + * @RelayResolver CheckoutItem.isValid: Boolean + * @rootFragment CheckoutItemFragment + */ +export function isValid(key): boolean { + const item = readFragment(graphql` + fragment CheckoutItemFragment on CheckoutItem { + product { + price + } + quantity + } + `, key); + return item.product.price * item.quantity > 0; +} + +/** + * @RelayResolver ShoppingCart.canCheckout: Boolean + * @rootFragment ShoppingCartFragment + */ +export function canCheckout(key): boolean { + const cart = readFragment(graphql` + fragment ShoppingCartFragment on ShoppingCart { + items { + isValid + } + } + `, key); + return cart.items.every(item => item.isValid); +} +``` + +## Passing Arguments to your @rootFragment + +If a field in a derived resolver's root fragment requires arguments, you can pass them by adding an `@arguments` tag to the docblock tag. The `@argument` tag takes the name of the argument and the type of the argument. The argument type must be a valid GraphQL input type. For more information about arguments and Resolvers see [Field Arguments](./field-arguments.md). diff --git a/website/docs/guides/relay-resolvers/descriptions.md b/website/docs/guides/relay-resolvers/descriptions.md new file mode 100644 index 0000000000000..1e5f4ca6eb61a --- /dev/null +++ b/website/docs/guides/relay-resolvers/descriptions.md @@ -0,0 +1,44 @@ +--- +id: descriptions +title: "Descriptions" +slug: /guides/relay-resolvers/descriptions/ +description: Adding human readable descriptions to your resolver schema +--- + +One killer feature of GraphQL is that makes the data in your schema discoverable. Relay Resolvers bring this structure to your client data. By adding descriptions to your resolvers you can make your client state schema self-documenting as well. + +Descriptions are surfaced by Relay's [VSCode extension](https://relay.dev/docs/editor-support/) in autocomplete and on hover. 
+ +You can add a description to a type by adding free text to the docblock tag: + +:::info +GraphQL descriptions are expected to be written in markdown. Relay Resolvers will render these descriptions as markdown in the VSCode extension. +::: + +## Types + +```tsx +/** + * @RelayResolver Author + * + * An author in our **amazing** CMS. Authors can + * write posts but not necessarily change their permissions. + */ +export function Author(id: DataID): AuthorModel { + return AuthorService.getById(id); +} +``` + +## Fields + +```tsx +/** + * @RelayResolver Author.fullName: String + * + * The author's first and last name. Does not include + * any [honorifics](https://en.wikipedia.org/wiki/Honorific). + */ +export function fullName(author: AuthorModel): string { + return `${author.firstName} ${author.lastName}`; +} +``` diff --git a/website/docs/guides/relay-resolvers/enabling.md b/website/docs/guides/relay-resolvers/enabling.md new file mode 100644 index 0000000000000..77c1a4cf4e2a5 --- /dev/null +++ b/website/docs/guides/relay-resolvers/enabling.md @@ -0,0 +1,54 @@ +--- +id: enabling +title: "Enabling Relay Resolvers" +slug: /guides/relay-resolvers/enabling-resolvers +description: Enabling experimental Relay Resolvers +--- + +Relay Resolvers are still an experimental feature in Relay. As such they require additional configuration to enable. You may also find that the APIs in the documentation are not yet reflected in our community maintained TypeScript types. 
+ +## Runtime + +Relay Resolvers must be enabled in your runtime code by using our experimental `LiveResolverStore` as your Relay store and enabling the `ENABLE_RELAY_RESOLVERS` runtime feature flag: + +```ts +import { Environment, RecordSource, RelayFeatureFlags } from "relay-runtime"; +// highlight-next-line +import LiveResolverStore from "relay-runtime/lib/store/experimental-live-resolvers/LiveResolverStore"; + +RelayFeatureFlags.ENABLE_RELAY_RESOLVERS = true; + +// It is recommended to log errors thrown by Resolvers +function fieldLogger(event) { + if(event.kind === "relay_resolver.error") { + // Log this somewhere! + console.warn(`Resolver error encountered in ${event.owner}.${event.fieldPath}`) + console.warn(event.error) + } +} + +const environment = new Environment({ + network: Network.create(/* your fetch function here */), + store: new LiveResolverStore(new RecordSource()), + relayFieldLogger: fieldLogger +}); + +// ... create your Relay context with your environment +``` + +## Compiler + +You must enable the `"enable_relay_resolver_transform"` feature flag in your relay compiler config: + + +```json title="relay.config.json" +{ + "src": "./src", + "schema": "./schema.graphql", + "language": "typescript", + "featureFlags": { + // highlight-next-line + "enable_relay_resolver_transform": true + } +} +``` diff --git a/website/docs/guides/relay-resolvers/errors.md b/website/docs/guides/relay-resolvers/errors.md new file mode 100644 index 0000000000000..7afc1d2a99f33 --- /dev/null +++ b/website/docs/guides/relay-resolvers/errors.md @@ -0,0 +1,48 @@ +--- +id: errors +title: "Error Handling" +slug: /guides/relay-resolvers/errors/ +description: How Relay handles errors throw by resolvers +--- + +Just like GraphQL servers, Relay Resolvers support field-level error handling. If an individual resolver throws an error, when that field is read, Relay will log that error to the environment's user-provided `relayFieldLogger` logger, and the field will become null. 
+ +This provides important symmetry with GraphQL servers. Resolvers are designed to enable a smooth migration path to allow teams to start with fields defined client-side using Resolvers and then eventually migrate them to a server. + +If a resolver throws an error, Relay will log the error to the user-provided error logger, and will return null for the field which the resolver defines. To enable this behavior at runtime, the Relay compiler will not allow resolver fields to be typed as non-nullable. + +The object passed to the `relayFieldLogger` will have the following shape: + +```ts +type ResolverErrorEvent = { + kind: 'relay_resolver.error', + // The name of the fragment/query in which the field was read + owner: string, + // The path from the owner root to the field which threw the error + fieldPath: string, + // The error thrown by the resolver + error: Error, +} +``` + +An example logger might look like: + +```ts +function fieldLogger(event) { + if(event.kind === "relay_resolver.error") { + // Log this somewhere! + console.warn(`Resolver error encountered in ${event.owner}.${event.fieldPath}`) + console.warn(event.error) + } +} + +const environment = new Environment({ + network: Network.create(/* your fetch function here */), + store: new LiveResolverStore(new RecordSource()), + relayFieldLogger: fieldLogger +}); +``` + +:::note +[Live Resolvers](./live-fields.md) can potentially throw errors when they are first evaluated or when their `.read()` method is called. Both types of errors will be handled identically by Relay. 
+::: diff --git a/website/docs/guides/relay-resolvers/field-arguments.md b/website/docs/guides/relay-resolvers/field-arguments.md new file mode 100644 index 0000000000000..13489e3566820 --- /dev/null +++ b/website/docs/guides/relay-resolvers/field-arguments.md @@ -0,0 +1,63 @@ +--- +id: field-arguments +title: "Field Arguments" +slug: /guides/relay-resolvers/field-arguments/ +description: Defining field arguments for resolver fields +--- + +## Runtime Arguments + +If your resolver needs access to argument data at runtime, you can simply define arguments in the field definition of your resolver's docblock, and then read the argument as a property on the second argument to your resolver function. + +```tsx +/** + * @RelayResolver User.greet(salutation: String!): String + */ +export function greet(user: UserModel, args: { salutation: string }): string { + return `${args.salutation}, ${user.name}!`; +} +``` + +Consuming this field will require passing the argument to the field in your GraphQL query: + +```graphql +query MyQuery($salutation: String!) { + me { + greet(salutation: $salutation) + } +} +``` + +This, in turn will require passing the argument when you fetch the query. + +## Passing Arguments to your @rootFragment + +If you are defining a [derived resolver](./derived-fields.md) and one of the fields in its root fragment requires arguments, you must define an explicit fragment argument using [@argumentDefinitions](../../api-reference/graphql/graphql-directives.md#argumentdefinitions) in your fragment definition. Your resolver field will then expect this argument to be passed as a field argument. 
+ +```tsx +/** + * @RelayResolver User.fancyGreeting: String + * @rootFragment UserFancyGreetingFragment + */ +export function fancyGreeting(key: UserFancyGreetingFragment$key): string { + const user = readFragment(graphql` + fragment UserFancyGreetingFragment on User @argumentDefinitions( + salutation: {type: "String"}, + ) { + name + greet(salutation: $salutation) + } + `, key); + return `${user.name} says ${user.greet}`; +} +``` + +Consuming this field will require passing the argument to the field in your GraphQL query: + +```graphql +query MyQuery($salutation: String!) { + me { + fancyGreeting(salutation: $salutation) + } +} +``` diff --git a/website/docs/guides/relay-resolvers/introduction.md b/website/docs/guides/relay-resolvers/introduction.md new file mode 100644 index 0000000000000..407b85f0570c3 --- /dev/null +++ b/website/docs/guides/relay-resolvers/introduction.md @@ -0,0 +1,66 @@ +--- +id: introduction +title: "Introduction to Relay Resolvers" +slug: /guides/relay-resolvers/introduction +description: An introduction to Relay Resolvers +--- + +Relay Resolvers are a **experimental** Relay feature which allow you to augment Relay’s GraphQL graph with values that are known only on the client. This allows you to schematize client state in the same way that you model server state, and to use Relay’s familiar data-fetching APIs to access that state. Client state can include both data from client-side data stores as well as derived data that is computed from other values in the graph. + +By modeling derived and client state in the graph, Relay can present a unified data access API for product developers. All globally relevant data that a product engineer wants to access can be discovered and efficiently obtained from the same structured GraphQL schema. 
Additionally, resolvers provide a number of runtime benefits: + +- Global memoization with garbage collection +- Efficient reactive recomputation of resolvers +- Efficient UI updates when data changes + +You can think of resolvers as additional schema types and fields which are defined in your client code and are stitched into your server’s schema. Just like you define resolver methods/functions which model your fields on the server, Relay Resolvers are defined using resolver functions. + +## Use Cases for Relay Resolvers + +Relay Resolvers are useful for modeling a number of different kinds of data. Here are some examples of types of data that can be schematized using Relay Resolvers and made available to product code: + +* **User-Created Data** - You can model complex form state, or other data that should outlive a specific component tree +* **Client-Side Database** - Persistent data stores like IndexedDB, localStorage, or SQLite +* **Third-Party APIs** - Data that is fetched from a third-party API directly by the client, for example search results from a third-party search provider +* **Encrypted Data** - End-to-end encrypted data that is opaque on the server and thus cannot be modeled in the server schema +* **Legacy Data Stores** - During the adoption of Relay and GraphQL, data from pre-existing data layers, like Redux, can be exposed in the graph to ensure migrated and un-migrated portions of your app always remain in sync + +## Defining a Resolver + +:::warning +Because Resolvers are still an **experimental feature**, before you can begin to use Resolvers in Relay, you'll need to enable them. See [Enabling Relay Resolvers](./enabling.md) for instructions. +::: + +Resolvers are defined using exported functions that are annotated with a special [`@RelayResolver` docblock](../../api-reference/relay-resolvers/docblock-format.md).
These docblocks are visible to the Relay compiler, and allow the compiler to build up your client schema and automatically import your function in Relay’s generated artifacts. Resolver functions may be defined in any file in your Relay project, though you may wish to define some convention for where they live within your codebase. + +The simplest resolver augments an existing type and does not have any inputs: + +```tsx +/** + * @RelayResolver Query.greeting: String + */ +export function greeting(): string { + return "Hello World"; +} +``` + +Consuming resolvers is identical to consuming a server field. Product code doesn't need to know which kind of field it is reading. + +```tsx +import {useClientQuery, graphql} from 'react-relay'; + +function Greeting() { + const data = useClientQuery(graphql` + query GreetingQuery { + greeting + }`, {}); + return <div>{data.greeting}</div>; +} +``` + +:::note +If your query contains only client-defined fields, you will need to use a different query API to fetch data. Note how this example uses `useClientQuery` instead of `useLazyLoadQuery` or `usePreloadedQuery`. If your query also contains server data, you can use the standard `useLazyLoadQuery` or `usePreloadedQuery` APIs. + +We intend to remove this requirement in future versions of Relay. +::: diff --git a/website/docs/guides/relay-resolvers/limitations.md b/website/docs/guides/relay-resolvers/limitations.md new file mode 100644 index 0000000000000..8b403bf2d0f3b --- /dev/null +++ b/website/docs/guides/relay-resolvers/limitations.md @@ -0,0 +1,38 @@ +--- +id: limitations +title: "Limitations" +slug: /guides/relay-resolvers/limitations/ +description: Limitations of Relay Resolvers +--- + +Relay Resolvers do have some limitations. Here we will collect a list of known limitations and provide alternatives where possible. + +## No context or info arguments + +In a full GraphQL implementation, resolvers would have access to a `context` and `info` argument. These arguments are not available in Relay Resolvers today. Supporting context is something we would like to do in the future, but have not yet implemented. + +## No support for abstract types + +Today it is not possible to define an interface or union with multiple concrete types using Relay Resolvers. This is something we would like to support in the future, but have not yet implemented. + +## All fields must be nullable + +Today all resolvers must be typed as nullable in order to support coercing errors to null without having to implement null bubbling. In the future we intend Resolvers to support some version of [strict semantic nullability](https://github.com/graphql/graphql-wg/discussions/1410). + +## Not all GraphQL constructs are supported + +Today Relay Resolvers only support a subset of GraphQL constructs.
For example, it's not currently possible to define input types, enums or interfaces using Relay Resolvers. + +## No support for mutations + +Today Relay Resolvers only support the read path. Defining mutation fields is not yet supported. We are working to understand what it means to perform a mutation against a reactive schema, and hope to support them in the future. + +## Resolvers are always evaluated lazily + +Today Relay Resolvers are always evaluated lazily on a per-fragment basis. This has the advantage that if a resolver is not read, it will never be evaluated. However, it can lead to issues with waterfalls if your client schema ends up making async requests to fetch data as it's read. We are actively exploring other execution strategies for Relay Resolvers, such as evaluating all fields in a query at request time, but expect the way resolvers are defined to remain stable. + +## Verbose/awkward docblock syntax + +Today defining a resolver requires defining a function with a docblock which uses special syntax and duplicates information already specified in the function's name and types. Further, in order to enforce that these values match up, Relay emits type assertions in its generated types. While these assertions do ensure safety, they are an awkward developer experience. + +To address these issues we are exploring a more streamlined approach where names and types can be inferred from your Flow or TypeScript code similar to the approach taken by [Grats](https://grats.capt.dev/). This syntax may become available in future versions of Relay.
diff --git a/website/docs/guides/relay-resolvers/live-fields.md b/website/docs/guides/relay-resolvers/live-fields.md new file mode 100644 index 0000000000000..9323578ffb635 --- /dev/null +++ b/website/docs/guides/relay-resolvers/live-fields.md @@ -0,0 +1,121 @@ +--- +id: live-fields +title: "Live Fields" +slug: /guides/relay-resolvers/live-fields/ +description: Modeling data that changes over time in Relay Resolvers +--- + +One critical difference between client state and server state is that as client state changes over time, those changes will need to be reflected in your UI. To address this, Relay Resolvers support the ability to be marked as `@live`. Live resolvers are expected to return a `LiveState` shaped object which includes methods which allow Relay to both `read()` the current value and also to `subscribe()` to changes to the value. + +As this value changes over time, Relay will automatically recompute any [derived fields](./derived-fields.md) that depend on this field (including transitive dependencies if the changes cascade), and also efficiently trigger the update of any components/subscribers which have read fields that updated as a result of this change. + +## @live + +To mark a resolver as live, add the `@live` docblock tag to the resolver definition. For example: + +```tsx +import type { LiveState } from 'relay-runtime'; + +/** + * @RelayResolver Query.counter: Int + * @live + */ +export function counter(): LiveState { + return { + read: () => store.getState().counter, + subscribe: (callback) => { + return store.subscribe(callback); + }, + }; + +} +``` + +:::note +Both field resolvers and strong model resolvers, which map an ID to a model, may be annotated as `@live`. +::: + +## The LiveState Type + +The return type of a Live Resolver is known as a `LiveState`. It is conceptually similar to an observable or a signal, if you are familiar with those concepts. 
Unlike an observable, when a `LiveState` notifies its subscriber of an update, it does not include the new value. Instead, the subscriber (Relay) is expected to call `read()` to get the new value. + +While over-notification (subscription notifications when the read value has not actually changed) is supported, for performance reasons, it is recommended that the provider of the LiveState value confirms that the value has indeed changed before notifying Relay of the change. + +The type of a LiveState is defined as follows: + +```ts +export type LiveState<T> = { + /** + * Returns the current value of the live state. + */ + read(): T, + /** + * Subscribes to changes in the live state. The state provider should + * call the callback when the value of the live state changes. + */ + subscribe(cb: () => void): () => void, +}; +``` + +## Creating a LiveState Object + +In most cases, you will want to define a helper function that reads your reactive data store and returns a `LiveState` object. For example, for a Redux store you might write a wrapper that exposes a `LiveState` for a given selector: + +```ts +type Selector<T> = (state: State) => T; + +function selectorAsLiveState<T>(selector: Selector<T>): LiveState<T> { + let currentValue = selector(store.getState()); + return { + read: () => currentValue, + subscribe: (cb) => { + const unsubscribe = store.subscribe(() => { + const newValue = selector(store.getState()); + if (newValue === currentValue) { + return; + } + currentValue = newValue; + cb(); + }); + return unsubscribe; + }, + }; +} +``` + +A Live Resolver that uses this helper might look like this: + +```tsx +/** + * @RelayResolver Query.counter: Int + * @live + */ +export function counter(): LiveState<number> { + return selectorAsLiveState(getCounter); +} + +function getCounter(state) { + return state.counter; +} +``` + +## Batching + +When state changes in your data layer, it's possible that one change could result in notifying many `@live` resolver subscriptions about updates.
By default each of these updates will require Relay to do work to determine which components need to be updated. This can lead to significant duplicate work being performed. + +When possible, it is recommended that you batch updates to `@live` resolvers. This can be done by wrapping your state updates in a `batchLiveStateUpdates()` call on your `RelayStore` instance. + +A typical use with a Redux store might look like this: + +```ts +const store = createStore(reducer); +const originalDispatch = store.dispatch; + +function wrapped(action) { + relayStore.batchLiveStateUpdates(() => { + originalDispatch(action); + }) +} + +store.dispatch = wrapped; +``` diff --git a/website/docs/guides/relay-resolvers/return-types.md b/website/docs/guides/relay-resolvers/return-types.md new file mode 100644 index 0000000000000..9f4e1bb070504 --- /dev/null +++ b/website/docs/guides/relay-resolvers/return-types.md @@ -0,0 +1,129 @@ +--- +id: return-types +title: "Return Types" +slug: /guides/relay-resolvers/return-types/ +description: Showing the different types of return values for Relay Resolvers +keywords: +- resolvers +- derived +- selectors +- reactive +--- + +Relay Resolvers support a number of different return types, each of which has different semantics. This page will walk through the different types of supported return values and how they are used. + +## Scalar Types + +The simplest type for a resolver to return is a built-in GraphQL scalar value. Scalar values are values that can be represented as a primitive value in GraphQL, such as a string, number, or boolean. To return a scalar simply define your resolver as returning the scalar type and then return the corresponding JavaScript value from your resolver function. + +```tsx +/** + * @RelayResolver Post.isValid: Boolean + */ +export function isValid(post: PostModel): boolean { + return post.content !== "" && post.author != null; +} +``` + +## List Types + +Resolvers may also return a list of values. 
To do so, define your resolver as returning a list of the corresponding type and return an array from your resolver function. + +```tsx +/** + * @RelayResolver User.favoriteColors: [String] + */ +export function favoriteColors(user: UserModel): string[] { + return user.favoriteColors; +} +``` + +This pattern can be used for the other types, with the exception of server types, which don't yet support lists. + +## Client-defined GraphQL Types + +Resolvers can also model edges to other GraphQL types in your Resolver schema. If the type was defined as a "strong" type, the resolver function must return an object `{ id: DataID }` where `DataID` is the ID of the object. Relay will take care of invoking the type's model resolver function. + +```tsx +import {DataID} from 'relay-runtime'; +/** + * @RelayResolver Post.author: User + */ +export function author(post: PostModel): { id: DataID } { + return { id: post.authorId }; +} +``` + +If the type was defined as `@weak`, the resolver function must return an object matching the type's model type. + +```tsx +/** + * @RelayResolver User.profilePicture: ProfilePicture + */ +export function profilePicture(user: UserModel): ProfilePicture { + return { + url: user.profilePicture.url, + width: user.profilePicture.width, + height: user.profilePicture.height, + } +} +``` + +:::tip +Relay will emit type assertions in its generated code to help catch errors where a resolver implementation does not match what's declared in its docblock. +::: + +## Server Types + +Relay Resolvers also support modeling edges to types defined on your server schema that implement the [`Node` specification](https://graphql.org/learn/global-object-identification/#node-root-field). Since objects which implement Node each have a globally unique ID, resolvers modeling edges to these server types simply need to return that unique ID. + +At compile-time Relay derives a GraphQL query for each selection on this field and will lazily fetch that data on render.
+ +```tsx +import {DataID} from 'relay-runtime'; +/** + * @RelayResolver Post.author: User + */ +export function author(post: PostModel): DataID { + return post.authorId; +} +``` + +:::warning +Edges to server types that are only known to the client force Relay to fetch data lazily which will force an additional cascading network roundtrip. This is generally not optimal and should be avoided where possible. +::: + +To highlight this point, at compile time, Relay requires that any selection that reads a client-to-server edge field annotate the field with the `@waterfall` directive. This is intended to remind the author and reviewer that a tradeoff is being made here and to carefully consider the implications. + +```tsx +function Post() { + const data = useLazyLoadQuery(graphql` + query PostQuery { + post { + author @waterfall { + name + } + } + }`, {}); + return <div>{data.post.author.name}</div>; +} +``` + +## JavaScript Values + +There are rare cases where you want to return an arbitrary JavaScript value from your Resolver schema, one which cannot have a corresponding GraphQL type. As an escape hatch Relay supports a custom return type `RelayResolverValue` that allows you to return any JavaScript value from your resolver. **JavaScript values returned from resolvers should be immutable.** + +Consumers of this field will see a TypeScript/Flow type that is derived from your resolver function's return type. + +```tsx +/** + * @RelayResolver Post.publishDate: RelayResolverValue + */ +export function publishDate(post: PostModel): Date { + return post.publishDate; +} +``` + +:::warning +Use of `RelayResolverValue` should be considered an "escape hatch" and may be deprecated in future versions of Relay. In most cases a preferable pattern is to define a custom scalar in your [client schema extensions](../client-schema-extensions.md) and add a type definition for that custom scalar in your Relay config. +::: diff --git a/website/docs/guides/relay-resolvers/suspense.md b/website/docs/guides/relay-resolvers/suspense.md new file mode 100644 index 0000000000000..e93c51923cfcb --- /dev/null +++ b/website/docs/guides/relay-resolvers/suspense.md @@ -0,0 +1,41 @@ +--- +id: suspense +title: "Suspense" +slug: /guides/relay-resolvers/suspense/ +description: Handling loading states for live data +--- + +With [Live Resolvers](./live-fields.md), it's possible that the data you are exposing in the graph may not be synchronously available. For example, if you are fetching data from a remote API, it may take some time for the data to be fetched. Relay Resolvers provide a mechanism for handling this loading state. + +If a Live Resolver returns the "suspense sentinel" value, all consumers of that field will suspend until that field updates with a non-suspense value.
+ +## Suspense Sentinel + +If a Live Resolver is in a loading state, it may return a special sentinel value to indicate that the data is not yet available. + +```ts +import {suspenseSentinel} from 'relay-runtime'; + +/** + * @RelayResolver Query.myIp: String + * @live + */ +export function myIp(): LiveState<string> { + return { + read: () => { + const state = store.getState(); + const ipLoadObject = state.ip; + if (ipLoadObject.status === "LOADING") { + return suspenseSentinel(); + } + return ipLoadObject.value; + }, + subscribe: (cb) => { + return store.subscribe(cb); + }, + }; +} +``` + +:::note +A query or fragment will suspend if it reads any resolver field that is in a suspended state, even if it reads that resolver field indirectly via another resolver's `@rootFragment`. +::: diff --git a/website/docs/guides/required-directive.md b/website/docs/guides/required-directive.md index 81eff43191239..ce6e02a190277 100644 --- a/website/docs/guides/required-directive.md +++ b/website/docs/guides/required-directive.md @@ -51,11 +51,11 @@ This value is not expected to ever be null, but the component **can still render ### `THROW` (unrecoverable) -This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://reactjs.org/docs/error-boundaries.html). +This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary).
## Locality -A field's `@required` status is **local to the fragment where it is specified**. This allows you to add add/remove the directive without having to think about anything outside the scope of your component. +A field's `@required` status is **local to the fragment where it is specified**. This allows you to add/remove the directive without having to think about anything outside the scope of your component. This choice reflects the fact that some components may be able to recover better from missing data than others. For example, a `` component could probably render something sensible even if the restaurant's address is missing, but a `` component might not. diff --git a/website/docs/guides/testing-relay-components.md b/website/docs/guides/testing-relay-components.md index 2cd077fc60371..724c5aacc43d3 100644 --- a/website/docs/guides/testing-relay-components.md +++ b/website/docs/guides/testing-relay-components.md @@ -252,7 +252,70 @@ test('Error State', () => { }); ``` +#### Component Tests With Deferred Fragments +When using `MockPayloadGenerator` to generate data for a Query that has fragments with `@defer` you may want to generate the deferred data as well. To do so, you can use `MockPayloadGenerator.generateWithDefer` passing the option `generateDeferredPayload`: + +```javascript +// Say you have a component with useFragment +const ChildComponent = (props: {user: ChildComponentFragment_user$key}) => { + const data = useFragment(graphql` + fragment ChildComponentFragment_user on User { + name + } + `, props.user); + return {data?.name}; +}; + +// Say you have a parent component that fetches data with useLazyLoadQuery and `@defer`s the data for the ChildComponent. 
+const ParentComponent = () => { + const data = useLazyLoadQuery(graphql` + query ParentComponentQuery { + user { + id + ...ChildComponentFragment_user @defer + } + } + `, {}); + return ( + + {id} + + {data?.user && } + + + ); +}; + +const { + createMockEnvironment, + MockPayloadGenerator, +} = require('relay-test-utils'); + +test('Data Render with @defer', () => { + const environment = createMockEnvironment(); + const renderer = ReactTestRenderer.create( + + , + + ); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. + ReactTestRenderer.act(() => { + const operation = environment.mock.getMostRecentOperation(); + const mockData = MockPayloadGenerator.generateWithDefer(operation, null, {generateDeferredPayload: true}); + environment.mock.resolve(mockData); + + // You may need this to make sure all payloads are retrieved + jest.runAllTimers(); + }); + + // At this point operation will be resolved + // and the data for a query will be available in the store + expect(renderer.toJSON()).toEqual(['id', 'name']); +}); +``` ### Fragment Component Tests diff --git a/website/docs/guides/testing-relay-with-preloaded-queries.md b/website/docs/guides/testing-relay-with-preloaded-queries.md index 3dba45a6bcb97..abb0d95704ac2 100644 --- a/website/docs/guides/testing-relay-with-preloaded-queries.md +++ b/website/docs/guides/testing-relay-with-preloaded-queries.md @@ -32,11 +32,8 @@ In short, there are two steps that need to be performed **before rendering the c ```javascript const {RelayEnvironmentProvider} = require('react-relay'); -const { MockPayloadGenerator, createMockEnvironment } = require('relay-test-utils'); -const {render} = require('testing-library-react'); -// at the time of writing, act is not re-exported by our internal testing-library-react -// but is re-exported by the "external" version -const {act} = require('ReactTestUtils'); +const {MockPayloadGenerator, createMockEnvironment} = 
require('relay-test-utils'); +const {act, render} = require('@testing-library/react'); test("...", () => { // arrange const environment = createMockEnvironment(); @@ -141,7 +138,7 @@ This is more straightforward - it is done via a call to `environment.mock.queueP * Used a different query - the query resolver would not be called, `currentOperation` will be `null` * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). -* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. +* If data returned from the query is not what you expect, make sure you're generating the right graphql type. * You can tell you're mocking the wrong one if the return values look something like `` diff --git a/website/docs/tutorial/connections-pagination.md b/website/docs/tutorial/connections-pagination.md index dda43701a9de1..0576a639bf649 100644 --- a/website/docs/tutorial/connections-pagination.md +++ b/website/docs/tutorial/connections-pagination.md @@ -327,9 +327,9 @@ We need to modify the Newsfeed component to map over the edges and render each n ``` function Newsfeed() { - const data = useLazyLoadQuery(NewsfeedFragment, {}); + const data = useLazyLoadQuery(NewsfeedQuery, {}); // change-line - const storyEdges = data.newsfeedStories.edges; + const storyEdges = data.viewer.newsfeedStories.edges; return ( <> {storyEdges.map(storyEdge => @@ -376,7 +376,7 @@ Within `Newsfeed`, we can call both `useLazyLoadQuery` and `useFragment`, though ``` export default function Newsfeed() { // change-line - const queryData = useLazyLoadQuery(NewsfeedFragment, {}); + const queryData = useLazyLoadQuery(NewsfeedQuery, {}); // change-line const data = useFragment(NewsfeedContentsFragment, queryData); const storyEdges = data.newsfeedStories.edges; diff --git 
a/website/docs/tutorial/fragments-1.md b/website/docs/tutorial/fragments-1.md index 2a9d2573a5dcb..e7c17b471efff 100644 --- a/website/docs/tutorial/fragments-1.md +++ b/website/docs/tutorial/fragments-1.md @@ -14,7 +14,7 @@ Go to `Newsfeed.tsx` and find `NewsfeedQuery` so that you can add the new field: ``` const NewsfeedQuery = graphql` query NewsfeedQuery { - top_story { + topStory { title summary // change-line @@ -50,7 +50,7 @@ type Props = { export default function Story({story}: Props) { return ( - + {story.title} // change-line // Add this line @@ -65,7 +65,7 @@ The date should now appear. And thanks to GraphQL, we didn't have to write and d But if you think about it, why should you have had to modify `Newsfeed.tsx`? Shouldn’t React components be self-contained? Why should Newsfeed care about the specific data required by Story? What if the data was required by some child component of Story way down in the hierarchy? What if it was a component that was used in many different places? Then we would have to modify many components whenever its data requirements changed. -The avoid these and many other problems, we can move the data requirements for the Story component into `Story.tsx`. +To avoid these and many other problems, we can move the data requirements for the Story component into `Story.tsx`. We do this by splitting off `Story`’s data requirements into a *fragment* defined in `Story.tsx`. Fragments are separate pieces of GraphQL that the Relay compiler stitches together into complete queries. They allow each component to define its own data requirements, without paying the cost at runtime of each component running its own queries. @@ -142,7 +142,7 @@ export default function Story({story}: Props) { return ( {data.title} - + @@ -210,7 +210,7 @@ The `PosterByline` component used by `Story` renders the poster’s name and pro * Declare a `PosterBylineFragment` on `Actor` and specify the fields it needs (`name`, `profilePicture`). 
The `Actor` type represents a person or organization that can post a story. * Spread that fragment within `poster` in `StoryFragment`. * Call `useFragment` to retrieve the data. -* Update the Props to accept a `PosterBylineFragment$key` as the `person` prop. +* Update the Props to accept a `PosterBylineFragment$key` as the `poster` prop. It’s worth going through these steps a second time, to get the mechanics of using fragments under your fingers. There are a lot of parts here that need to slot together in the right way. diff --git a/website/docs/tutorial/intro.md b/website/docs/tutorial/intro.md index 12c8cf4ed3ef4..633956e930565 100644 --- a/website/docs/tutorial/intro.md +++ b/website/docs/tutorial/intro.md @@ -36,6 +36,8 @@ When you run `npm run dev`, several processes are started: In the terminal output, these three processes’ log output are marked with tags: `[webpack]` in yellow, `[server]` in green, and `[relay]` in blue. Keep a look out for errors marked with `[relay]` as these are helpful if your GraphQL has any mistakes. +If you encounter errors in the `[relay]` process indicating: ```[relay] thread 'main' panicked at 'Cannot run relay in watch mode if `watchman` is not available (or explicitly disabled).'```, this means watchman is not installed or available on your system. To resolve this, you may need to [install watchman separately](https://facebook.github.io/watchman/docs/install). After installing watchman, try running `npm run dev` again. + Now that these processes are running, you should be able to open [http://localhost:3000](http://localhost:3000/) in your browser. 
![Screenshot](/img/docs/tutorial/intro-screenshot-placeholder.png) diff --git a/website/docs/tutorial/mutations-updates.md b/website/docs/tutorial/mutations-updates.md index e712dc35a19ae..c94f670dc0fa6 100644 --- a/website/docs/tutorial/mutations-updates.md +++ b/website/docs/tutorial/mutations-updates.md @@ -142,6 +142,7 @@ const StoryLikeButtonLikeMutation = graphql` This is a lot, let’s break it down: +* The mutation is named `StoryLikeButton` + `Like` + `Mutation` because it must begin with the module name, and end with the GraphQL operation. * The mutation declares variables which are passed from the client to the server when the mutation is dispatched. Each variable has a name (`$id`, `$doesLike`) and a type (`ID!`, `Boolean!`). The `!` after the type indicates that it is required, not optional. * The mutation selects a mutation field defined by the GraphQL schema. Each mutation field that the server defines corresponds to some action that the client can request of the server, such as liking a story. * The mutation field takes arguments (just like any field can do). Here we pass in the mutation variables that we declared as the argument values — for example, the `doesLike` field argument is set to be the `$doesLike` mutation variable. @@ -197,7 +198,7 @@ function StoryLikeButton({story}) { commitMutation({ variables: { id: data.id, - doesViewerLike: !data.doesViewerLike, + doesLike: !data.doesViewerLike, }, }) // end-change @@ -300,7 +301,7 @@ function StoryLikeButton({story}) { commitMutation({ variables: { id: data.id, - doesViewerLike: newDoesLike, + doesLike: newDoesLike, }, // change optimisticUpdater: store => { @@ -371,7 +372,7 @@ function StoryLikeButton({story}) { ### Step 4 — Modify the Updatable Data -Now `upatableData` is an object representing our existing Story as it exists in the local store. 
We can read and write the fields listed in our fragment: +Now `updatableData` is an object representing our existing Story as it exists in the local store. We can read and write the fields listed in our fragment: ``` function StoryLikeButton({story}) { @@ -570,7 +571,7 @@ export default function StoryCommentsComposer({story}: Props) { // change const connectionID = ConnectionHandler.getConnectionID( data.id, - 'StoryCommentsSectionFragment_comments', + 'StoryCommentsSection_comments', ); // end-change commitMutation({ diff --git a/website/docs/tutorial/organizing-mutations-queries-and-subscriptions.md b/website/docs/tutorial/organizing-mutations-queries-and-subscriptions.md new file mode 100644 index 0000000000000..73b470111a02f --- /dev/null +++ b/website/docs/tutorial/organizing-mutations-queries-and-subscriptions.md @@ -0,0 +1,24 @@ +# Organizing Mutations, Queries, and Subscriptions + +Relay Operations (Mutations, Queries, and Subscriptions) have strict naming requirements. The operation name must begin with the module name, and end with the GraphQL operation type. The name also must be globally unique. + +> Sidenote: This naming scheme originates from trying to enforce the uniqueness constraint. At Meta, Haste (a dependency management system for static resources) enforces that all module names are unique to derive sensible globally unique Relay names. Coupling the module name and Relay name also makes it easier to locate a fragment/query/mutation if you know that name. This makes sense within Meta, and may be less sensible in an OSS setting. + +For example: + +1. A Mutation in the file `MyComponent.js` must be named with the scheme `MyComponent[MyDescriptiveNameHere]Mutation`. +2. A Query in the file `MyComponent.react.js` must be named with the scheme `MyComponent*Query`. + +A NewsFeed component may have mutations/queries that shouldn't logically start with `NewsFeed`, but Relay requires this _if they are defined in that file_. 
+ +### Recommended Structure For Mutations and Subscriptions + +Put Mutations in their own hook module so the name is closer to _what the mutation does_ rather than _which component invokes it_. If the module name is correctly descriptive, it is fine to declare it in the same file. + +If you are adding a Mutation for `Post`, like adding a comment to a post, you may create a new file titled `useAddPostComment.js`. Your mutation (in this file) will then be named `useAddPostCommentMutation`, which is a perfectly descriptive name. + +You may consider putting all of these hooks in a dedicated `hooks` directory. + +### Recommended Structure for Queries + +Root components should have a single query that is tightly coupled to a component, since it describes that component's data dependencies. Queries and fragments should co-locate with their data-use code. diff --git a/website/docs/tutorial/queries-1.md b/website/docs/tutorial/queries-1.md index 228e59bba04ea..5e305ccefb32e 100644 --- a/website/docs/tutorial/queries-1.md +++ b/website/docs/tutorial/queries-1.md @@ -69,8 +69,8 @@ const NewsfeedQuery = graphql` Let’s break this down: -* To embed GraphQL within Javascript, we put a string literal marked with the graphql`` tag. This tag allows the Relay compiler to find and compile the GraphQL within a Javascript codebase. -* Our GraphQL string consists of a query declaration with the keyword `query` and then a query name. +* To embed GraphQL within JavaScript, we put a string literal marked with the graphql`` tag. This tag allows the Relay compiler to find and compile the GraphQL within a JavaScript codebase. +* Our GraphQL string consists of a query declaration with the keyword `query` and then a query name. Note that the query name **must** begin with the module name (in this case Newsfeed). 
* Inside the query declaration are *fields*, which specify what information to query for*:* * Some fields are *scalar fields* that retrieve a string, number, or other unit of information. * Other fields are *edges* that let us traverse from one node in the graph to another. When a field is an edge, it’s followed by another block `{ }` containing fields for the node at the other end of the edge. Here, the `poster` field is an edge that goes from a Story to a Person who posted it. Once we’ve traversed to the Person, we can include fields about the Person such as their `name`. @@ -79,9 +79,13 @@ This illustrates the part of the graph that this query is asking for: ![Parts of the GraphQL query](/img/docs/tutorial/query-breakdown.png) -Now that we’ve defined the query, we need to modify our React component to fetch it and to use the data returned by the server. +Now that we’ve defined the query, we need to do two things. +1. Run the Relay compiler so that it knows about the new GraphQL query (`npm run relay`). +2. Modify our React component to fetch it and to use the data returned by the server. -Turn back to the `Newsfeed` component and start by deleting the placeholder data. Then replace it with this: +If you open package.json you will find the script `relay` is hooked up to run the relay-compiler. This is what `npm run relay` does. Once the compiler successfully updates/generates the new compiled query you will be able to find it in the __generated__ folder under src/components/ as NewsfeedQuery.graphql.ts. This project comes with precomputed fragments, so unless you do this step, you will not get the desired results. + +Next, turn back to the `Newsfeed` component and start by deleting the placeholder data. Then, replace it with this: ``` import { useLazyLoadQuery } from "react-relay"; @@ -175,7 +179,7 @@ const MyQuery = graphql` `; ``` -... the Javascript variable `MyQuery` is actually assigned to an object that looks something like this: +...
the JavaScript variable `MyQuery` is actually assigned to an object that looks something like this: ``` const MyQuery = { @@ -256,7 +260,7 @@ We’ll revisit types throughout this tutorial. But next, we'll look at an even Queries are the foundation of fetching GraphQL data. We’ve seen: -* How to define a GraphQL query within our app using the graphql`` tagged literal +* How to define a GraphQL query within our app using the graphql`` tagged literal. * How to use the `useLazyLoadQuery` hook to fetch the results of a query when a component renders. * How to import Relay's generated types for type safety. diff --git a/website/docs/tutorial/queries-2.md b/website/docs/tutorial/queries-2.md index ed962565eeab0..3b6ae0dccbd91 100644 --- a/website/docs/tutorial/queries-2.md +++ b/website/docs/tutorial/queries-2.md @@ -27,7 +27,7 @@ If data is lower-priority and should be loaded after the main data has loaded, b We’ve already prepared a hovercard component that you can put to use. However, it has been in a directory called `future` in order to avoid compilation errors since it uses `ImageFragment`. Now that we’re at this stage of the tutorial, you can move the modules in `future` into `src/components`: -``` +```shell mv future/* src/components ``` @@ -219,7 +219,7 @@ You may also notice that this request is made only the first time you hover over
Deep dive: Caching and the Relay Store -In contrast to most other systems, Relay’s caching is not based on queries, but on graph nodes. Relay maintains a local cache of all the nodes it has fetched called the Relay Store. Each node in the Store is identified and retrieved by its ID. If two queries ask for the same information, as identified by node IDs, then the second query will be fulfilled using the cached information retrieved for the first query, and not be fetched. +In contrast to most other systems, Relay’s caching is not based on queries, but on graph nodes. Relay maintains a local cache of all the nodes it has fetched called the Relay Store. Each node in the Store is identified and retrieved by its ID. If two queries ask for the same information, as identified by node IDs, then the second query will be fulfilled using the cached information retrieved for the first query, and not be fetched. Make sure to configure [missing field handlers](/docs/guided-tour/reusing-cached-data/filling-in-missing-data/) to take advantage of this caching behavior. Relay will garbage-collect nodes from the Store if they aren’t “reachable” from any queries that are used, or have been recently used, by any mounted components.
@@ -386,4 +386,4 @@ Although we introduced queries using `useLazyLoadQuery` for simplicity, preloade * Query variables are used by passing them into field arguments. * Preloaded queries are always the best way to go. For user interaction queries, initiate the fetch in the event handler. For the initial query for your screen, initiate the fetch as early as possible in your specific routing system. Use lazy-loaded queries only for quick prototyping, or not at all. -Next we'll briefly look at a way to enhance the hovecard by handling different types of posters differently. After that, we'll see how to handle situations where information that's part of the initial query also needs to be updated and refetched with different variables. +Next we'll briefly look at a way to enhance the hovercard by handling different types of posters differently. After that, we'll see how to handle situations where information that's part of the initial query also needs to be updated and refetched with different variables. diff --git a/website/docs/tutorial/refetchable-fragments.md b/website/docs/tutorial/refetchable-fragments.md index c94b7744bb33e..cf39d0b2776fb 100644 --- a/website/docs/tutorial/refetchable-fragments.md +++ b/website/docs/tutorial/refetchable-fragments.md @@ -50,7 +50,7 @@ You should now see a sidebar with a list of people at the top. 
![Contacts list](/img/docs/tutorial/refetchable-contacts-initial.png) -Have a look at `ContactsList.js` and you’ll find this fragment, which is what selects the list of contacts: +Have a look at `ContactsList.tsx` and you’ll find this fragment, which is what selects the list of contacts: ``` const ContactsListFragment = graphql` diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 1a830d923b070..5b21c208dc9d9 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -192,6 +192,12 @@ module.exports = { infoLink: 'https://www.atlassian.com/', pinned: false, }, + { + caption: 'Réa', + image: '/img/logos/rea.png', + infoLink: 'https://www.rea-app.fr/', + pinned: false, + }, ], }, onBrokenLinks: 'throw', @@ -202,14 +208,8 @@ module.exports = { require.resolve('docusaurus-plugin-internaldocs-fb/docusaurus-preset'), { docs: { - showLastUpdateAuthor: fbContent({ - internal: false, - external: true, - }), - showLastUpdateTime: fbContent({ - internal: false, - external: true, - }), + showLastUpdateAuthor: false, + showLastUpdateTime: false, editUrl: fbContent({ internal: 'https://www.internalfb.com/intern/diffusion/FBS/browse/master/xplat/js/RKJSModules/Libraries/Relay/oss/__github__/website/', @@ -248,10 +248,10 @@ module.exports = { ], }, gtag: { - trackingID: 'UA-44373548-50', + trackingID: 'G-DCSC7FDGL5', }, googleAnalytics: { - trackingID: 'UA-44373548-50', + trackingID: 'G-DCSC7FDGL5', }, }, ], @@ -485,8 +485,8 @@ module.exports = { ], }, algolia: { - appId: 'BH4D9OD16A', - apiKey: '3d7d5825d50ea36bca0e6ad06c926f06', + appId: 'UBPJPW35NS', + apiKey: '26e12c1c268d99b20a16f365f8593df9', indexName: 'relay', contextualSearch: true, }, diff --git a/website/package.json b/website/package.json index dddfcf58c7aa9..64b99d4f5b509 100644 --- a/website/package.json +++ b/website/package.json @@ -1,6 +1,4 @@ { - "name": "relay-website", - "version": "0.0.1", "repository": "facebook/relay", "license": "MIT", "private": true, @@ -37,8 
+35,7 @@ }, "resolutions": { "highlight.js": "^10.4.1", - "shelljs": "^0.8.5", - "ansi-html": "0.0.8" + "shelljs": "^0.8.5" }, "prettier": { "arrowParens": "avoid", diff --git a/website/sidebars.js b/website/sidebars.js index 1013f5dd78485..ba4fdb854a796 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -50,16 +50,10 @@ const GuidesRescuedFromOldTutorial = [ 'guided-tour/updating-data/graphql-subscriptions', { 'Updating Data': [ - ...fbContent({ - internal: [ - 'guided-tour/updating-data/imperatively-modifying-store-data', - 'guided-tour/updating-data/imperatively-modifying-linked-fields', - 'guided-tour/updating-data/typesafe-updaters-faq', - ], - external: [ - 'guided-tour/updating-data/imperatively-modifying-store-data-unsafe', - ], - }), + 'guided-tour/list-data/updating-connections', + 'guided-tour/updating-data/imperatively-modifying-store-data', + 'guided-tour/updating-data/imperatively-modifying-linked-fields', + 'guided-tour/updating-data/typesafe-updaters-faq', 'guided-tour/updating-data/local-data-updates', 'guided-tour/updating-data/client-only-data', ], @@ -76,10 +70,11 @@ const Guides = fbContent({ 'guides/testing-relay-components', 'guides/testing-relay-with-preloaded-queries', 'guides/required-directive', - 'guides/relay-resolvers', + 'guides/alias-directive', 'guides/client-schema-extensions', 'guides/type-emission', 'guided-tour/rendering/error-states', + 'guides/fb/client-mutation-id-and-actor-id', GuidesRescuedFromOldTutorial, { EntryPoints: [ @@ -108,10 +103,27 @@ const Guides = fbContent({ 'guides/persisted-queries', 'guides/network-layer', 'guides/client-schema-extensions', + { + 'Relay Resolvers': [ + 'guides/relay-resolvers/introduction', + 'guides/relay-resolvers/enabling', + 'guides/relay-resolvers/defining-types', + 'guides/relay-resolvers/defining-fields', + 'guides/relay-resolvers/return-types', + 'guides/relay-resolvers/field-arguments', + 'guides/relay-resolvers/derived-fields', + 'guides/relay-resolvers/live-fields', + 
'guides/relay-resolvers/suspense', + 'guides/relay-resolvers/errors', + 'guides/relay-resolvers/descriptions', + 'guides/relay-resolvers/deprecated', + 'guides/relay-resolvers/limitations', + ], + }, 'guides/testing-relay-components', 'guides/testing-relay-with-preloaded-queries', 'guides/required-directive', - 'guides/relay-resolvers', + 'guides/alias-directive', 'guided-tour/rendering/error-states', GuidesRescuedFromOldTutorial, // TODO(T84797602) release incremental data delivery externally @@ -143,6 +155,7 @@ module.exports = { 'tutorial/refetchable-fragments', 'tutorial/connections-pagination', 'tutorial/mutations-updates', + 'tutorial/organizing-mutations-queries-and-subscriptions', ], Installation: [ 'getting-started/prerequisites', @@ -184,6 +197,12 @@ module.exports = { 'api-reference/relay-runtime/request-subscription', ], }, + { + 'Relay Resolvers': [ + 'api-reference/relay-resolvers/docblock-format', + 'api-reference/relay-resolvers/runtime-functions', + ], + }, 'api-reference/graphql/graphql-directives', 'api-reference/legacy-apis/legacy-apis', ], diff --git a/website/src/compiler-explorer/Editor.js b/website/src/compiler-explorer/Editor.js index fb44a4fc1f399..944660f8c853e 100644 --- a/website/src/compiler-explorer/Editor.js +++ b/website/src/compiler-explorer/Editor.js @@ -42,8 +42,8 @@ const editorOptions = { export default function Editor({text, onDidChange, diagnostics, style}) { const [ref, setRef] = useState(null); - const themeContext = useThemeConfig(); - const editorTheme = themeContext.isDarkTheme ? 'vs-dark' : 'vs'; + const isDarkMode = useIsDarkMode(); + const editorTheme = isDarkMode ? 'vs-dark' : 'vs'; const editor = useMemo(() => { if (ref == null) { @@ -56,7 +56,11 @@ export default function Editor({text, onDidChange, diagnostics, style}) { if (editor == null) { return; } - editor.setValue(text); + + // Calling setValue breaks undo, so we try not to do it if we don't need to. 
+ if (editor.getValue() !== text) { + editor.setValue(text); + } }, [editor, text]); useLayoutEffect(() => { @@ -99,3 +103,35 @@ export default function Editor({text, onDidChange, diagnostics, style}) { return
; } + +function getIsDarkMode() { + return document.documentElement.dataset.theme === 'dark'; +} + +// Docusaurus does not provide a hook for this, so we listen for the data +// attribute on the HTML element to change. +function useIsDarkMode() { + const [mode, setMode] = useState(() => getIsDarkMode()); + useEffect(() => { + const observer = new MutationObserver((mutationsList, observer) => { + for (const mutation of mutationsList) { + if ( + mutation.type === 'attributes' && + mutation.attributeName === 'data-theme' + ) { + setMode(getIsDarkMode()); + } + } + }); + + // Configuration of the observer + const config = {attributes: true}; + + // Start observing the target node + observer.observe(document.documentElement, config); + return () => { + observer.disconnect(); + }; + }, []); + return mode; +} diff --git a/website/src/compiler-explorer/ExplorerState.js b/website/src/compiler-explorer/ExplorerState.js index c78f018fbdfcc..a1f6e4593af02 100644 --- a/website/src/compiler-explorer/ExplorerState.js +++ b/website/src/compiler-explorer/ExplorerState.js @@ -38,6 +38,9 @@ export function useExplorerState() { setLanguage: language => dispatch({type: 'SET_LANGUAGE', language}), setOutputType: outputType => dispatch({type: 'SET_OUTPUT_TYPE', outputType}), + setInputWindow: inputWindow => { + dispatch({type: 'UPDATE_INPUT_WINDOW', inputWindow}); + }, }; }, []); return { @@ -54,6 +57,8 @@ function reducer(state, action) { return {...state, documentText: action.documentText}; case 'SET_OUTPUT_TYPE': return {...state, outputType: action.outputType}; + case 'UPDATE_INPUT_WINDOW': + return {...state, inputWindow: action.inputWindow}; case 'SET_FEATURE_FLAG': const featureFlags = { ...state.featureFlags, diff --git a/website/src/compiler-explorer/ExplorerStateConstants.js b/website/src/compiler-explorer/ExplorerStateConstants.js index 6bcb03161aba9..8400711c4c4d0 100644 --- a/website/src/compiler-explorer/ExplorerStateConstants.js +++ 
b/website/src/compiler-explorer/ExplorerStateConstants.js @@ -37,18 +37,6 @@ fragment AgeFragment on User { `.trim(); export const FEATURE_FLAGS = [ - { - key: 'enable_flight_transform', - label: 'Flight Transforms', - kind: 'bool', - default: true, - }, - { - key: 'hash_supported_argument', - label: 'Hash Supported Argument', - kind: 'enum', - default: true, - }, {key: 'no_inline', label: '@no_inline', kind: 'enum', default: true}, { key: 'enable_3d_branch_arg_generation', @@ -69,16 +57,47 @@ export const FEATURE_FLAGS = [ default: true, }, { - key: 'enable_client_edges', - label: 'Client Edges', + key: 'skip_printing_nulls', + label: 'Skip Printing Nulls', + kind: 'enum', + default: false, + }, + { + key: 'compact_query_text', + label: 'Compact Query Text', + kind: 'enum', + default: false, + }, + { + key: 'enable_fragment_aliases', + label: '@alias', + kind: 'enum', + default: true, + }, + { + key: 'enforce_fragment_alias_where_ambiguous', + label: 'Enforce @alias where ambiguous', kind: 'enum', default: true, }, + { + key: 'enable_catch_directive_transform', + label: '@catch', + kind: 'enum', + default: false, + }, + { + key: 'disallow_required_on_non_null_fields', + label: 'Disallow Required on Non-Null Fields', + kind: 'bool', + default: false, + }, ]; export const DEFAULT_STATE = { schemaText: DEFAULT_SCHEMA, documentText: DEFAULT_DOCUMENT, + inputWindow: 'schema', outputType: 'operation', featureFlags: Object.fromEntries(FEATURE_FLAGS.map(f => [f.key, f.default])), language: 'typescript', diff --git a/website/src/compiler-explorer/ExplorerStateSerialization.js b/website/src/compiler-explorer/ExplorerStateSerialization.js index 19eb8bc39c957..5cf15586cf56a 100644 --- a/website/src/compiler-explorer/ExplorerStateSerialization.js +++ b/website/src/compiler-explorer/ExplorerStateSerialization.js @@ -44,7 +44,7 @@ export function deserializeState(params) { console.warn('Unexpected encoding version: ' + params.get('enc')); return null; } - const state = {}; + 
const state = DEFAULT_STATE; for (const key of Object.keys(DEFAULT_STATE)) { const value = params.get(key); if (key == 'schemaText' || key == 'documentText') { diff --git a/website/src/css/custom.css b/website/src/css/custom.css index bfe6ee7a62aed..255e2dcdaad9c 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -508,18 +508,6 @@ pre.outerPre { border-radius: var(--ifm-button-border-radius); } -.try-it { - padding-top: 10px; -} - -.try-it a.button { - color: #f9f6ef; - border-color: #EAEAE2; - border-style: solid; - border-width: var(--ifm-button-border-width); - border-radius: var(--ifm-button-border-radius); -} - a.hash-link { margin-left: 0; } diff --git a/website/src/pages/compiler-explorer.js b/website/src/pages/compiler-explorer.js index f63e130a2740a..cb6dade133694 100644 --- a/website/src/pages/compiler-explorer.js +++ b/website/src/pages/compiler-explorer.js @@ -15,6 +15,8 @@ import { import {FEATURE_FLAGS} from '../compiler-explorer/ExplorerStateConstants'; import Layout from '@theme/Layout'; import clsx from 'clsx'; +// We have a dynamic require later on which triggers a lint error here. +// eslint-disable-next-line relay-internal/no-mixed-import-and-require import * as React from 'react'; const {useState, useEffect, useLayoutEffect, useMemo} = React; @@ -55,6 +57,7 @@ function CompilerExplorer() { const { state, setOutputType, + setInputWindow, setDocumentText, setSchemaText, setFeatureFlag, @@ -64,6 +67,7 @@ function CompilerExplorer() { const output = results.Ok ?? 
''; const schemaDiagnostics = results.Err?.SchemaDiagnostics; const documentDiagnostics = results.Err?.DocumentDiagnostics; + const configError = results.Err?.ConfigError; const padding = 20; const Editor = useMemo(() => { // Loading the Editor component causes Docusaurus' build time pre-rendering to @@ -71,6 +75,55 @@ function CompilerExplorer() { return require('../compiler-explorer/Editor').default; }, []); + const input = () => { + switch (state.inputWindow) { + case 'schema': + return ( + + ); + case 'document': + return ( + + ); + case 'config': + return ( +
+ + +
+ ); + } + }; + return (
- Schema + setInputWindow(selected)} + />
- - Document - - Feature Flags - - + {input()}
- +
diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 09e2d42b4a0f1..b73ec519e7a95 100755 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -76,16 +76,6 @@ const HomeSplash = () => { {siteConfig.tagline} {siteConfig.subtagline} -
@@ -590,25 +580,6 @@ export default function ArtistCard(props) { ]} /> - -

Explore CodeSandbox Example

-
- -
-

Proudly Used Elsewhere

diff --git a/website/static/img/logos/rea.png b/website/static/img/logos/rea.png new file mode 100644 index 0000000000000..eb17c151ca976 Binary files /dev/null and b/website/static/img/logos/rea.png differ diff --git a/website/versioned_docs/version-experimental/RelayHooks-AGuidedTourOfRelay.md b/website/versioned_docs/version-experimental/RelayHooks-AGuidedTourOfRelay.md index 7aa22f18a9409..6187f06a9d1b6 100644 --- a/website/versioned_docs/version-experimental/RelayHooks-AGuidedTourOfRelay.md +++ b/website/versioned_docs/version-experimental/RelayHooks-AGuidedTourOfRelay.md @@ -897,7 +897,7 @@ const { function TabSwitcher() { // We use startTransition to schedule the update - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const [selectedTab, setSelectedTab] = useState('Home'); return ( @@ -1050,9 +1050,9 @@ Additionally, our APIs for refetching ([Re-rendering with Different Data](#re-re As you may have noticed, we mentioned that using `useLazyLoadQuery` will **_fetch_** a query from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. -We can use [**Error Boundary**](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [**Error Boundary**](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. 
The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. -[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static **`getDerivedStateFromError`** method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static **`getDerivedStateFromError`** method: ```javascript const React = require('React'); @@ -1846,7 +1846,7 @@ const {useState, useTransition} = require('React'); const {graphql, useLazyLoadQuery} = require('react-relay/hooks'); function App() { - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const [variables, setVariables] = useState({id: '4'}); const data = useLazyLoadQuery( @@ -1894,7 +1894,7 @@ const {useState, useTransition} = require('React'); const {graphql, useLazyLoadQuery} = require('react-relay/hooks'); function App() { - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const [state, setState] = useState({ fetchPolicy: 'store-or-network', variables: {id: '4'}, @@ -1954,7 +1954,7 @@ type Props = {| |}; function CommentBody(props: Props) { - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const [data, refetch] = useRefetchableFragment( graphql` fragment CommentBody_comment on Comment @@ -2113,7 +2113,7 @@ type Props = {| |}; function FriendsListComponent(props: Props) { - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const {data, loadNext} = usePaginationFragment( graphql` fragment FriendsListComponent_user on User @@ -2185,7 +2185,7 @@ type Props = {| |}; function FriendsListComponent(props: Props) { - const [startTransition] = useTransition(); + const [_, startTransition] = 
useTransition(); const { data, loadNext, @@ -2476,7 +2476,7 @@ type Props = {| function FriendsListComponent(props: Props) { const searchTerm = props.searchTerm; - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const {data, loadNext, refetch} = usePaginationFragment( graphql` fragment FriendsListComponent_user on User { diff --git a/website/versioned_docs/version-experimental/RelayHooks-ApiReference.md b/website/versioned_docs/version-experimental/RelayHooks-ApiReference.md index fa7ac546732f1..88c5d34dbbeef 100644 --- a/website/versioned_docs/version-experimental/RelayHooks-ApiReference.md +++ b/website/versioned_docs/version-experimental/RelayHooks-ApiReference.md @@ -381,7 +381,7 @@ type Props = {| |}; function CommentBody(props: Props) { - const [startTransition] = useTransition(); + const [_, startTransition] = useTransition(); const [data, refetch] = useRefetchableFragment( graphql` fragment CommentBody_comment on Comment diff --git a/website/versioned_docs/version-v11.0.0/guided-tour/rendering/error-states.md b/website/versioned_docs/version-v11.0.0/guided-tour/rendering/error-states.md index db25546570f53..11833d6b088b4 100644 --- a/website/versioned_docs/version-v11.0.0/guided-tour/rendering/error-states.md +++ b/website/versioned_docs/version-v11.0.0/guided-tour/rendering/error-states.md @@ -12,9 +12,9 @@ import FbErrorBoundary from './fb/FbErrorBoundary.md'; As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. -We can use [Error Boundary](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. 
The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. -[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static `getDerivedStateFromError` method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: ```js const React = require('React'); diff --git a/website/versioned_docs/version-v11.0.0/guides/testing-relay-with-preloaded-queries.md b/website/versioned_docs/version-v11.0.0/guides/testing-relay-with-preloaded-queries.md index ba42a1f871ea1..cf541ea0c69ae 100644 --- a/website/versioned_docs/version-v11.0.0/guides/testing-relay-with-preloaded-queries.md +++ b/website/versioned_docs/version-v11.0.0/guides/testing-relay-with-preloaded-queries.md @@ -134,7 +134,7 @@ This is more straightforward - it is done via a call to `environment.mock.queueP * Used a different query - the query resolver would not be called, `currentOperation` will be `null` * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). 
-* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. +* If data returned from the query is not what you expect, make sure you're generating the right graphql type. * You can tell you're mocking the wrong one if the return values look something like `` diff --git a/website/versioned_docs/version-v12.0.0/guided-tour/rendering/error-states.md b/website/versioned_docs/version-v12.0.0/guided-tour/rendering/error-states.md index dff919b539cc5..c07745359f189 100644 --- a/website/versioned_docs/version-v12.0.0/guided-tour/rendering/error-states.md +++ b/website/versioned_docs/version-v12.0.0/guided-tour/rendering/error-states.md @@ -17,9 +17,9 @@ import FbErrorBoundary from './fb/FbErrorBoundary.md'; As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. -We can use [Error Boundary](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. 
-[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static `getDerivedStateFromError` method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: ```js const React = require('React'); diff --git a/website/versioned_docs/version-v12.0.0/guides/testing-relay-with-preloaded-queries.md b/website/versioned_docs/version-v12.0.0/guides/testing-relay-with-preloaded-queries.md index 90235359089a5..8c2e854bd293a 100644 --- a/website/versioned_docs/version-v12.0.0/guides/testing-relay-with-preloaded-queries.md +++ b/website/versioned_docs/version-v12.0.0/guides/testing-relay-with-preloaded-queries.md @@ -141,7 +141,7 @@ This is more straightforward - it is done via a call to `environment.mock.queueP * Used a different query - the query resolver would not be called, `currentOperation` will be `null` * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). -* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. +* If data returned from the query is not what you expect, make sure you're generating the right graphql type. 
* You can tell you're mocking the wrong one if the return values look something like `` diff --git a/website/versioned_docs/version-v13.0.0/guided-tour/rendering/error-states.md b/website/versioned_docs/version-v13.0.0/guided-tour/rendering/error-states.md index dff919b539cc5..c07745359f189 100644 --- a/website/versioned_docs/version-v13.0.0/guided-tour/rendering/error-states.md +++ b/website/versioned_docs/version-v13.0.0/guided-tour/rendering/error-states.md @@ -17,9 +17,9 @@ import FbErrorBoundary from './fb/FbErrorBoundary.md'; As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. -We can use [Error Boundary](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. 
-[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static `getDerivedStateFromError` method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: ```js const React = require('React'); diff --git a/website/versioned_docs/version-v13.0.0/guides/required-directive.md b/website/versioned_docs/version-v13.0.0/guides/required-directive.md index b8a082827cfc6..b0799f710414c 100644 --- a/website/versioned_docs/version-v13.0.0/guides/required-directive.md +++ b/website/versioned_docs/version-v13.0.0/guides/required-directive.md @@ -51,11 +51,11 @@ This value is not expected to ever be null, but the component **can still render ### `THROW` (unrecoverable) -This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://reactjs.org/docs/error-boundaries.html). +This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary). ## Locality -A field's `@required` status is **local to the fragment where it is specified**. This allows you to add add/remove the directive without having to think about anything outside the scope of your component. +A field's `@required` status is **local to the fragment where it is specified**. 
This allows you to add/remove the directive without having to think about anything outside the scope of your component. This choice reflects the fact that some components may be able to recover better from missing data than others. For example, a `` component could probably render something sensible even if the restaurant's address is missing, but a `` component might not. @@ -116,7 +116,7 @@ fragment MyFrag on Actor { } ``` -In this situation Relay will generate a union type like: `{__typename: 'User', name: string} | {__typename: '%ignore this%}`. Now you can check the `__typename` field to narrow your object's type down to one that has a non-nullalble `name`. +In this situation Relay will generate a union type like: `{__typename: 'User', name: string} | {__typename: '%ignore this%}`. Now you can check the `__typename` field to narrow your object's type down to one that has a non-nullable `name`. Example diff showing the adoption of this strategy: D24370183 diff --git a/website/versioned_docs/version-v13.0.0/guides/testing-relay-with-preloaded-queries.md b/website/versioned_docs/version-v13.0.0/guides/testing-relay-with-preloaded-queries.md index 3dba45a6bcb97..426791a14032a 100644 --- a/website/versioned_docs/version-v13.0.0/guides/testing-relay-with-preloaded-queries.md +++ b/website/versioned_docs/version-v13.0.0/guides/testing-relay-with-preloaded-queries.md @@ -141,7 +141,7 @@ This is more straightforward - it is done via a call to `environment.mock.queueP * Used a different query - the query resolver would not be called, `currentOperation` will be `null` * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). -* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. 
+* If data returned from the query is not what you expect, make sure you're generating the right graphql type. * You can tell you're mocking the wrong one if the return values look something like `` diff --git a/website/versioned_docs/version-v14.0.0/api-reference/hooks/load-query.md b/website/versioned_docs/version-v14.0.0/api-reference/hooks/load-query.md index 1c58c23c019a0..e42ee209cb11c 100644 --- a/website/versioned_docs/version-v14.0.0/api-reference/hooks/load-query.md +++ b/website/versioned_docs/version-v14.0.0/api-reference/hooks/load-query.md @@ -80,7 +80,7 @@ The exact format of the return value is *unstable and highly likely to change*. ### Behavior * `loadQuery()` will fetch data if passed a query, or data and the query if passed a preloadable concrete request. Once both the query and data are available, the data from the query will be written to the store. This differs from the behavior of `preloadQuery_DEPRECATED`, which would only write data to the store if the query was passed to `usePreloadedQuery`. -* the query reference returned from `loadQuery` will be retained by the relay store, preventing it the data from being garbage collected. Once you call `.dispose()` on the query reference, it can be garbage collected. +* the query reference returned from `loadQuery` will be retained by the relay store, preventing the data from being garbage collected. Once you call `.dispose()` on the query reference, it can be garbage collected. * `loadQuery()` will throw an error if it is called during React's render phase. 
diff --git a/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/commit-mutation.md b/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/commit-mutation.md index e9a3bb2f3c376..e45eb1053e6b7 100644 --- a/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/commit-mutation.md +++ b/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/commit-mutation.md @@ -22,7 +22,7 @@ See also the [`useMutation`](../use-mutation/) API and [Guide to Updating Data]( import type {FeedbackLikeMutation} from 'FeedbackLikeMutation.graphql'; const React = require('React'); -const {graphql, useMutation} = require('react-relay'); +const {graphql, commitMutation} = require('react-relay'); function likeFeedback(environment: IEnvironment): Disposable { return commitMutation(environment, { diff --git a/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/fetch-query.md b/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/fetch-query.md index 75c3df6938556..dd2234323a451 100644 --- a/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/fetch-query.md +++ b/website/versioned_docs/version-v14.0.0/api-reference/relay-runtime/fetch-query.md @@ -18,7 +18,7 @@ If you want to fetch a query outside of React, you can use the `fetchQuery` func ```js // You should prefer passing an environment that was returned from useRelayEnvironment() -const MyEnvironment = require('MyEnvironment'); +const environment = require('MyEnvironment'); const {fetchQuery} = require('react-relay'); fetchQuery( diff --git a/website/versioned_docs/version-v14.0.0/guided-tour/list-data/advanced-pagination.md b/website/versioned_docs/version-v14.0.0/guided-tour/list-data/advanced-pagination.md index 84e7594eb00cb..52ee6f2ad0163 100644 --- a/website/versioned_docs/version-v14.0.0/guided-tour/list-data/advanced-pagination.md +++ b/website/versioned_docs/version-v14.0.0/guided-tour/list-data/advanced-pagination.md @@ -56,7 +56,7 @@ 
function CombinedFriendsListComponent(props: Props) { const {data: viewerData, ...viewerPagination} = usePaginationFragment( graphql` - fragment CombinedFriendsListComponent_user on Viewer { + fragment CombinedFriendsListComponent_viewer on Viewer { actor { ... on User { name diff --git a/website/versioned_docs/version-v14.0.0/guided-tour/rendering/error-states.md b/website/versioned_docs/version-v14.0.0/guided-tour/rendering/error-states.md index dff919b539cc5..c07745359f189 100644 --- a/website/versioned_docs/version-v14.0.0/guided-tour/rendering/error-states.md +++ b/website/versioned_docs/version-v14.0.0/guided-tour/rendering/error-states.md @@ -17,9 +17,9 @@ import FbErrorBoundary from './fb/FbErrorBoundary.md'; As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. -We can use [Error Boundary](https://reactjs.org/docs/error-boundaries.html) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. 
-[Error boundaries](https://reactjs.org/docs/error-boundaries.html) are simply components that implement the static `getDerivedStateFromError` method: +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: ```js const React = require('React'); diff --git a/website/versioned_docs/version-v14.0.0/guides/required-directive.md b/website/versioned_docs/version-v14.0.0/guides/required-directive.md index b8a082827cfc6..b0799f710414c 100644 --- a/website/versioned_docs/version-v14.0.0/guides/required-directive.md +++ b/website/versioned_docs/version-v14.0.0/guides/required-directive.md @@ -51,11 +51,11 @@ This value is not expected to ever be null, but the component **can still render ### `THROW` (unrecoverable) -This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://reactjs.org/docs/error-boundaries.html). +This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary). ## Locality -A field's `@required` status is **local to the fragment where it is specified**. This allows you to add add/remove the directive without having to think about anything outside the scope of your component. +A field's `@required` status is **local to the fragment where it is specified**. 
This allows you to add/remove the directive without having to think about anything outside the scope of your component. This choice reflects the fact that some components may be able to recover better from missing data than others. For example, a `` component could probably render something sensible even if the restaurant's address is missing, but a `` component might not. @@ -116,7 +116,7 @@ fragment MyFrag on Actor { } ``` -In this situation Relay will generate a union type like: `{__typename: 'User', name: string} | {__typename: '%ignore this%}`. Now you can check the `__typename` field to narrow your object's type down to one that has a non-nullalble `name`. +In this situation Relay will generate a union type like: `{__typename: 'User', name: string} | {__typename: '%ignore this%}`. Now you can check the `__typename` field to narrow your object's type down to one that has a non-nullable `name`. Example diff showing the adoption of this strategy: D24370183 diff --git a/website/versioned_docs/version-v14.0.0/guides/testing-relay-components.md b/website/versioned_docs/version-v14.0.0/guides/testing-relay-components.md index b089020a71b64..41649decddc55 100644 --- a/website/versioned_docs/version-v14.0.0/guides/testing-relay-components.md +++ b/website/versioned_docs/version-v14.0.0/guides/testing-relay-components.md @@ -556,7 +556,7 @@ test('Error State', () => { The examples in this guide should work for testing components both with Relay Hooks, Containers or Renderers. When writing tests that involve the `usePreloadedQuery` hook, please also see the `queuePendingOperation` note above. -### toMatchSnaphot(...) +### toMatchSnapshot(...) Even though in all of the examples here you can see assertions with `toMatchSnapshot()`, we keep it that way just to make examples concise. But it's not the recommended way to test your components. 
diff --git a/website/versioned_docs/version-v14.0.0/guides/testing-relay-with-preloaded-queries.md b/website/versioned_docs/version-v14.0.0/guides/testing-relay-with-preloaded-queries.md index 3dba45a6bcb97..426791a14032a 100644 --- a/website/versioned_docs/version-v14.0.0/guides/testing-relay-with-preloaded-queries.md +++ b/website/versioned_docs/version-v14.0.0/guides/testing-relay-with-preloaded-queries.md @@ -141,7 +141,7 @@ This is more straightforward - it is done via a call to `environment.mock.queueP * Used a different query - the query resolver would not be called, `currentOperation` will be `null` * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). -* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. +* If data returned from the query is not what you expect, make sure you're generating the right graphql type. * You can tell you're mocking the wrong one if the return values look something like `` diff --git a/website/versioned_docs/version-v14.0.0/tutorial/connections-pagination.md b/website/versioned_docs/version-v14.0.0/tutorial/connections-pagination.md index 4048bd6d80020..78514c376445d 100644 --- a/website/versioned_docs/version-v14.0.0/tutorial/connections-pagination.md +++ b/website/versioned_docs/version-v14.0.0/tutorial/connections-pagination.md @@ -12,7 +12,7 @@ There are three important points to understand: * The list itself has properties, such as whether or not there is a next page available. We handle this with a node that represent the list itself as well as one for the current page. * Pagination is done by *cursors* — opaque symbols that point to the next page of results — rather than offsets. -Imagine we want to show a list of the user’s friends. 
An a high level, we imagine a graph where the viewer and their friends are each nodes. From the viewer to each friend node is an edge, and the edge itself has properties. +Imagine we want to show a list of the user’s friends. At a high level, we imagine a graph where the viewer and their friends are each nodes. From the viewer to each friend node is an edge, and the edge itself has properties. ![Conceptual graph with properties on its edges](/img/docs/tutorial/connections-conceptual-graph.png) diff --git a/website/versioned_docs/version-v14.0.0/tutorial/fragments-1.md b/website/versioned_docs/version-v14.0.0/tutorial/fragments-1.md index 2a9d2573a5dcb..a6205472eff6b 100644 --- a/website/versioned_docs/version-v14.0.0/tutorial/fragments-1.md +++ b/website/versioned_docs/version-v14.0.0/tutorial/fragments-1.md @@ -14,7 +14,7 @@ Go to `Newsfeed.tsx` and find `NewsfeedQuery` so that you can add the new field: ``` const NewsfeedQuery = graphql` query NewsfeedQuery { - top_story { + topStory { title summary // change-line @@ -468,6 +468,8 @@ const PosterBylineFragment = graphql` Now if you look at the images that our app downloads, you’ll see they’re of the smaller size, saving network bandwidth. Note that although we used integer literals for the value of our fragment arguments, we can also use variables supplied at runtime, as we'll see in later sections. +_Note: The development server provided does not do actual image resizing, it will only append the size as integer to returned image URL as query parameter._ + Field arguments (e.g. `url(height: 100)`) are a feature of GraphQL itself, while fragment arguments (as in `@argumentDefinitions` and `@arguments`) are Relay-specific features. The Relay compiler processes these fragment arguments when it combines fragments into queries. 
--- diff --git a/website/versioned_docs/version-v14.0.0/tutorial/queries-1.md b/website/versioned_docs/version-v14.0.0/tutorial/queries-1.md index 228e59bba04ea..a64c1ecfab61d 100644 --- a/website/versioned_docs/version-v14.0.0/tutorial/queries-1.md +++ b/website/versioned_docs/version-v14.0.0/tutorial/queries-1.md @@ -130,6 +130,29 @@ The object that `useLazyLoadQuery` returns has the same shape as the query. For Notice that each field selected by the GraphQL query corresponds to a property in the JSON response. +To see the result, we first need to address an error that TypeScript reports with this code as we’ve written it: + +``` +const story = data.topStory; + ^^^^^^^^ +Property 'topStory' does not exist on type 'unknown' +``` + +To fix this, we need to annotate the call to `useLazyLoadQuery` with types that Relay generates. That way, TypeScript will know what type `data` should have based on the fields we’ve selected in our query. Add the following: + +``` +// change-line +import type {NewsfeedQuery as NewsfeedQueryType} from './__generated__/NewsfeedQuery.graphql'; + +function Newsfeed({}) { + const data = useLazyLoadQuery + // change-line + + (NewsfeedQuery, {}); + ... +} +``` + At this point, you should see a story fetched from the server: ![Screenshot](/img/docs/tutorial/queries-basic-screenshot.png) @@ -141,7 +164,6 @@ The server's responses are artifically slowed down to make loading states percep The `useLazyLoadQuery` hook fetches the data when the component is first rendered. Relay also has APIs for pre-fetching the data before your app has even loaded — these are covered later. In any case, Relay uses Suspense to show a loading indicator until the data is available. This is Relay in its most basic form: fetching the results of a GraphQL query when a component is rendered. 
As the tutorial progresses, we’ll see how Relay’s features fit together to make your app more maintainable — starting with a look at how Relay generates TypeScript types corresponding to each query. -

Deep dive: Suspense for Data Loading @@ -197,34 +219,12 @@ along with various other properties and information. These data structures are c
-* * * - -## Relay and the Type System - -You might notice that TypeScript reports an error with this code as we’ve written it: - -``` -const story = data.topStory; - ^^^^^^^^ -Property 'topStory' does not exist on type 'unknown' -``` - -To fix this, we need to annotate the call to `useLazyLoadQuery` with types that Relay generates. That way, TypeScript will know what type `data` should have based on the fields we’ve selected in our query. Add the following: - -``` -// change-line -import type {NewsfeedQuery as NewsfeedQueryType} from './__generated__/NewsfeedQuery.graphql'; +
+Deep dive: Relay and the Type System -function Newsfeed({}) { - const data = useLazyLoadQuery - // change-line - - (NewsfeedQuery, {}); - ... -} -``` +To fix the TypeScript error we had to import a file that we did not create ourselves: `__generated__/NewsfeedQuery.graphql`. What's in this file? -If we look inside `__generated__/NewsfeedQuery.graphql` we’ll see the following type definition — with the annotation we’ve just added, TypeScript knows that `data` should have this type: +If we look inside it, we’ll see the following type definition — with the annotation we’ve just added, TypeScript knows that `data` should have this type: ``` export type NewsfeedQuery$data = { @@ -248,7 +248,8 @@ export type NewsfeedQuery$data = { Using Relay’s generated types makes your app safer and more maintainable. In addition to TypeScript, Relay supports the Flow type system if you want to use that instead. When using Flow, the extra annotation on `useLazyLoadQuery` is not needed, because Flow directly understands the contents of the graphql`` tagged literal. -We’ll revisit types throughout this tutorial. But next, we'll look at an even more important way that Relay helps us with maintainability. +We’ll revisit types throughout this tutorial. +
* * * diff --git a/website/versioned_docs/version-v14.0.0/tutorial/queries-2.md b/website/versioned_docs/version-v14.0.0/tutorial/queries-2.md index 5b7da89255620..ddfe9c04388bc 100644 --- a/website/versioned_docs/version-v14.0.0/tutorial/queries-2.md +++ b/website/versioned_docs/version-v14.0.0/tutorial/queries-2.md @@ -227,7 +227,7 @@ Relay will garbage-collect nodes from the Store if they aren’t “reachable”
Deep dive: Why GraphQL Needs a Syntax for Variables -You might be wondering why GraphQL even has the concept of variables, instead of just interpolating the value of the variables into the query string. Well, [as mentioned before](../queries-1), the text of the GraphQL query string isn’t available at runtime, because Relay replaces it with a data structure that is more efficient. You can also configure Relay to use *prepared queries*, where the compiler uploads each query to the server at build time and assigns it an ID — in that case, at runtime, Relay is just telling the server “Give me query #1337”, so string interpolation isn't possible and therefore the variables have to come out of band. Even when the query string is available, passing variable values separately eliminates any issues with serializing arbitrary values and escaping strings, above what is required with any HTTP request. +You might be wondering why GraphQL even has the concept of variables, instead of just interpolating the value of the variables into the query string. Well, [as mentioned before](../queries-1), the text of the GraphQL query string isn’t available at runtime, because Relay replaces it with a data structure that is more efficient. You can also configure Relay to use [persisted queries](../guides/persisted-queries.md), where the compiler uploads each query to the server at build time and assigns it an ID — in that case, at runtime, Relay is just telling the server “Give me query #1337”, so string interpolation isn't possible and therefore the variables have to come out of band. Even when the query string is available, passing variable values separately eliminates any issues with serializing arbitrary values and escaping strings, above what is required with any HTTP request.
* * * diff --git a/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/entrypoint-container.md b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/entrypoint-container.md new file mode 100644 index 0000000000000..77c2a3e44a451 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/entrypoint-container.md @@ -0,0 +1,38 @@ +--- +id: entrypoint-container +title: EntryPointContainer +slug: /api-reference/entrypoint-container/ +description: API reference for EntryPointContainer, a React component used to render the root component of an entrypoint +keywords: + - entrypoint + - container + - root +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## `EntryPointContainer` + + + +For more information, see the [Defining EntryPoints](../../guides/entrypoints/using-entrypoints/#defining-entrypoints) and [Consuming EntryPoints](../../guides/entrypoints/using-entrypoints/#-entrypoints) guides. + + + +```js +function EntryPointContainer({ + entryPointReference, + props, +}: { + +entryPointReference: PreloadedEntryPoint, + +props: TRuntimeProps, +}): ReactElement +``` + +A React component that renders a preloaded EntryPoint. + +* `entryPointReference`: the value returned from a call to `loadEntryPoint` or acquired from the `useEntryPointLoader` hook. 
+* `props`: additional runtime props that will be passed to the `Component` + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/load-entrypoint.md b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/load-entrypoint.md new file mode 100644 index 0000000000000..66c819c8b7d0e --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/load-entrypoint.md @@ -0,0 +1,77 @@ +--- +id: load-entrypoint +title: loadEntryPoint +slug: /api-reference/load-entrypoint/ +description: API reference for loadEntryPoint, which imperatively loads an entrypoint and data for its queries +keywords: + - entrypoint + - preload + - render-as-you-fetch +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## `loadEntryPoint` + +This function is designed to be used with `EntryPointContainer` to implement the "render-as-you-fetch" pattern. + +EntryPoint references returned from `loadEntryPoint` will leak data to the Relay store (if they have associated queries) unless `.dispose()` is called on them once they are no longer referenced. As such, prefer using `useEntryPointLoader` when possible, which ensures that EntryPoint references are correctly disposed for you. See the [`useEntryPointLoader`](../use-entrypoint-loader) docs for a more complete example. + + + +For more information, see the [Loading EntryPoints](../../guides/entrypoints/using-entrypoints/#loading-entrypoints) guide. + + + +```js +const EntryPoint = require('MyComponent.entrypoint.js'); + +const {loadQuery} = require('react-relay'); + +// Generally, your component should access the environment from the React context, +// and pass that environment to this function. 
+const getEntrypointReference = environment => loadEntryPoint( + { getEnvironment: () => environment }, + EntryPoint, + {id: '4'}, +); + +// later: pass entryPointReference to EntryPointContainer +// Note that EntryPoint references should have .dispose() called on them, +// which is missing in this example. +``` + +### Arguments + +* `environmentProvider`: A provider for a Relay Environment instance on which to execute the request. If you're starting this request somewhere within a React component, you probably want to use the environment you obtain from using [`useRelayEnvironment`](../use-relay-environment/). +* `EntryPoint`: EntryPoint to load. +* `entryPointParams`: Parameters that will be passed to the EntryPoint's `getPreloadProps` method. + +### Flow Type Parameters + +* `TEntryPointParams`: Type parameter corresponding to the type of the first parameter of the `getPreloadProps` method of the EntryPoint. +* `TPreloadedQueries`: the type of the `queries` parameter to the EntryPoint component. +* `TPreloadedEntryPoints`: the type of the `entrypoints` parameter passed to the EntryPoint component. +* `TRuntimeProps`: the type of the `props` prop passed to `EntryPointContainer`. This object is passed down to the EntryPoint component, also as `props`. +* `TExtraProps`: if an EntryPoint's `getPreloadProps` method returns an object with an `extraProps` property, those extra props will be passed to the EntryPoint component as `extraProps`. +* `TEntryPointComponent`: the type of the EntryPoint component. +* `TEntryPoint`: the type of the EntryPoint. + +### Return Value + +An EntryPoint reference with the following properties: + +* `dispose`: a method that will release any query references loaded by this EntryPoint (including indirectly, by way of other EntryPoints) from being retained by the store. This can cause the data referenced by these query references to be garbage collected. + +The exact format of the return value is *unstable and highly likely to change*. 
We strongly recommend not using any other properties of the return value, as such code would be highly likely to break when upgrading to future versions of Relay. Instead, pass the result of `loadEntryPoint()` to `EntryPointContainer`. + +### Behavior + +* When `loadEntryPoint()` is called, each of an EntryPoint's associated queries (if it has any) will load their query data and query AST. Once both the query AST and the data are available, the data will be written to the store. This differs from the behavior of `prepareEntryPoint_DEPRECATED`, which would only write the data from an associated query to the store when that query was rendered with `usePreloadedQuery`. +* The EntryPoint reference's associated query references will be retained by the Relay store, preventing the data from being garbage collected. Once you call `.dispose()` on the EntryPoint reference, the data from the associated queries is liable to be garbage collected. +* `loadEntryPoint` may throw an error if it is called during React's render phase. 
+ + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/use-entrypoint-loader.md b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/use-entrypoint-loader.md new file mode 100644 index 0000000000000..8b3b04495e537 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/entrypoint-apis/use-entrypoint-loader.md @@ -0,0 +1,99 @@ +--- +id: use-entrypoint-loader +title: useEntryPointLoader +slug: /api-reference/use-entrypoint-loader/ +description: API reference for useEntryPointLoader, a React hook used to load entrypoints in response to user events +keywords: + - render-as-you-fetch + - entrypoint + - preload +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## `useEntryPointLoader` + +Hook used to make it easy to safely work with EntryPoints, while avoiding data leaking into the Relay store. It will keep an EntryPoint reference in state, and dispose of it when it is no longer accessible via state. + + + +For more information, see the [Loading EntryPoints](https://www.internalfb.com/intern/wiki/Relay/Guides/entry-points/#loading-entrypoints) guide. + + + +```js +const {useEntryPointLoader, EntryPointContainer} = require('react-relay'); + +const ComponentEntryPoint = require('Component.entrypoint'); + +function EntryPointRevealer(): React.MixedElement { + const environmentProvider = useMyEnvironmentProvider(); + const [ + entryPointReference, + loadEntryPoint, + disposeEntryPoint, + ] = useEntryPointLoader(environmentProvider, ComponentEntryPoint); + + return ( + <> + { + entryPointReference == null && ( + + ) + } + { + entryPointReference != null && ( + <> + + + + + + ) + } + + ); +} +``` + +### Arguments + +* `environmentProvider`: an object with a `getEnvironment` method that returns a relay environment. +* `EntryPoint`: the EntryPoint, usually acquired by importing a `.entrypoint.js` file. 
+ +### Flow Type Parameters + +* `TEntryPointParams`: the type of the first argument to the `getPreloadProps` method of the EntryPoint. +* `TPreloadedQueries`: the type of the `queries` prop passed to the EntryPoint component. +* `TPreloadedEntryPoints`: the type of the `entryPoints` prop passed to the EntryPoint component. +* `TRuntimeProps`: the type of the `props` prop passed to `EntryPointContainer`. This object is passed down to the EntryPoint component, also as `props`. +* `TExtraProps`: if an EntryPoint's `getPreloadProps` method returns an object with an `extraProps` property, those extra props will be passed to the EntryPoint component as `extraProps` and have type `TExtraProps`. +* `TEntryPointComponent`: the type of the EntryPoint component. +* `TEntryPoint`: the type of the EntryPoint. + +### Return value + +A tuple containing the following values: + +* `entryPointReference`: the EntryPoint reference, or `null`. +* `loadEntryPoint`: a callback that, when executed, will load an EntryPoint, which will be accessible as `entryPointReference`. If a previous EntryPoint was loaded, it will dispose of it. It may throw an error if called during React's render phase. + * Parameters + * `params: TEntryPointParams`: the params passed to the EntryPoint's `getPreloadProps` method. +* `disposeEntryPoint`: a callback that, when executed, will set `entryPointReference` to `null` and call `.dispose()` on it. It has type `() => void`. It should not be called during React's render phase. + +### Behavior + +* When the `loadEntryPoint` callback is called, each of an EntryPoint's associated queries (if it has any) will load their query data and query AST. Once both the query AST and the data are available, the data will be written to the store. This differs from the behavior of `prepareEntryPoint_DEPRECATED`, which would only write the data from an associated query to the store when that query was rendered with `usePreloadedQuery`. 
+* The EntryPoint reference's associated query references will be retained by the Relay store, preventing it the data from being garbage collected. Once you call `.dispose()` on the EntryPoint reference, the data from the associated queries is liable to be garbage collected. +* The `loadEntryPoint` callback may throw an error if it is called during React's render phase. + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/graphql/graphql-directives.md b/website/versioned_docs/version-v15.0.0/api-reference/graphql/graphql-directives.md new file mode 100644 index 0000000000000..4d5dd12edb272 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/graphql/graphql-directives.md @@ -0,0 +1,218 @@ +--- +id: graphql-directives +title: GraphQL Directives +slug: /api-reference/graphql-and-directives/ +description: API Reference for GraphQL directives +keywords: + - GraphQL + - Directive + - arguments + - argumentDefinitions + - connection + - relay + - inline + - provider +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Relay uses directives to add additional information to GraphQL documents, which are used by the [Relay compiler](../../guides/compiler/) to generate the appropriate runtime artifacts. These directives only appear in your application code and are removed from requests sent to your GraphQL server. + + +**Note:** The Relay compiler will maintain any directives supported by your server (such as `@include` or `@skip`) so they remain part of the request to the GraphQL server and won't alter generated runtime artifacts. + + +**Note:** The Relay compiler will maintain any directives supported by your server (such as `@include` or `@skip`) so they remain part of the request to the GraphQL server and won't alter generated runtime artifacts. 
Additional directives are documented [here](https://www.internalfb.com/intern/wiki/GraphQL/APIs_and_References/Directives/#graphql-standard). + + +## `@arguments` + +`@arguments` is a directive used to pass arguments to a fragment that was defined using [`@argumentDefinitions`](#argumentdefinitions). For example: + +```graphql +query TodoListQuery($userID: ID) { + ...TodoList_list @arguments(count: $count, userID: $userID) # Pass arguments here +} +``` + +## `@argumentDefinitions` + +`@argumentDefinitions` is a directive used to specify arguments taken by a fragment. For example: + +```graphql +fragment TodoList_list on TodoList @argumentDefinitions( + count: {type: "Int", defaultValue: 10}, # Optional argument + userID: {type: "ID"}, # Required argument +) { + title + todoItems(userID: $userID, first: $count) { # Use fragment arguments here as variables + ...TodoItem_item + } +} +``` + +### Provided Variables +A provided variable is a special fragment variable whose value is supplied by a specified provider function at runtime. This simplifies supplying device attributes, user experiment flags, and other runtime constants to graphql fragments. + +To add a provided variable: +- add an argument with `provider: "[JSModule].relayprovider"` to `@argumentDefinitions` +- ensure that `[JSModule].relayprovider.js` exists and exports a `get()` function + - `get` should return the same value on every call for a given run. 
+```graphql +fragment TodoItem_item on TodoList +@argumentDefinitions( + include_timestamp: { + type: "Boolean!", + provider: "Todo_ShouldIncludeTimestamp.relayprovider" + }, +) { + timestamp @include(if: $include_timestamp) + text +} +``` + +```javascript +// Todo_ShouldIncludeTimestamp.relayprovider.js +export default { + get(): boolean { + // must always return true or false for a given run + return check('todo_should_include_timestamp'); + }, +}; +``` +Notes: + + + +- Even though fragments declare provided variables in `argumentDefinitions`, their parent cannot pass provided variables through `@arguments`. +- An argument definition cannot specify both a provider and a defaultValue. +- If the modified fragment is included in operations that use hack preloaders (`@preloadable(hackPreloader: true)`), you will need to manually add provided variables when calling `RelayPreloader::gen`. + - Hack's typechecker will fail with `The field __relay_internal__pv__[JsModule] is missing.` + - We strongly encourage switching to [Entrypoints](../../guides/entrypoints/using-entrypoints/) if possible. +- _Unstable / subject to change_ + - Relay transforms provided variables to operation root variables and renames them to `__relay_internal__pv__[JsModule]`. + - Only relevant if you are debugging a query that uses provided variables. + + + + + +- Even though fragments declare provided variables in `argumentDefinitions`, their parent cannot pass provided variables through `@arguments`. +- An argument definition cannot specify both a provider and a defaultValue. +- _Unstable / subject to change_ + - Relay transforms provided variables to operation root variables and renames them to `__relay_internal__pv__[JsModule]`. + - Only relevant if you are debugging a query that uses provided variables. + + + +## `@connection(key: String!, filters: [String])` + +With `usePaginationFragment`, Relay expects connection fields to be annotated with a `@connection` directive. 
For more detailed information and an example, check out the [docs on `usePaginationFragment`](../../guided-tour/list-data/rendering-connections). + +## `@refetchable(queryName: String!)` + +With `useRefetchableFragment` and `usePaginationFragment`, Relay expects a `@refetchable` directive. The `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are declared on `Viewer` or `Query` types, or on a type that implements `Node` (i.e. a type that has an id). The `@refetchable` directive will autogenerate a query with the specified `queryName`. This will also generate Flow types for the query, available to import from the generated file: `.graphql.js`. For more detailed information and examples, check out the docs on [`useRefetchableFragment`](../use-refetchable-fragment/) or [`usePaginationFragment`](../use-pagination-fragment/). + +## `@relay(plural: Boolean)` + +When defining a fragment for use with a Fragment container, you can use the `@relay(plural: true)` directive to indicate that container expects the prop for that fragment to be a list of items instead of a single item. A query or parent that spreads a `@relay(plural: true)` fragment should do so within a plural field (i.e. a field backed by a [GraphQL list](http://graphql.org/learn/schema/#lists-and-non-null)). For example: + +```javascript +// Plural fragment definition +graphql` + fragment TodoItems_items on TodoItem @relay(plural: true) { + id + text + } +`; + +// Plural fragment usage: note the parent type is a list of items (`TodoItem[]`) +fragment TodoApp_app on App { + items { + // parent type is a list here + ...TodoItems_items + } +} +``` + +## `@required` + +`@required` is a directive you can add to fields in your Relay queries to declare how null values should be handled at runtime. + +See also [the @required guide](../../guides/required-directive/).
+ +## `@inline` + +The hooks APIs that Relay exposes allow you to read data from the store only during the render phase. In order to read data from outside of the render phase (or from outside of React), Relay exposes the `@inline` directive. The data from a fragment annotated with `@inline` can be read using `readInlineData`. + +In the example below, the function `processItemData` is called from a React component. It requires an item object with a specific set of fields. All React components that use this function should spread the `processItemData_item` fragment to ensure all of the correct item data is loaded for this function. + +```javascript +import {graphql, readInlineData} from 'react-relay'; + +// non-React function called from React +function processItemData(itemRef) { + const item = readInlineData(graphql` + fragment processItemData_item on Item @inline { + title + price + creator { + name + } + } + `, itemRef); + sendToThirdPartyApi({ + title: item.title, + price: item.price, + creatorName: item.creator.name + }); +} +``` + +```javascript +export default function MyComponent({item}) { + function handleClick() { + processItemData(item); + } + + const data = useFragment( + graphql` + fragment MyComponent_item on Item { + ...processItemData_item + title + } + `, + item + ); + + return ( + + ); +} +``` + +## `@relay(mask: Boolean)` + + It is not recommended to use `@relay(mask: false)`. Please instead consider using the `@inline` fragment. + +`@relay(mask: false)` can be used to prevent data masking; when including a fragment and annotating it with `@relay(mask: false)`, its data will be available directly to the parent instead of being masked for a different container. + +Applied to a fragment definition, `@relay(mask: false)` changes the generated Flow types to be better usable when the fragment is included with the same directive. The Flow types will no longer be exact objects and no longer contain internal marker fields. 
+ +This may be helpful to reduce redundant fragments when dealing with nested or recursive data within a single Component. + +Keep in mind that it is typically considered an **anti-pattern** to create a single fragment shared across many containers. Abusing this directive could result in over-fetching in your application. + +In the example below, the `user` prop will include the data for `id` and `name` fields wherever `...Component_internUser` is included, instead of Relay's normal behavior to mask those fields: + +```javascript +graphql` + fragment Component_internUser on InternUser @relay(mask: false) { + id + name + } +`; +``` + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/load-query.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/load-query.md new file mode 100644 index 0000000000000..33966e329d71e --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/load-query.md @@ -0,0 +1,84 @@ +--- +id: load-query +title: loadQuery +slug: /api-reference/load-query/ +description: API reference for loadQuery, which imperatively fetches data for a query, retains that query and returns a query reference +keywords: + - preload + - fetch + - query + - render-as-you-fetch + - retain + - query reference +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## `loadQuery` + +This function is designed to be used with the `usePreloadedQuery()` hook to implement the "render-as-you-fetch" pattern. + +Query references returned from `loadQuery` will leak data into the Relay store if `.dispose()` is not called on them once they are no longer referenced. As such, prefer calling `useQueryLoader` when possible, which ensures that query references are disposed for you. + +See the [`usePreloadedQuery`](../use-preloaded-query) docs for a more complete example.
+ +```js +const MyEnvironment = require('MyEnvironment'); +const {loadQuery} = require('react-relay'); + +const query = graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + } + } +`; + +// Note: you should generally not call loadQuery at the top level. +// Instead, it should be called in response to an event (such as a route navigation, +// click, etc.). +const queryReference = loadQuery( + MyEnvironment, + query, + {id: '4'}, + {fetchPolicy: 'store-or-network'}, +); + +// later: pass queryReference to usePreloadedQuery() +// Note that query references should have .dispose() called on them, +// which is missing in this example. +``` + +### Arguments + +* `environment`: A Relay Environment instance on which to execute the request. If you're starting this request somewhere within a React component, you probably want to use the environment you obtain from using [`useRelayEnvironment`](#userelayenvironment). +* `query`: GraphQL query to fetch, specified using a `graphql` template literal, or a preloadable concrete request, which can be acquired by requiring the file `$Parameters.graphql`. Relay will only generate the `$Parameters` file if the query is annotated with `@preloadable`. +* `variables`: Object containing the variable values to fetch the query. These variables need to match GraphQL variables declared inside the query. +* `options`: *_[Optional]_* options object + * `fetchPolicy`: Determines if cached data should be used, and whether to send a network request based on the cached data that is currently available in the Relay store (for more details, see our [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies) and [Garbage Collection](../../guided-tour/reusing-cached-data/availability-of-data) guides): + * "store-or-network": **(default)** *will* reuse locally cached data and will *only* send a network request if any data for the query is missing. If the query is fully cached, a network request will *not* be made.
+ * "store-and-network": *will* reuse locally cached data and will *always* send a network request, regardless of whether any data was missing from the local cache or not. + * "network-only": *will not* reuse locally cached data, and will *always* send a network request to fetch the query, ignoring any data that might be locally cached in Relay. + * `networkCacheConfig`: *_[Optional]_* Default value: `{force: true}`. Object containing cache config options for the *network layer*. Note that the network layer may contain an *additional* query response cache which will reuse network responses for identical queries. If you want to bypass this cache completely (which is the default behavior), pass `{force: true}` as the value for this option. +* `environmentProviderOptions`: *[Optional]* options object + * Options passed to an `environmentProvider` used in `prepareSurfaceEntryPoint.js`. + +### Return Value + +A query reference with the following properties: + +* `dispose`: a method that will release the query reference from being retained by the store. This can cause the data referenced by the query reference to be garbage collected. + +The exact format of the return value is *unstable and highly likely to change*. We strongly recommend not using any other properties of the return value, as such code would be highly likely to break when upgrading to future versions of Relay. Instead, pass the result of `loadQuery()` to `usePreloadedQuery()`. + +### Behavior + +* `loadQuery()` will fetch data if passed a query, or data and the query if passed a preloadable concrete request. Once both the query and data are available, the data from the query will be written to the store. This differs from the behavior of `preloadQuery_DEPRECATED`, which would only write data to the store if the query was passed to `usePreloadedQuery`. +* the query reference returned from `loadQuery` will be retained by the Relay store, preventing the data from being garbage collected.
Once you call `.dispose()` on the query reference, it can be garbage collected. +* `loadQuery()` will throw an error if it is called during React's render phase. + + + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/relay-environment-provider.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/relay-environment-provider.md new file mode 100644 index 0000000000000..882e457cace88 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/relay-environment-provider.md @@ -0,0 +1,78 @@ +--- +id: relay-environment-provider +title: RelayEnvironmentProvider +slug: /api-reference/relay-environment-provider/ +description: API reference for RelayEnvironmentProvider, which sets a Relay environment in React context +keywords: + - environment + - context +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `RelayEnvironmentProvider` + +This component is used to set a Relay environment in React Context. Usually, a *single* instance of this component should be rendered at the very root of the application, in order to set the Relay environment for the whole application: + +```js +const React = require('React'); +const { + Store, + RecordSource, + Environment, + Network, + Observable, +} = require("relay-runtime"); + +const {RelayEnvironmentProvider} = require('react-relay'); + +/** + * Custom fetch function to handle GraphQL requests for a Relay environment. + * + * This function is responsible for sending GraphQL requests over the network and returning + * the response data. It can be customized to integrate with different network libraries or + * to add authentication headers as needed. + * + * @param {RequestParameters} params - The GraphQL request parameters to send to the server. + * @param {Variables} variables - Variables used in the GraphQL query. 
+ */ +function fetchFunction(params, variables) { + const response = fetch("http://my-graphql/api", { + method: "POST", + headers: [["Content-Type", "application/json"]], + body: JSON.stringify({ + query: params.text, + variables, + }), + }); + + return Observable.from(response.then((data) => data.json())); +}; + +/** + * Creates a new Relay environment instance for managing (fetching, storing) GraphQL data. + */ +function createEnvironment() { + const network = Network.create(fetchFunction); + const store = new Store(new RecordSource()); + return new Environment({ store, network }); +} + +const environment = createEnvironment(); + +function Root() { + return ( + + + + ); +} + +module.exports = Root; +``` + +### Props + +* `environment`: The Relay environment to set in React Context. Any Relay Hooks (like [`useLazyLoadQuery`](../use-lazy-load-query) or [`useFragment`](../use-fragment)) used in descendants of this provider component will use the Relay environment specified here + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-client-query.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-client-query.md new file mode 100644 index 0000000000000..8e1f3a1019674 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-client-query.md @@ -0,0 +1,65 @@ +--- +id: use-client-query +title: useClientQuery +slug: /api-reference/use-client-query/ +description: API reference for useClientQuery, a React hook used to render client only queries +keywords: + - query + - read + - client-query +--- + +import DocsRating from '@site/src/core/DocsRating'; + +`useClientQuery` hook is used to render queries that read _only_ client fields. + +The Relay Compiler fully supports [client-side extensions](../../guides/client-schema-extensions/) of the schema, which allows you to define local fields and types. 
+ +```graphql +# example client extension of the `Query` type +extend type Query { + client_field: String +} +``` + +These client-only fields are not sent to the server, and should be updated +using APIs for local updates, for example `commitPayload`. + +```js +const React = require('React'); + +const {graphql, useClientQuery} = require('react-relay'); + +function ClientQueryComponent() { + const data = useClientQuery( + graphql` + query ClientQueryComponentQuery { + client_field + } + `, + {}, // variables + ); + + return ( +
{data.client_field}
+ ); +} +``` + + +### Arguments + +* `query`: GraphQL query specified using a `graphql` template literal. +* `variables`: Object containing the variable values to fetch the query. These variables need to match GraphQL variables declared inside the query. + +### Return Value + +* `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified query. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. For example, the type of `data` above is: `{| client_field: ?string |}`. + +### Behavior + +* This hook works as [`useLazyLoadQuery`](../use-lazy-load-query) with `fetchPolicy: store-only`; it does not send the network request. + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-fragment.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-fragment.md new file mode 100644 index 0000000000000..da33250e03959 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-fragment.md @@ -0,0 +1,69 @@ +--- +id: use-fragment +title: useFragment +slug: /api-reference/use-fragment/ +description: API reference for useFragment, a React hook used to read fragment data from the Relay store using a fragment reference +keywords: + - fragment + - read + - fragment reference +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `useFragment` + +```js +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +const React = require('React'); + +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const data = useFragment( + graphql` + fragment UserComponent_user on User { + name + profile_picture(scale: 2) { + uri + } + } + `, + props.user, + ); + + return ( + <> +

{data.name}

+
+ +
+ + ); +} +``` + +### Arguments + +* `fragment`: GraphQL fragment specified using a `graphql` template literal. +* `fragmentReference`: The *fragment reference* is an opaque Relay object that Relay uses to read the data for the fragment from the store; more specifically, it contains information about which particular object instance the data should be read from. + * The type of the fragment reference can be imported from the generated Flow types, from the file `.graphql.js`, and can be used to declare the type of your `Props`. The name of the fragment reference type will be: `$key`. We use our [lint rule](https://github.com/relayjs/eslint-plugin-relay) to enforce that the type of the fragment reference prop is correctly declared. + +### Return Value + +* `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified fragment. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. For example, the type of `data` above is: `{ name: ?string, profile_picture: ?{ uri: ?string } }`. + +### Behavior + +* The component is automatically subscribed to updates to the fragment data: if the data for this particular `User` is updated anywhere in the app (e.g. via fetching new data, or mutating existing data), the component will automatically re-render with the latest updated data. +* The component will suspend if any data for that specific fragment is missing, and the data is currently being fetched by a parent query. + * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states) guide. 
+ + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-lazy-load-query.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-lazy-load-query.md new file mode 100644 index 0000000000000..1a80e04b4a9a6 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-lazy-load-query.md @@ -0,0 +1,77 @@ +--- +id: use-lazy-load-query +title: useLazyLoadQuery +slug: /api-reference/use-lazy-load-query/ +description: API reference for useLazyLoadQuery, a React hook used to lazily fetch query data when a component renders +keywords: + - lazy fetching + - query + - fetch +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `useLazyLoadQuery` + +Hook used to fetch a GraphQL query during render. This hook can trigger multiple nested or waterfalling round trips if used without caution, and waits until render to start a data fetch (when it can usually start a lot sooner than render), thereby degrading performance. Instead, prefer [`usePreloadedQuery`](../use-preloaded-query). + +```js +const React = require('React'); + +const {graphql, useLazyLoadQuery} = require('react-relay'); + +function App() { + const data = useLazyLoadQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + {id: 4}, + {fetchPolicy: 'store-or-network'}, + ); + + return

{data.user?.name}

; +} +``` + +### Arguments + +* `query`: GraphQL query specified using a `graphql` template literal. +* `variables`: Object containing the variable values to fetch the query. These variables need to match GraphQL variables declared inside the query. +* `options`: _*[Optional]*_ options object + * `fetchPolicy`: Determines if cached data should be used, and when to send a network request based on the cached data that is currently available in the Relay store (for more details, see our [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies) and [Garbage Collection](../../guided-tour/reusing-cached-data/presence-of-data) guides): + * "store-or-network": _*(default)*_ *will* reuse locally cached data and will *only* send a network request if any data for the query is missing. If the query is fully cached, a network request will *not* be made. + * "store-and-network": *will* reuse locally cached data and will *always* send a network request, regardless of whether any data was missing from the local cache or not. + * "network-only": *will* *not* reuse locally cached data, and will *always* send a network request to fetch the query, ignoring any data that might be locally cached in Relay. + * "store-only": *will* *only* reuse locally cached data, and will *never* send a network request to fetch the query. In this case, the responsibility of fetching the query falls to the caller, but this policy could also be used to read and operate on data that is entirely [local](../../guided-tour/updating-data/local-data-updates). + * `fetchKey`: A `fetchKey` can be passed to force a re-evaluation of the current query and variables when the component re-renders, even if the variables didn't change, or even if the component isn't remounted (similarly to how passing a different `key` to a React component will cause it to remount). 
If the `fetchKey` is different from the one used in the previous render, the current query will be re-evaluated against the store, and it might be refetched depending on the current `fetchPolicy` and the state of the cache. + * `networkCacheConfig`: *_[Optional] _* Default value: `{force: true}`. Object containing cache config options for the *network layer*. Note that the network layer may contain an *additional* query response cache which will reuse network responses for identical queries. If you want to bypass this cache completely (which is the default behavior), pass `{force: true}` as the value for this option. + +### Return Value + +* `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified query. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. For example, the type of `data` above is: `{| user: ?{| name: ?string |} |}`. + +### Behavior + +* It is expected for `useLazyLoadQuery` to have been rendered under a [`RelayEnvironmentProvider`](../relay-environment-provider), in order to access the correct Relay environment, otherwise an error will be thrown. +* Calling `useLazyLoadQuery` will fetch and render the data for this query, and it may [*_suspend_*](../../guided-tour/rendering/loading-states) while the network request is in flight, depending on the specified `fetchPolicy`, and whether cached data is available, or if it needs to send and wait for a network request. If `useLazyLoadQuery` causes the component to suspend, you'll need to make sure that there's a `Suspense` ancestor wrapping this component in order to show the appropriate loading state. + * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states/) guide. 
+* The component is automatically subscribed to updates to the query data: if the data for this query is updated anywhere in the app, the component will automatically re-render with the latest updated data. +* After a component using `useLazyLoadQuery` has committed, re-rendering/updating the component will not cause the query to be fetched again. + * If the component is re-rendered with *different query variables,* that will cause the query to be fetched again with the new variables, and potentially re-render with different data. + * If the component *unmounts and remounts*, that will cause the current query and variables to be refetched (depending on the `fetchPolicy` and the state of the cache). + +### Differences with `QueryRenderer` + +* `useLazyLoadQuery` no longer takes a Relay environment as a parameter, and thus no longer sets the environment in React Context, like `QueryRenderer` did. Instead, `useLazyLoadQuery` should be used as a descendant of a [`RelayEnvironmentProvider`](../relay-environment-provider), which now sets the Relay environment in Context. Usually, you should render a single `RelayEnvironmentProvider` at the very root of the application, to set a single Relay environment for the whole application. +* `useLazyLoadQuery` will use [Suspense](../../guided-tour/rendering/loading-states) to allow developers to render loading states using Suspense boundaries, and will throw errors if network errors occur, which can be caught and rendered with Error Boundaries. This as opposed to providing error objects or null props to the `QueryRenderer` render function to indicate errors or loading states. +* `useLazyLoadQuery` fully supports fetch policies in order to reuse data that is cached in the Relay store instead of solely relying on the network response cache. +* `useLazyLoadQuery` has better type safety guarantees for the data it returns, which was not possible with QueryRenderer since we couldn't parametrize the type of the data with a renderer api. 
+ + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-mutation.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-mutation.md new file mode 100644 index 0000000000000..ac87cd519cfff --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-mutation.md @@ -0,0 +1,94 @@ +--- +id: use-mutation +title: useMutation +slug: /api-reference/use-mutation/ +description: API reference for useMutation, a React hook used to execute a GraphQL mutation +keywords: + - mutation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbUseMutationParameter from './fb/FbUseMutationParameter.md'; + +## `useMutation` + +Hook used to execute a mutation in a React component. + +```js +import type {FeedbackLikeMutation} from 'FeedbackLikeMutation.graphql'; +const React = require('React'); + +const {graphql, useMutation} = require('react-relay'); + +function LikeButton() { + const [commit, isInFlight] = useMutation(graphql` + mutation FeedbackLikeMutation($input: FeedbackLikeData!) { + feedback_like(data: $input) { + feedback { + id + viewer_does_like + like_count + } + } + } + `); + + if (isInFlight) { + return ; + } + + return ( + + + ); +} + +module.exports = FriendsList; +``` + +### Arguments + +* `fragment`: GraphQL fragment specified using a `graphql` template literal. + * This fragment must have an `@connection` directive on a connection field, otherwise using it will throw an error. + * This fragment must have a `@refetchable` directive, otherwise using it will throw an error. The `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are declared on `Viewer` or `Query` types, or on a type that implements `Node` (i.e. a type that has an `id`). + * Note that you *do not* need to manually specify a pagination query yourself. 
The `@refetchable` directive will autogenerate a query with the specified `queryName`. This will also generate Flow types for the query, available to import from the generated file: `.graphql.js`. +* `fragmentReference`: The *fragment reference* is an opaque Relay object that Relay uses to read the data for the fragment from the store; more specifically, it contains information about which particular object instance the data should be read from. + * The type of the fragment reference can be imported from the generated Flow types, from the file `.graphql.js`, and can be used to declare the type of your `Props`. The name of the fragment reference type will be: `$key`. We use our [lint rule](https://github.com/relayjs/eslint-plugin-relay) to enforce that the type of the fragment reference prop is correctly declared. + +### Return Value + + + + + + + +Object containing the following properties: + +* `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified fragment. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. +* `isLoadingNext`: Boolean value which indicates if a pagination request for the *next* items in the connection is currently in flight, including any incremental data payloads. +* `isLoadingPrevious`: Boolean value which indicates if a pagination request for the *previous* items in the connection is currently in flight, including any incremental data payloads. +* `hasNext`: Boolean value which indicates if the end of the connection has been reached in the "forward" direction. It will be true if there are more items to query for available in that direction, or false otherwise. +* `hasPrevious`: Boolean value which indicates if the end of the connection has been reached in the "backward" direction. It will be true if there are more items to query for available in that direction, or false otherwise. 
+* `loadNext`: Function used to fetch more items in the connection in the "forward" direction. + * Arguments: + * `count`*:* Number that indicates how many items to query for in the pagination request. + * `options`: *_[Optional]_* options object + * `onComplete`: Function that will be called whenever the refetch request has completed, including any incremental data payloads. If an error occurs during the request, `onComplete` will be called with an `Error` object as the first parameter. + * Return Value: + * `disposable`: Object containing a `dispose` function. Calling `disposable.dispose()` will cancel the pagination request. + * Behavior: + * Calling `loadNext` *will not* cause the component to suspend. Instead, the `isLoadingNext` value will be set to true while the request is in flight, and the new items from the pagination request will be added to the connection, causing the component to re-render. + * Pagination requests initiated from calling `loadNext` will *always* use the same variables that were originally used to fetch the connection, *except* pagination variables (which need to change in order to perform pagination); changing variables other than the pagination variables during pagination doesn't make sense, since that'd mean we'd be querying for a different connection. +* `loadPrevious`: Function used to fetch more items in the connection in the "backward" direction. + * Arguments: + * `count`*:* Number that indicates how many items to query for in the pagination request. + * `options`: *_[Optional]_* options object + * `onComplete`: Function that will be called whenever the refetch request has completed, including any incremental data payloads. If an error occurs during the request, `onComplete` will be called with an `Error` object as the first parameter. + * Return Value: + * `disposable`: Object containing a `dispose` function. Calling `disposable.dispose()` will cancel the pagination request. 
+ * Behavior: + * Calling `loadPrevious` *will not* cause the component to suspend. Instead, the `isLoadingPrevious` value will be set to true while the request is in flight, and the new items from the pagination request will be added to the connection, causing the component to re-render. + * Pagination requests initiated from calling `loadPrevious` will *always* use the same variables that were originally used to fetch the connection, *except* pagination variables (which need to change in order to perform pagination); changing variables other than the pagination variables during pagination doesn't make sense, since that'd mean we'd be querying for a different connection. +* `refetch`: Function used to refetch the connection fragment with a potentially new set of variables. + * Arguments: + * `variables`: Object containing the new set of variable values to be used to fetch the `@refetchable` query. + * These variables need to match GraphQL variables referenced inside the fragment. + * However, only the variables that are intended to change for the refetch request need to be specified; any variables referenced by the fragment that are omitted from this input will fall back to using the value specified in the original parent query. So for example, to refetch the fragment with the exact same variables as it was originally fetched, you can call `refetch({})`. + * Similarly, passing an `id` value for the `$id` variable is _*optional*_, unless the fragment wants to be refetched with a different `id`. When refetching a `@refetchable` fragment, Relay will already know the id of the rendered object. + * `options`: *_[Optional]_* options object + * `fetchPolicy`: Determines if cached data should be used, and when to send a network request based on cached data that is available. See the [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies/) section for full specification. 
+ * `onComplete`: Function that will be called whenever the refetch request has completed, including any incremental data payloads. + * Return value: + * `disposable`: Object containing a `dispose` function. Calling `disposable.dispose()` will cancel the refetch request. + * Behavior: + * Calling `refetch` with a new set of variables will fetch the fragment again *with the newly provided variables*. Note that the variables you need to provide are only the ones referenced inside the fragment. In this example, it means fetching the translated body of the currently rendered Comment, by passing a new value to the `lang` variable. + * Calling `refetch` will re-render your component and may cause it to *[suspend](../../guided-tour/rendering/loading-states)*, depending on the specified `fetchPolicy` and whether cached data is available or if it needs to send and wait for a network request. If refetch causes the component to suspend, you'll need to make sure that there's a `Suspense` boundary wrapping this component. + * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states/) guide. + + + +### Behavior + +* The component is automatically subscribed to updates to the fragment data: if the data for this particular `User` is updated anywhere in the app (e.g. via fetching new data, or mutating existing data), the component will automatically re-render with the latest updated data. +* The component will suspend if any data for that specific fragment is missing, and the data is currently being fetched by a parent query. + * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states/) guide. +* Note that pagination (`loadNext` or `loadPrevious`), *will not* cause the component to suspend. 
+ +### Differences with `PaginationContainer` + +* A pagination query no longer needs to be specified in this api, since it will be automatically generated by Relay by using a `@refetchable` fragment. +* This api supports simultaneous bi-directional pagination out of the box. +* This api no longer requires passing `getVariables` or `getFragmentVariables` configuration functions, like the `PaginationContainer` does. + * This implies that pagination no longer has a distinction between `variables` and `fragmentVariables`, which were previously vaguely defined concepts. Pagination requests will always use the same variables that were originally used to fetch the connection, *except* pagination variables (which need to change in order to perform pagination); changing variables other than the pagination variables during pagination doesn't make sense, since that'd mean we'd be querying for a different connection. +* This api no longer takes additional configuration like `direction` or `getConnectionFromProps` function (like Pagination Container does). These values will be automatically determined by Relay. +* Refetching no longer has a distinction between `variables` and `fragmentVariables`, which were previously vaguely defined concepts. Refetching will always correctly refetch and render the fragment with the variables you provide (any variables omitted in the input will fall back to using the original values in the parent query). +* Refetching will unequivocally update the component, which was not always true when calling `refetchConnection` from `PaginationContainer` (it would depend on what you were querying for in the refetch query and if your fragment was defined on the right object type). 
+ + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-preloaded-query.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-preloaded-query.md new file mode 100644 index 0000000000000..57c0abcdd6780 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-preloaded-query.md @@ -0,0 +1,84 @@ +--- +id: use-preloaded-query +title: usePreloadedQuery +slug: /api-reference/use-preloaded-query/ +description: API reference for usePreloadedQuery, a React hook used to read query data from the Relay store using a query reference +keywords: + - read + - query + - query reference +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `usePreloadedQuery` + +Hook used to access data fetched by an earlier call to [`loadQuery`](../load-query) or with the help of [`useQueryLoader`](../use-query-loader). This implements the "render-as-you-fetch" pattern: + +* Call the `loadQuery` callback returned from `useQueryLoader`. This will store a query reference in React state. + * You can also call the imported `loadQuery` directly, which returns a query reference. In that case, store the item in state or in a React ref, and call `dispose()` on the value when you are no longer using it. +* Then, in your render method, consume the query reference with `usePreloadedQuery()`. This call will suspend if the query is still pending, throw an error if it failed, and otherwise return the query results. +* This pattern is encouraged over `useLazyLoadQuery()` as it can allow fetching data earlier while not blocking rendering. + +For more information, see the [Rendering Queries](../../guided-tour/rendering/queries) guide. + +```js + +import type {AppQueryType} from 'AppQueryType.graphql'; + +const React = require('React'); + +const {graphql, useQueryLoader, usePreloadedQuery} = require('react-relay'); + +const AppQuery = graphql` + query AppQuery($id: ID!) 
{ + user(id: $id) { + name + } + } +`; + +type Props = { + initialQueryRef: PreloadedQuery, +}; + +function NameLoader(props) { + const [queryReference, loadQuery] = useQueryLoader( + AppQuery, + props.initialQueryRef, /* e.g. provided by router */ + ); + + return (<> + + + {queryReference != null + ? + : null + } + + ); +} + +function NameDisplay({ queryReference }) { + const data = usePreloadedQuery(AppQuery, queryReference); + + return

{data.user?.name}

; +} +``` + +### Arguments + +* `query`: GraphQL query specified using a `graphql` template literal. +* `preloadedQueryReference`: A `PreloadedQuery` query reference, which can be acquired from [`useQueryLoader`](../use-query-loader) or by calling [`loadQuery()`](../load-query) . + +### Return Value + +* `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified query. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. For example, the type of `data` above is: `{ user: ?{ name: ?string } }`. + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-query-loader.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-query-loader.md new file mode 100644 index 0000000000000..4129e47f2b11a --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-query-loader.md @@ -0,0 +1,95 @@ +--- +id: use-query-loader +title: useQueryLoader +slug: /api-reference/use-query-loader/ +description: API reference for useQueryLoader, a React hook used to imperatively fetch data for a query in response to a user event +keywords: + - query + - fetch + - preload + - render-as-you-fetch +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `useQueryLoader` + +Hook used to make it easy to safely load and retain queries. It will keep a query reference stored in state, and dispose of it when the component is disposed or it is no longer accessible via state. + +This hook is designed to be used with [`usePreloadedQuery`](../use-preloaded-query) to implement the "render-as-you-fetch" pattern. For more information, see the [Fetching Queries for Render](../../guided-tour/rendering/queries/) guide. + +```js +import type {PreloadedQuery} from 'react-relay'; + +const {useQueryLoader, usePreloadedQuery} = require('react-relay'); + +const AppQuery = graphql` + query AppQuery($id: ID!) 
{ + user(id: $id) { + name + } + } +`; + +function QueryFetcherExample() { + const [ + queryReference, + loadQuery, + disposeQuery, + ] = useQueryLoader( + AppQuery, + ); + + if (queryReference == null) { + return ( + + ); + } + + return ( + <> + + + + + + ); +} + +function NameDisplay({ queryReference }) { + const data = usePreloadedQuery(AppQuery, queryReference); + + return

{data.user?.name}

; +} +``` + +### Arguments + +* `query`: GraphQL query specified using a `graphql` template literal. +* `initialQueryRef`: _*[Optional]*_ An initial `PreloadedQuery` to be used as the initial value of the `queryReference` stored in state and returned by `useQueryLoader`. + +### Return value + +A tuple containing the following values: + +* `queryReference`: the query reference, or `null`. +* `loadQuery`: a callback that, when executed, will load a query, which will be accessible as `queryReference`. If a previous query was loaded, it will dispose of it. It will throw an error if called during React's render phase. + * Parameters + * `variables`: the variables with which the query is loaded. + * `options`: `LoadQueryOptions`. An optional options object, containing the following keys: + * `fetchPolicy`: _*[Optional]*_ Determines if cached data should be used, and when to send a network request based on the cached data that is currently available in the Relay store (for more details, see our [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies) and [Garbage Collection](../../guided-tour/reusing-cached-data/presence-of-data) guides): + * "store-or-network": _*(default)*_ *will* reuse locally cached data and will *only* send a network request if any data for the query is missing. If the query is fully cached, a network request will *not* be made. + * "store-and-network": *will* reuse locally cached data and will *always* send a network request, regardless of whether any data was missing from the local cache or not. + * "network-only": *will* *not* reuse locally cached data, and will *always* send a network request to fetch the query, ignoring any data that might be locally cached in Relay. + * `networkCacheConfig`: *_[Optional]_* Default value: `{force: true}`. Object containing cache config options for the *network layer*. 
Note that the network layer may contain an *additional* query response cache which will reuse network responses for identical queries. If you want to bypass this cache completely (which is the default behavior), pass `{force: true}` as the value for this option. +* `disposeQuery`: a callback that, when executed, will set `queryReference` to `null` and call `.dispose()` on it. It has type `() => void`. It should not be called during React's render phase. + +### Behavior + +* The `loadQuery` callback will fetch data if passed a query, or data and the query if passed a preloadable concrete request. Once both the query and data are available, the data from the query will be written to the store. This differs from the behavior of `preloadQuery_DEPRECATED`, which would only write data to the store if the query was passed to `usePreloadedQuery`. +* This query reference will be retained by the Relay store, preventing the data from being garbage collected. Once `.dispose()` is called on the query reference, the data is liable to be garbage collected. +* The `loadQuery` callback will throw an error if it is called during React's render phase. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-refetchable-fragment.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-refetchable-fragment.md new file mode 100644 index 0000000000000..f2d80974a672f --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-refetchable-fragment.md @@ -0,0 +1,121 @@ +--- +id: use-refetchable-fragment +title: useRefetchableFragment +slug: /api-reference/use-refetchable-fragment/ +description: API reference for useRefetchableFragment, a React hook used to refetch fragment data +keywords: + - refetch + - fragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbUseRefetchableFragmentApiReferenceCodeExample from './fb/FbUseRefetchableFragmentApiReferenceCodeExample.md'; +import FbUseRefetchableFragmentReturnValue from './fb/FbUseRefetchableFragmentReturnValue.md'; + +## `useRefetchableFragment` + +You can use `useRefetchableFragment` when you want to fetch and re-render a fragment with different data: + + + + + + + +```js +import type {CommentBody_comment$key} from 'CommentBody_comment.graphql'; + +const React = require('React'); + +const {graphql, useRefetchableFragment} = require('react-relay'); + + +type Props = { + comment: CommentBody_comment$key, +}; + +function CommentBody(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment CommentBody_comment on Comment + @refetchable(queryName: "CommentBodyRefetchQuery") { + body(lang: $lang) { + text + } + } + `, + props.comment, + ); + + return ( + <> +

{data.body?.text}

+ + + ); +} + +module.exports = CommentBody; +``` + +
+ +### Arguments + +* `fragment`: GraphQL fragment specified using a `graphql` template literal. This fragment must have a `@refetchable` directive, otherwise using it will throw an error. The `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are declared on `Viewer` or `Query` types, or on a type that implements `Node` (i.e. a type that has an `id`). + * Note that you *do not* need to manually specify a refetch query yourself. The `@refetchable` directive will autogenerate a query with the specified `queryName`. This will also generate Flow types for the query, available to import from the generated file: `.graphql.js`. +* `fragmentReference`: The *fragment reference* is an opaque Relay object that Relay uses to read the data for the fragment from the store; more specifically, it contains information about which particular object instance the data should be read from. + * The type of the fragment reference can be imported from the generated Flow types, from the file `.graphql.js`, and can be used to declare the type of your `Props`. The name of the fragment reference type will be: `$key`. We use our [lint rule](https://github.com/relayjs/eslint-plugin-relay) to enforce that the type of the fragment reference prop is correctly declared. + +### Return Value + + + + + + + +Tuple containing the following values + +* [0] `data`: Object that contains data which has been read out from the Relay store; the object matches the shape of specified fragment. + * The Flow type for data will also match this shape, and contain types derived from the GraphQL Schema. +* [1] `refetch`: Function used to refetch the fragment with a potentially new set of variables. + * Arguments: + * `variables`: Object containing the new set of variable values to be used to fetch the `@refetchable` query. + * These variables need to match GraphQL variables referenced inside the fragment. 
+ * However, only the variables that are intended to change for the refetch request need to be specified; any variables referenced by the fragment that are omitted from this input will fall back to using the value specified in the original parent query. So for example, to refetch the fragment with the exact same variables as it was originally fetched, you can call `refetch({})`. + * Similarly, passing an `id` value for the `$id` variable is _*optional*_, unless the fragment wants to be refetched with a different `id`. When refetching a `@refetchable` fragment, Relay will already know the id of the rendered object. + * `options`: *_[Optional]_* options object + * `fetchPolicy`: Determines if cached data should be used, and when to send a network request based on cached data that is available. See the [Fetch Policies](../../guided-tour/reusing-cached-data/fetch-policies/) section for full specification. + * `onComplete`: Function that will be called whenever the refetch request has completed, including any incremental data payloads. + * Return value: + * `disposable`: Object containing a `dispose` function. Calling `disposable.dispose()` will cancel the refetch request. + * Behavior: + * Calling `refetch` with a new set of variables will fetch the fragment again *with the newly provided variables*. Note that the variables you need to provide are only the ones referenced inside the fragment. In this example, it means fetching the translated body of the currently rendered Comment, by passing a new value to the `lang` variable. + * Calling `refetch` will re-render your component and may cause it to _*[suspend](../../guided-tour/rendering/loading-states)*_, depending on the specified `fetchPolicy` and whether cached data is available or if it needs to send and wait for a network request. If refetch causes the component to suspend, you'll need to make sure that there's a `Suspense` boundary wrapping this component. 
+ * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states/) guide. + + + +### Behavior + +* The component is automatically subscribed to updates to the fragment data: if the data for this particular `User` is updated anywhere in the app (e.g. via fetching new data, or mutating existing data), the component will automatically re-render with the latest updated data. +* The component will suspend if any data for that specific fragment is missing, and the data is currently being fetched by a parent query. + * For more details on Suspense, see our [Loading States with Suspense](../../guided-tour/rendering/loading-states/) guide. + +### Differences with `RefetchContainer` + +* A refetch query no longer needs to be specified in this api, since it will be automatically generated by Relay by using a `@refetchable` fragment. +* Refetching no longer has a distinction between `refetchVariables` and `renderVariables`, which were previously vaguely defined concepts. Refetching will always correctly refetch and render the fragment with the variables you provide (any variables omitted in the input will fallback to using the original values from the parent query). +* Refetching will unequivocally update the component, which was not always true when calling refetch from `RefetchContainer` (it would depend on what you were querying for in the refetch query and if your fragment was defined on the right object type). 
+ + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-relay-environment.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-relay-environment.md new file mode 100644 index 0000000000000..098764d7ed419 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-relay-environment.md @@ -0,0 +1,37 @@ +--- +id: use-relay-environment +title: useRelayEnvironment +slug: /api-reference/use-relay-environment/ +description: API reference for useRelayEnvironment, a React hook used to access the Relay environment from context +keywords: + - environment + - context +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## `useRelayEnvironment` + +Hook used to access a Relay environment that was set by a [`RelayEnvironmentProvider`](../relay-environment-provider): + +```js +const React = require('React'); + +const {useRelayEnvironment} = require('react-relay'); + +function MyComponent() { + const environment = useRelayEnvironment(); + + const handler = useCallback(() => { + // For example, can be used to pass the environment to functions + // that require a Relay environment. 
+ commitMutation(environment, ...); + }, [environment]) + + return (...); +} + +module.exports = MyComponent; +``` + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-subscription.md b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-subscription.md new file mode 100644 index 0000000000000..01a4822232488 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/hooks/use-subscription.md @@ -0,0 +1,66 @@ +--- +id: use-subscription +title: useSubscription +slug: /api-reference/use-subscription/ +description: API reference for useSubscription, a React hook used to subscribe and unsubscribe from a subscription +keywords: + - subscription +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import GraphQLSubscriptionConfig from '../types/GraphQLSubscriptionConfig.md'; + +## `useSubscription` + +Hook used to subscribe and unsubscribe to a subscription. + +```js +import {graphql, useSubscription} from 'react-relay'; +import {useMemo} from 'react'; + +const subscription = graphql` + subscription UserDataSubscription($input: InputData!) { + # ... + } +`; + +function UserComponent({ id }) { + // IMPORTANT: your config should be memoized. + // Otherwise, useSubscription will re-render too frequently. + const config = useMemo(() => ({ + variables: {id}, + subscription, + }), [id, subscription]); + + useSubscription(config); + + return (/* ... */); +} +``` + +### Arguments + +* `config`: a config of type [`GraphQLSubscriptionConfig`](#type-graphqlsubscriptionconfigtsubscriptionpayload) passed to [`requestSubscription`](../request-subscription/) +* `requestSubscriptionFn`: `?(IEnvironment, GraphQLSubscriptionConfig) => Disposable`. An optional function with the same signature as [`requestSubscription`](../request-subscription/), which will be called in its stead. Defaults to `requestSubscription`. 
+ + + +### Behavior + +* This is only a thin wrapper around the `requestSubscription` API. It will: + * Subscribe when the component is mounted with the given config + * Unsubscribe when the component is unmounted + * Unsubscribe and resubscribe with new values if the environment, config or `requestSubscriptionFn` changes. +* If you have the need to do something more complicated, such as imperatively requesting a subscription, please use the [`requestSubscription`](../request-subscription/) API directly. +* See the [GraphQL Subscriptions Guide](../../guided-tour/updating-data/graphql-subscriptions/) for a more detailed explanation of how to work with subscriptions. + + + +:::note +`useSubscription` doesn't automatically add `client_subscription_id`. You may need to provide an arbitrary `client_subscription_id` to `config.variables.input` +::: + + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/legacy-apis/legacy-apis.md b/website/versioned_docs/version-v15.0.0/api-reference/legacy-apis/legacy-apis.md new file mode 100644 index 0000000000000..0c06eb626c42b --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/legacy-apis/legacy-apis.md @@ -0,0 +1,18 @@ +--- +id: legacy-apis +title: Legacy APIs +slug: /api-reference/legacy-apis/ +description: API reference for legacy APIs +keywords: + - QueryRenderer + - Container +--- + +API references for our previous legacy APIs are available in our previous docs website: + +- [`QueryRenderer`](https://relay.dev/docs/en/v10.1.3/query-renderer) +- [`Fragment Container`](https://relay.dev/docs/en/v10.1.3/fragment-container) +- [`Refetch Container`](https://relay.dev/docs/en/v10.1.3/refetch-container) +- [`Pagination Container`](https://relay.dev/docs/en/v10.1.3/pagination-container) +- [`Mutations`](https://relay.dev/docs/en/v10.1.3/mutations) +- [`Subscriptions`](https://relay.dev/docs/en/v10.1.3/subscriptions) diff --git 
a/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/commit-mutation.md b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/commit-mutation.md new file mode 100644 index 0000000000000..9a08ff5a717f0 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/commit-mutation.md @@ -0,0 +1,65 @@ +--- +id: commit-mutation +title: commitMutation +slug: /api-reference/commit-mutation/ +description: API reference for commitMutation, which imperatively executes a mutation +keywords: + - mutation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import MutationConfig from '../types/MutationConfig.md'; +import Disposable from '../types/Disposable.md'; + +## `commitMutation` + +Imperatively execute a mutation. + +See also the [`useMutation`](../use-mutation/) API and [Guide to Updating Data](../../guided-tour/updating-data/). + +```js +import type {FeedbackLikeMutation} from 'FeedbackLikeMutation.graphql'; +const React = require('React'); + +const {graphql, commitMutation} = require('react-relay'); + +function likeFeedback(environment: IEnvironment): Disposable { + return commitMutation(environment, { + mutation: graphql` + mutation FeedbackLikeMutation($input: FeedbackLikeData!) { + feedback_like(data: $input) { + feedback { + id + viewer_does_like + like_count + } + } + } + `, + variables: { + input: { + id: '123', + }, + }, + }); +} +``` + +### Arguments + +* `environment`: `IEnvironment`. A Relay environment. +* `config`: [`MutationConfig`](#type-mutationconfigtmutationconfig-mutationparameters). + + + + +### Return Value + +* A [`Disposable`](#interface-disposable) which: + * If called while before the request completes, will cancel revert any optimistic updates and prevent the `onComplete` and `onError` callbacks from being executed. It will not necessarily cancel any network request. 
Will cause the `onUnsubscribe` callback to be called. + * If called after the initial request completes, will do nothing. + + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/fetch-query.md b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/fetch-query.md new file mode 100644 index 0000000000000..3f977530ddbad --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/fetch-query.md @@ -0,0 +1,111 @@ +--- +id: fetch-query +title: fetchQuery +slug: /api-reference/fetch-query/ +description: API reference for fetchQuery, which imperatively fetches data for a query and returns an observable +keywords: + - observable + - query + - fetch +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## `fetchQuery` + +If you want to fetch a query outside of React, you can use the `fetchQuery` function from `react-relay`: + +```js +// You should prefer passing an environment that was returned from useRelayEnvironment() +const MyEnvironment = require('MyEnvironment'); +const {fetchQuery} = require('react-relay'); + +fetchQuery( + environment, + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + {id: 4}, +) +.subscribe({ + start: () => {...}, + complete: () => {...}, + error: (error) => {...}, + next: (data) => {...} +}); +``` + +### Arguments + +* `environment`: A Relay Environment instance to execute the request on. If you're starting this request somewhere within a React component, you probably want to use the environment you obtain from using [`useRelayEnvironment`](../use-relay-environment/). +* `query`: GraphQL query to fetch, specified using a `graphql` template literal. +* `variables`: Object containing the variable values to fetch the query. These variables need to match GraphQL variables declared inside the query. 
+* `options`: *_[Optional]_* options object + * `networkCacheConfig`: *_[Optional]_* Object containing cache config options + * `force`: Boolean value. If true, will bypass the network response cache. Defaults to true. + +### Flow Type Parameters + +* `TQuery`: Type parameter that should correspond to the Flow type for the specified query. This type is available to import from the auto-generated file: `.graphql.js`. It will ensure that the type of the data provided by the observable matches the shape of the query, and enforces that the `variables` passed as input to `fetchQuery` match the type of the variables expected by the query. + +### Return Value + +* `observable`: Returns an observable instance. To start the request, `subscribe` or `toPromise` must be called on the observable. Exposes the following methods: + * `subscribe`: Function that can be called to subscribe to the observable for the network request. Keep in mind that this subscribes you only to the fetching of the query, not to any subsequent changes to the data within the Relay Store. + * Arguments: + * `observer`: Object that specifies observer functions for different events occurring on the network request observable. May specify the following event handlers as keys in the observer object: + * `start`: Function that will be called when the network request starts. It will receive a single `subscription` argument, which represents the subscription on the network observable. + * `complete`: Function that will be called if and when the network request completes successfully. + * `next`: Function that will be called every time a payload is received from the network. It will receive a single `data` argument, which represents a snapshot of the query data read from the Relay store at the moment a payload was received from the server. + * `error`: Function that will be called if an error occurs during the network request. It will receive a single `error` argument, containing the error that occurred. 
+ * `unsubscribe`: Function that will be called whenever the subscription is unsubscribed. It will receive a single `subscription` argument, which represents the subscription on the network observable. + * Return Value: + * `subscription`: Object representing a subscription to the observable. Calling `subscription.unsubscribe()` will cancel the network request. + * `toPromise`: + * Return Value: + * `promise`: Returns a promise that will resolve when the first network response is received from the server. If the request fails, the promise will reject. Cannot be cancelled. + + + +> The `next` function may be called multiple times when using Relay's [Incremental Data Delivery](../../guides/incremental-data-delivery/) capabilities to receive multiple payloads from the server. + + + +### Behavior + +* `fetchQuery` will automatically save the fetched data to the in-memory Relay store, and notify any components subscribed to the relevant data. +* `fetchQuery` will **NOT** retain the data for the query, meaning that it is not guaranteed that the data will remain saved in the Relay store at any point after the request completes. If you wish to make sure that the data is retained outside of the scope of the request, you need to call `environment.retain()` directly on the query to ensure it doesn't get deleted. See our section on [Controlling Relay's GC Policy](../../guided-tour/reusing-cached-data/availability-of-data) for more details. +* `fetchQuery` will automatically de-dupe identical network requests (same query and variables) that are in flight at the same time, and that were initiated with `fetchQuery`. + + +### Behavior with `.toPromise()` + +If desired, you can convert the request into a Promise using `**.toPromise()**`. Note that toPromise will start the query and return a Promise that will resolve when the *first* piece of data returns from the server and *cancel further processing*. That means any deferred or 3D data in the query may not be processed. 
**We generally recommend against using toPromise() for this reason.** + +```js +const {fetchQuery} = require('react-relay'); + +fetchQuery( + environment, + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + {id: 4}, +) +.toPromise() // NOTE: don't use, this can cause data to be missing! +.then(data => {...}) +.catch(error => {...}; +``` + +* `toPromise` Returns a promise that will resolve when the first network response is received from the server. If the request fails, the promise will reject. Cannot be cancelled. + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/request-subscription.md b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/request-subscription.md new file mode 100644 index 0000000000000..407d1b2a18b0f --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/request-subscription.md @@ -0,0 +1,55 @@ +--- +id: request-subscription +title: requestSubscription +slug: /api-reference/request-subscription/ +description: API reference for requestSubscription, which imperatively establishes a GraphQL subscription +keywords: + - subscription +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import GraphQLSubscriptionConfig from '../types/GraphQLSubscriptionConfig.md'; +import Disposable from '../types/Disposable.md'; + +## `requestSubscription` + +Imperative API for establishing a GraphQL Subscription. +See also the [`useSubscription`](../use-subscription/) API and the [Guide to Updating Data](../../guided-tour/updating-data/). + +```js +import {graphql, requestSubscription} from 'react-relay'; + +const subscription = graphql` + subscription UserDataSubscription($input: InputData!) { + # ... 
+ } +`; + +function createSubscription(environment: IEnvironment): Disposable { + return requestSubscription(environment, { + subscription, + variables: {input: {userId: '4'}}, + }); +} +``` + +### Arguments + +* `environment`: A Relay Environment +* `config`: `GraphQLSubscriptionConfig` + + + +### Return Type + +* A [`Disposable`](#interface-disposable) that clears the subscription. + + + +### Behavior + +* Imperatively establish a subscription. +* See the [GraphQL Subscriptions Guide](../../guided-tour/updating-data/graphql-subscriptions/) for a more detailed explanation of how to work with subscriptions. + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/store.md b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/store.md new file mode 100644 index 0000000000000..c5cae4a3ceaaa --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/relay-runtime/store.md @@ -0,0 +1,590 @@ +--- +id: store +title: Store +slug: /api-reference/store/ +description: API reference for the Relay store +keywords: + - store +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +The Relay Store can be used to programmatically update client-side data inside [`updater` functions](../../guided-tour/updating-data/graphql-mutations/). The following is a reference of the Relay Store interface. + +Table of Contents: + +- [RecordSourceSelectorProxy](#recordsourceselectorproxy) +- [RecordProxy](#recordproxy) +- [ConnectionHandler](#connectionhandler) + +## RecordSourceSelectorProxy + +The `RecordSourceSelectorProxy` is the type of the `store` that [`updater` functions](../../guided-tour/updating-data/graphql-mutations/) receive as an argument. 
The following is the `RecordSourceSelectorProxy` interface: + +```javascript +interface RecordSourceSelectorProxy { + create(dataID: string, typeName: string): RecordProxy; + delete(dataID: string): void; + get(dataID: string): ?RecordProxy; + getRoot(): RecordProxy; + getRootField(fieldName: string): ?RecordProxy; + getPluralRootField(fieldName: string): ?Array; + invalidateStore(): void; +} +``` + +### `create(dataID: string, typeName: string): RecordProxy` + +Creates a new record in the store given a `dataID` and the `typeName` as defined by the GraphQL schema. Returns a [`RecordProxy`](#recordproxy) which serves as an interface to mutate the newly created record. + +#### Example + +```javascript +const record = store.create(dataID, 'Todo'); +``` + +### `delete(dataID: string): void` + +Deletes a record from the store given its `dataID`. + +#### Example + +```javascript +store.delete(dataID); +``` + +### `get(dataID: string): ?RecordProxy` + +Retrieves a record from the store given its `dataID`. Returns a [`RecordProxy`](#recordproxy) which serves as an interface to mutate the record. + +#### Example + +```javascript +const record = store.get(dataID); +``` + +### `getRoot(): RecordProxy` + +Returns the [`RecordProxy`](#recordproxy) representing the root of the GraphQL document. + +#### Example + +Given the GraphQL document: + +```graphql +viewer { + id +} +``` + +Usage: + +```javascript +// Represents root query +const root = store.getRoot(); +``` + +### `getRootField(fieldName: string): ?RecordProxy` + +Retrieves a root field from the store given the `fieldName`, as defined by the GraphQL document. Returns a [`RecordProxy`](#recordproxy) which serves as an interface to mutate the record. 
+ +#### Example + +Given the GraphQL document: + +```graphql +viewer { + id +} +``` + +Usage: + +```javascript +const viewer = store.getRootField('viewer'); +``` + +### `getPluralRootField(fieldName: string): ?Array` + +Retrieves a root field that represents a collection from the store given the `fieldName`, as defined by the GraphQL document. Returns an array of [`RecordProxies`](#recordproxy). + +#### Example + +Given the GraphQL document: + +```graphql +nodes(first: 10) { + # ... +} +``` + +Usage: + +```javascript +const nodes = store.getPluralRootField('nodes'); +``` + +### `invalidateStore(): void` + +Globally invalidates the Relay store. This will cause any data that was written to the store before invalidation occurred to be considered stale, and will be considered to require refetch the next time a query is checked with `environment.check()`. + +#### Example + +```javascript +store.invalidateStore(); +``` + +After global invalidation, any query that is checked before refetching it will be considered stale: + +```javascript +environment.check(query) === 'stale' +``` + +## RecordProxy + +The `RecordProxy` serves as an interface to mutate records: + +```javascript +interface RecordProxy { + copyFieldsFrom(sourceRecord: RecordProxy): void; + getDataID(): string; + getLinkedRecord(name: string, arguments?: ?Object): ?RecordProxy; + getLinkedRecords(name: string, arguments?: ?Object): ?Array; + getOrCreateLinkedRecord( + name: string, + typeName: string, + arguments?: ?Object, + ): RecordProxy; + getType(): string; + getValue(name: string, arguments?: ?Object): mixed; + setLinkedRecord( + record: RecordProxy, + name: string, + arguments?: ?Object, + ): RecordProxy; + setLinkedRecords( + records: Array, + name: string, + arguments?: ?Object, + ): RecordProxy; + setValue(value: mixed, name: string, arguments?: ?Object): RecordProxy; + invalidateRecord(): void; +} +``` + +### `getDataID(): string` + +Returns the `dataID` of the current record. 
+ +#### Example + +```javascript +const id = record.getDataID(); +``` + +### `getType(): string` + +Gets the type of the current record, as defined by the GraphQL schema. + +#### Example + +```javascript +const type = user.getType(); // User +``` + +### `getValue(name: string, arguments?: ?Object): mixed` + +Gets the value of a field in the current record given the field name. + +#### Example + +Given the GraphQL document: + +```graphql +viewer { + id + name +} +``` + +Usage: + +```javascript +const name = viewer.getValue('name'); +``` + +Optionally, if the field takes arguments, you can pass a bag of `variables`. + +#### Example + +Given the GraphQL document: + +```graphql +viewer { + id + name(arg: $arg) +} +``` + +Usage: + +```javascript +const name = viewer.getValue('name', {arg: 'value'}); +``` + +### `getLinkedRecord(name: string, arguments?: ?Object): ?RecordProxy` + +Retrieves a record associated with the current record given the field name, as defined by the GraphQL document. Returns a `RecordProxy`. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + viewer { + id + name + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const viewer = rootField.getLinkedRecord('viewer'); +``` + +Optionally, if the linked record takes arguments, you can pass a bag of `variables` as well. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + viewer(arg: $arg) { + id + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const viewer = rootField.getLinkedRecord('viewer', {arg: 'value'}); +``` + +### `getLinkedRecords(name: string, arguments?: ?Object): ?Array` + +Retrieves the set of records associated with the current record given the field name, as defined by the GraphQL document. Returns an array of `RecordProxies`. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + nodes { + # ... 
+ } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const nodes = rootField.getLinkedRecords('nodes'); +``` + +Optionally, if the linked record takes arguments, you can pass a bag of `variables` as well. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + nodes(first: $count) { + # ... + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const nodes = rootField.getLinkedRecords('nodes', {count: 10}); +``` + +### `getOrCreateLinkedRecord(name: string, typeName: string, arguments?: ?Object)` + +Retrieves a record associated with the current record given the field name, as defined by the GraphQL document. If the linked record does not exist, it will be created given the type name. Returns a `RecordProxy`. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + viewer { + id + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const newViewer = rootField.getOrCreateLinkedRecord('viewer', 'User'); // Will create if it doesn't exist +``` + +Optionally, if the linked record takes arguments, you can pass a bag of `variables` as well. + +### `setValue(value: mixed, name: string, arguments?: ?Object): RecordProxy` + +Mutates the current record by setting a new value on the specified field. Returns the mutated record. + +Given the GraphQL document: + +```graphql +viewer { + id + name +} +``` + +Usage: + +```javascript +viewer.setValue('New Name', 'name'); +``` + +Optionally, if the field takes arguments, you can pass a bag of `variables`. + +```javascript +viewer.setValue('New Name', 'name', {arg: 'value'}); +``` + +### `copyFieldsFrom(sourceRecord: RecordProxy): void` + +Mutates the current record by copying the fields over from the passed in record `sourceRecord`. 
+ +#### Example + +```javascript +const record = store.get(id1); +const otherRecord = store.get(id2); +record.copyFieldsFrom(otherRecord); // Mutates `record` +``` + +### `setLinkedRecord(record: RecordProxy, name: string, arguments?: ?Object)` + +Mutates the current record by setting a new linked record on the given field name. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + viewer { + id + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const newViewer = store.create(/* ... */); +rootField.setLinkedRecord(newViewer, 'viewer'); +``` + +Optionally, if the linked record takes arguments, you can pass a bag of `variables` as well. + +### `setLinkedRecords(records: Array, name: string, variables?: ?Object)` + +Mutates the current record by setting a new set of linked records on the given field name. + +#### Example + +Given the GraphQL document: + +```graphql +rootField { + nodes { + # ... + } +} +``` + +Usage: + +```javascript +const rootField = store.getRootField('rootField'); +const newNode = store.create(/* ... */); +const newNodes = [...rootField.getLinkedRecords('nodes'), newNode]; +rootField.setLinkedRecords(newNodes, 'nodes'); +``` + +Optionally, if the linked record takes arguments, you can pass a bag of `variables` as well. + +### `invalidateRecord(): void` + +Invalidates the record. This will cause any query that references this record to be considered stale until the next time it is refetched, and will be considered to require a refetch the next time such a query is checked with `environment.check()`. 
+ +#### Example + +```javascript +const record = store.get('4'); +record.invalidateRecord(); +``` + +After invalidating a record, any query that references the invalidated record and that is checked before refetching it will be considered stale: + +```javascript +environment.check(query) === 'stale' +``` + +## ConnectionHandler + +`ConnectionHandler` is a utility module exposed by `relay-runtime` that aids in the manipulation of connections. `ConnectionHandler` exposes the following interface: + +```javascript +interface ConnectionHandler { + getConnection( + record: RecordProxy, + key: string, + filters?: ?Object, + ): ?RecordProxy, + createEdge( + store: RecordSourceProxy, + connection: RecordProxy, + node: RecordProxy, + edgeType: string, + ): RecordProxy, + insertEdgeBefore( + connection: RecordProxy, + newEdge: RecordProxy, + cursor?: ?string, + ): void, + insertEdgeAfter( + connection: RecordProxy, + newEdge: RecordProxy, + cursor?: ?string, + ): void, + deleteNode(connection: RecordProxy, nodeID: string): void +} +``` + +### `getConnection(record: RecordProxy, key: string, filters?: ?Object)` + +Given a record and a connection key, and optionally a set of filters, `getConnection` retrieves a [`RecordProxy`](#recordproxy) that represents a connection that was annotated with a `@connection` directive. 
+ +First, let's take a look at a plain connection: + +```graphql +fragment FriendsFragment on User { + friends(first: 10) { + edges { + node { + id + } + } + } +} +``` + +Accessing a plain connection field like this is the same as other regular fields: + +```javascript +// The `friends` connection record can be accessed with: +const user = store.get(userID); +const friends = user && user.getLinkedRecord('friends'); + +// Access fields on the connection: +const edges = friends && friends.getLinkedRecords('edges'); +``` + +When using [usePaginationFragment](../use-pagination-fragment/), we usually annotate the actual connection field with `@connection` to tell Relay which part needs to be paginated: + +```graphql +fragment FriendsFragment on User { + friends(first: 10, orderby: "firstname") @connection( + key: "FriendsFragment_friends", + ) { + edges { + node { + id + } + } + } +} +``` + +For connections like the above, `ConnectionHandler` helps us find the record: + +```javascript +import {ConnectionHandler} from 'relay-runtime'; + +// The `friends` connection record can be accessed with: +const user = store.get(userID); +const friends = ConnectionHandler.getConnection( + user, // parent record + 'FriendsFragment_friends', // connection key + {orderby: 'firstname'} // 'filters' that is used to identify the connection +); +// Access fields on the connection: +const edges = friends.getLinkedRecords('edges'); +``` + +### Edge creation and insertion + +#### `createEdge(store: RecordSourceProxy, connection: RecordProxy, node: RecordProxy, edgeType: string)` + +Creates an edge given a [`store`](#recordsourceselectorproxy), a connection, the edge node, and the edge type. + +#### `insertEdgeBefore(connection: RecordProxy, newEdge: RecordProxy, cursor?: ?string)` + +Given a connection, inserts the edge at the beginning of the connection, or before the specified `cursor`. 
+ +#### `insertEdgeAfter(connection: RecordProxy, newEdge: RecordProxy, cursor?: ?string)` + +Given a connection, inserts the edge at the end of the connection, or after the specified `cursor`. + +#### Example + +```javascript +const user = store.get(userID); +const friends = ConnectionHandler.getConnection(user, 'FriendsFragment_friends'); +const newFriend = store.get(newFriendId); +const edge = ConnectionHandler.createEdge(store, friends, newFriend, 'UserEdge'); + +// No cursor provided, append the edge at the end. +ConnectionHandler.insertEdgeAfter(friends, edge); + +// No cursor provided, insert the edge at the front: +ConnectionHandler.insertEdgeBefore(friends, edge); +``` + +### `deleteNode(connection: RecordProxy, nodeID: string): void` + +Given a connection, deletes any edges whose node id matches the given id. + +#### Example + +```javascript +const user = store.get(userID); +const friends = ConnectionHandler.getConnection(user, 'FriendsFragment_friends'); +ConnectionHandler.deleteNode(friends, idToDelete); +``` + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/CacheConfig.md b/website/versioned_docs/version-v15.0.0/api-reference/types/CacheConfig.md new file mode 100644 index 0000000000000..e7690fbdf7259 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/CacheConfig.md @@ -0,0 +1,8 @@ +#### Type `CacheConfig` + +* An object with the following fields: + * `force`: *_[Optional]_* A boolean. If true, causes a query to be issued unconditionally, regardless of the state of any configured response cache. + * `poll`: *_[Optional]_* A number. Causes a query to live-update by polling at the specified interval, in milliseconds. (This value will be passed to `setTimeout`). + * `liveConfigId`: *_[Optional]_* A string. Causes a query to live-update by calling GraphQLLiveQuery; it represents a configuration of gateway when doing live query. + * `metadata`: *_[Optional]_* An object. User-supplied metadata. 
+ * `transactionId`: *_[Optional]_* A string. A user-supplied value, intended for use as a unique id for a given instance of executing an operation. diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/Disposable.md b/website/versioned_docs/version-v15.0.0/api-reference/types/Disposable.md new file mode 100644 index 0000000000000..76dcc19d99490 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/Disposable.md @@ -0,0 +1,4 @@ +#### Interface `Disposable` + +* An object with the following key: + * `dispose`: `() => void`. Disposes of the resource. diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/GraphQLSubscriptionConfig.md b/website/versioned_docs/version-v15.0.0/api-reference/types/GraphQLSubscriptionConfig.md new file mode 100644 index 0000000000000..901d88b94322c --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/GraphQLSubscriptionConfig.md @@ -0,0 +1,17 @@ +import SelectorStoreUpdater from './SelectorStoreUpdater.md'; +import CacheConfig from './CacheConfig.md'; + +#### Type `GraphQLSubscriptionConfig` + +* An object with the following fields: + * `cacheConfig`: *_[Optional]_* [`CacheConfig`](#type-cacheconfig) + * `subscription`: `GraphQLTaggedNode`. A GraphQL subscription specified using a `graphql` template literal + * `variables`: The variables to pass to the subscription + * `onCompleted`: *_[Optional]_* `() => void`. An optional callback that is executed when the subscription is established + * `onError`: *_[Optional]_* `(Error) => {}`. An optional callback that is executed when an error occurs + * `onNext`: *_[Optional]_* `(TSubscriptionPayload) => {}`. An optional callback that is executed when new data is received + * `updater`: *_[Optional]_* [`SelectorStoreUpdater`](#type-selectorstoreupdater). 
+ + + + diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/MutationConfig.md b/website/versioned_docs/version-v15.0.0/api-reference/types/MutationConfig.md new file mode 100644 index 0000000000000..4a517246ae310 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/MutationConfig.md @@ -0,0 +1,31 @@ +import CacheConfig from './CacheConfig.md'; +import SelectorStoreUpdater from './SelectorStoreUpdater.md'; +import UploadableMap from './UploadableMap.md'; + +#### Type `MutationConfig` + +* An object with the following fields: + * `cacheConfig`: *_[Optional]_* [`CacheConfig`](#type-cacheconfig) + * `mutation`: `GraphQLTaggedNode`. A mutation specified using a GraphQL literal + * `onError`: *_[Optional]_* `(Error) => void`. An optional callback executed if the mutation results in an error. + * `onCompleted`: *_[Optional]_* `($ElementType) => void`. An optional callback that is executed when the mutation completes. + * The value passed to `onCompleted` is the the mutation fragment, as read out from the store, **after** updaters and declarative mutation directives are applied. This means that data from within unmasked fragments will not be read, and records that were deleted (e.g. by `@deleteRecord`) may also be null. + * `onUnsubscribe`: *_[Optional]_* `() => void`. An optional callback that is executed when the mutation the mutation is unsubscribed, which occurs when the returned `Disposable` is disposed. + * `optimisticResponse`: *_[Optional]_* An object whose type matches the raw response type of the mutation. Make sure you decorate your mutation with `@raw_response_type` if you are using this field. + * `optimisticUpdater`: *_[Optional]_* [`SelectorStoreUpdater`](#type-selectorstoreupdater). A callback that is executed when `commitMutation` is called, after the `optimisticResponse` has been normalized into the store. + * `updater`: *_[Optional]_* [`SelectorStoreUpdater`](#type-selectorstoreupdater). 
A callback that is executed when a payload is received, after the payload has been written into the store. + * `uploadables`: *_[Optional]_* [`UploadableMap`](#type-uploadablemap). An optional uploadable map. + * `variables`: `$ElementType`. The variables to pass to the mutation. + + + + + + + +#### Type `MutationParameters` + +* An object with the following fields: + * `response`: An object + * `variables`: An object + * `rawResponse`: An optional object diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/SelectorStoreUpdater.md b/website/versioned_docs/version-v15.0.0/api-reference/types/SelectorStoreUpdater.md new file mode 100644 index 0000000000000..02f93a2aebd71 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/SelectorStoreUpdater.md @@ -0,0 +1,6 @@ +import useBaseUrl from '@docusaurus/useBaseUrl'; + +#### Type `SelectorStoreUpdater` + +* A function with signature `(store: RecordSourceSelectorProxy, data) => void` +* This interface allows you to *imperatively* write and read data directly to and from the Relay store. This means that you have full control over how to update the store in response to the subscription payload: you can *create entirely new records*, or *update or delete existing ones*. The full API for reading and writing to the Relay store is available here. diff --git a/website/versioned_docs/version-v15.0.0/api-reference/types/UploadableMap.md b/website/versioned_docs/version-v15.0.0/api-reference/types/UploadableMap.md new file mode 100644 index 0000000000000..0050b91169e39 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/api-reference/types/UploadableMap.md @@ -0,0 +1,3 @@ +#### Type `UploadableMap` + +* An object whose values are [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) or [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob). 
diff --git a/website/versioned_docs/version-v15.0.0/community/learning-resources.md b/website/versioned_docs/version-v15.0.0/community/learning-resources.md new file mode 100644 index 0000000000000..a2dd71c5a3a05 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/community/learning-resources.md @@ -0,0 +1,34 @@ +--- +id: learning-resources +title: Community Learning Resources +slug: /community-learning-resources/ +--- + +import useBaseUrl from '@docusaurus/useBaseUrl'; +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## Relay example projects + +These projects serve as an example of how to use Relay in real world applications. + +- [github.com/relayjs/relay-examples](https://github.com/relayjs/relay-examples) + +## Guides and articles: + +- [How to use @argumentsDefinitions to define local variables to your fragments](https://medium.com/entria/relay-modern-argumentdefinitions-d53769dbb95d) (by Entria) +- [Deep Dive of Updater Relay Store function. 
How to update your store properly after a mutation or subscription](https://medium.com/entria/wrangling-the-client-store-with-the-relay-modern-updater-function-5c32149a71ac) (by Entria) +- [Optimistic Update: how to update your UI before server responds](https://medium.com/entria/relay-modern-optimistic-update-a09ba22d83c9) (by Entria) +- [Relay Network Deep Dive - how to incrementally improve your network layer to manage complex data fetching requirements](https://medium.com/entria/relay-modern-network-deep-dive-ec187629dfd3) (by Entria) +- [Relay Modern with TypeScript - how to configure Relay Modern to make it with TypeScript](https://medium.com/@sibelius/relay-modern-migration-to-typescript-c26ab0ee749c) (by @sibelius) +- [Collection of random thoughts and discoveries around Relay](https://mrtnzlml.com/docs/relay) + + + +## Relay Modern articles + +Note: you can find many more resources by looking at the Relay Modern Documentation. + + + + diff --git a/website/versioned_docs/version-v15.0.0/debugging/declarative-mutation-directives.md b/website/versioned_docs/version-v15.0.0/debugging/declarative-mutation-directives.md new file mode 100644 index 0000000000000..cfe156a170312 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/debugging/declarative-mutation-directives.md @@ -0,0 +1,34 @@ +--- +id: declarative-mutation-directives +title: Debugging Declarative Mutation Directives +slug: /debugging/declarative-mutation-directives/ +description: Debugging declarative mutation directives +keywords: +- debugging +- troubleshooting +- declarative mutation directive +- deleteRecord +- handlerProvider +- appendEdge +- prependEdge +- appendNode +- prependNode +--- + +import FbEnvHandlerExample from './fb/FbEnvHandlerExample.md'; + +If you see an error similar to: + +``` +RelayFBHandlerProvider: No handler defined for `deleteRecord`. [Caught in: An uncaught error was thrown inside `RelayObservable`.] 
+``` + +or + +``` +RelayModernEnvironment: Expected a handler to be provided for handle `deleteRecord`. +``` + +This probably means that you are using a Relay environment to which a `handlerProvider` is passed. However, the handler provider does not know how to accept the handles `"deleteRecord"`, `"appendEdge"` or `"prependEdge"`. If this is the case, you should return `MutationHandlers.DeleteRecordHandler`, `MutationHandlers.AppendEdgeHandler`, or `MutationHandlers.PrependEdgeHandler` respectively (these can be imported from `relay-runtime`). + + diff --git a/website/versioned_docs/version-v15.0.0/debugging/disallowed-id-types-error.md b/website/versioned_docs/version-v15.0.0/debugging/disallowed-id-types-error.md new file mode 100644 index 0000000000000..a3febedc1c864 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/debugging/disallowed-id-types-error.md @@ -0,0 +1,43 @@ +--- +id: disallowed-id-types-error +title: Disallowed Types for `id` Fields +slug: /debugging/disallowed-id-types-error +description: Disallowed types for `id` fields +keywords: +- debugging +- troubleshooting +- disallowed +- id +- Object Identification +--- + +import DocsRating from '@site/src/core/DocsRating'; + +If you see an error from the compiler that reads something like: + +``` +Disallowed type `String` of field `id` on parent type `Foo` cannot be used by Relay to identify entities +``` + +This means that your GraphQL schema defines an object with a field named `id` that doesn't have a valid type. Valid types for this field are `ID` or `ID!` unless configured otherwise. While there may be a valid reason in your application to have that field defined that way, the Relay compiler and runtime will mishandle that field and cause refresh or data updating issues. + +## Disallowing `id` fields without type `ID` + +Recall that Relay uses [Object Identification](../../guides/graphql-server-specification/#object-identification) to know which GraphQL objects to refetch. 
In the usual case, those GraphQL objects implement the [`Node` interface](https://graphql.org/learn/global-object-identification/#node-interface) and thus come with an `id` field with type `ID`. However, there are types in your GraphQL model that may not require unique identification. For that reason, Relay (by default) does not restrict object definitions, allowing `id` fields with non-`ID` types (e.g. `String`) to be defined. + +This poses a bit of difficulty to both Relay's compiler and runtime. In short, the runtime and compiler only properly handle `id` fields as defined by the `Node` interface. Any selections made with non-`Node` `id` fields will likely exhibit undesirable and unintended Relay behavior on your components (e.g. components not re-rendering on re-fetched data). + +### The significance of the `ID` type + +[Global Object Identification in GraphQL](https://graphql.org/learn/global-object-identification/)) is what underlies Relay's Object Identification. The `id` field supplied by the `Node` interface is specified to be a unique identifier that can be used for storage or caching. + +## Fix: Define your `id` fields as `ID` + +To ensure Relay correctly manages objects fetched to your application, here are two recommended courses of action: + +* Ensure all fields named `id` are typed with `ID` +* Rename any fields named `id` (with a type that isn't `ID`) to a different name (e.g. `special_purpose_id`) + +If your application truly requires that `id` field's definition to remain as-is and you are aware of the problems that may arise, you can add your GraphQL type and the type of its `id` field to the allowlist in `nonNodeIdFields` of the [Relay Compiler's Configuration](https://github.com/facebook/relay/tree/main/packages/relay-compiler). Note that this will only bypass the error for that combination of object type and `id` field type. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/debugging/inconsistent-typename-error.md b/website/versioned_docs/version-v15.0.0/debugging/inconsistent-typename-error.md new file mode 100644 index 0000000000000..6944b91f4c2dd --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/debugging/inconsistent-typename-error.md @@ -0,0 +1,45 @@ +--- +id: inconsistent-typename-error +title: Inconsistent Typename Error +slug: /debugging/inconsistent-typename-error/ +description: Debugging inconsistent typename errors in Relay +keywords: +- debugging +- troubleshooting +- inconsistent typename +- RelayResponseNormalizer +- globally unique id +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## Inconsistent `__typename` error + +The GraphQL server likely violated the globally unique ID requirement by returning the same ID for different objects. + +If you're seeing an error like: + +> `RelayResponseNormalizer: Invalid record '543'. Expected __typename to be consistent, but the record was assigned conflicting types Foo and Bar. The GraphQL server likely violated the globally unique ID requirement by returning the same ID for different objects.` + +the server implementation of one of the types is not spec compliant. We require the `id` field to be globally unique. This is a problem because Relay stores objects in a normalized key-value store and one of the object just overwrote the other. This means your app is broken in some subtle or less subtle way. + +## Common cause + +The most common reason for this error is that 2 objects backed by an ID are using the plain ID as the id field, such as a `User` and `MessagingParticipant`. + +Less common reasons could be using array indices or auto-increment IDs from some database that might not be unique to this type. + +## Fix: Make your type spec compliant + +The best way to fix this is to make your type spec compliant. 
For the case of 2 different types backed by the same ID, a common solution is to prefix the ID of the less widely used type with a unique string and then base64 encode the result. This can be accomplished fairly easily by implementing a `NodeTokenResolver` using the helper trait `NodeTokenResolverWithPrefix`. When the `NodeTokenResolver` is registered is allows you to load your type using `node(id: $yourID)` GraphQL call and your type can return an encoded ID. + + + +### Example + +See [D17996531](https://www.internalfb.com/diff/D17996531) for an example on how to fix this. It created a `FusionPlatformComponentsCategoryNodeResolver` added the trait `TGraphQLNodeMixin` to the conflicting class so that it generates the base64 encoded ID. + + + + diff --git a/website/versioned_docs/version-v15.0.0/debugging/relay-devtools.md b/website/versioned_docs/version-v15.0.0/debugging/relay-devtools.md new file mode 100644 index 0000000000000..de8ceda919dd3 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/debugging/relay-devtools.md @@ -0,0 +1,73 @@ +--- +id: relay-devtools +title: Relay DevTools +slug: /debugging/relay-devtools/ +description: Debugging guide for the Relay DevTools +keywords: +- debugging +- troubleshooting +- extension +- devtools +- browser +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## Installation + + + +### Internal version (preferred) + +The internal version of devtools has the latest updates and the process of installation will be much faster. + +1. Before downloading the new Devtools, make sure you've deleted all older versions of the extension. +2. Join [Relay Support](https://fb.workplace.com/groups/relay.support) group and you will automatically be added to the `cpe_relay_devtools_extension` gatekeeper. +3. Wait 20-30 minutes, and it should be downloaded on your Chrome browser +4. 
Or run `sudo soloctl -i` on your machine to get the extension immediately + +### Internal Version for Edgium users + +1. On `C:\Users\\AppData\Local\Google\Chrome\User Data\\Extensions` search for files manifest.json with Relay Developer Tools on it +2. Get the path to this folder e.g `...\Extensions\\\` +3. On edge://extensions/ click load upacked (you might need to Allow extensions for other stores). + +### External version + +The external version of devtools is less prone to bugs but does not always contain the latest updates and you have to download the extension from the chrome store. +Follow this link and install it from the [chrome store](https://chrome.google.com/webstore/detail/relay-developer-tools/ncedobpgnmkhcmnnkcimnobpfepidadl). + + + + + +Follow this link and install it from the [chrome store](https://chrome.google.com/webstore/detail/relay-developer-tools/ncedobpgnmkhcmnnkcimnobpfepidadl). + + + +--- + +## How to Navigate the Relay DevTools Extension + +You should have a new tab called Relay in your Chrome DevTools. In this tab, you will be able to select between 2 panels: the **network panel** and the **store panel**. + +### Network Panel + +The network panel allows users to view individual requests in an active environment. Users can scroll through these requests, search for the requests and view the details of each request. The details of each request includes the status, the variables, and the responses for the request. + +### Store Panel + +The store panel allows users to view individual records from the store data in an active environment. Users can scroll through these records, search for the records, and view the details of each request. Users can also copy the the store data in JSON format to the clipboard. The details of each record includes the ID, the typename, and all of the data in the record. If one of the fields in the data is a reference to another record, users can click on the reference, which will take them to that record. 
+ +--- + +## Multiple Environments + +As you switch through the store and network panels, you'll notice that there is also a dropdown menu on the left side of the developer tools. This dropdown allows users to switch between environments. The requests in the network tab and the store data are grouped by environment, so users can easily shuffle between active environments. + +## Give Feedback + +Look in the top-right corner of the extension panel! + + diff --git a/website/versioned_docs/version-v15.0.0/debugging/why-null.md b/website/versioned_docs/version-v15.0.0/debugging/why-null.md new file mode 100644 index 0000000000000..0f2a1c9c453e3 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/debugging/why-null.md @@ -0,0 +1,116 @@ +--- +id: why-null +title: "Why Is My Field Null?" +slug: /debugging/why-null/ +description: Get help figuring out why a given field is unexpectedly null. +keywords: +- "null" +- missing +- optional +- nullthrows +--- + +import {FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import DocsRating from '@site/src/core/DocsRating'; + +There are a number of reasons that a field read by Relay can be null and some of them are obscure or unintuitive. When debugging an unexpectedly null value, it can be helpful to understand both the common cases and edge cases that can cause a field to be read as null. This document enumerates the cases that can lead to null or missing values with tips for figuring determining which case you are in. + +## Server Returned Null + +The simplest reason a field might be null is that the server explicitly returned null. This can happen in two cases: + +1. The server’s field resolver returned an explicit null +2. The field resolver throws. In this case GraphQL will return null for that field. *This is true even if the server resolver’s return type is non-nullable.* The one exceptions is fields annotated as non-null. In that case server should *never* return null. 
If an exception is encountered the entire parent object will be nulled out. + + + +:::note +At Meta, non-nullable fields are implemented using [`KillsParentOnException`](https://www.internalfb.com/intern/wiki/Graphql-for-hack-developers/fields/return-type/#non-nullable-fields). +::: + + + +**🕵️‍♀️ How to tell:** Inspect the server’s response using Relay Dev tools, or in your browser’s dev tools’s network tab, to see if the field is null. + +## Graph Relationship Change + +If a different query/mutation/subscription observes a relationship change in the graph, you may end up trying to read fields off of an object which your query never fetched. + +Imagine you have a query that reads your best friend’s name: + +```graphql +query MyQuery { + me { + best_friend { + # id: 1 + name + } + } +} +``` + +After you get your query response, *who* your best friend is *changes* on the server. Then a *different* query/mutation/subscription fetches a different set of fields off of `best_friend`. + +```graphql +query OtherQuery { + me { + best_friend { + # new id: 2 + # Note: name is not fetched here + age + } + } +} +``` + +Because the Relay store is normalized, we will update the `me` record to indicate that `best_friend` linked field now points to the user with ID 2, and the only information we know about that user is their age. + +This will trigger a rerender of `MyQuery`. However, when we try to read the `name` field off of the user with ID 2, we won’t find it, since the only thing we know about the user with ID 2 is their `age`. Note that a relationship “change” in this case, could also mean a relationship that is new. For example, if you start with no best friend but a subsequent response returns *some* best friend, but does not fetch all fields your component needs. 
+ +**Note**: In theory, Relay *could* refetch your query when this state is encountered, but some queries are not safe to re-issue arbitrarily, and more generally, UI state changing in a way that’s not tied to a direct user action can lead to confusion. For this reason, we have chosen not to perform refetches in this scenario. + +**🕵️‍♀️ How to tell:** You can place a breakpoint/`console.log` at the finale return statement of `readWithIdentifier` in `FragmentResource` ([code pointer](https://github.com/facebook/relay/blob/2b9876fcbf0845cd23728d4d720712525ff424c4/packages/react-relay/relay-hooks/FragmentResource.js#L475). This is the point in Relay at which we know that we are missing data, but there is not query in flight to get it. + +## Inconsistent Server Response + +This is a **rare edge case**, *but* if the server does not correctly implement the [field stability](https://graphql.org/learn/global-object-identification/#field-stability) semantics of the id field, it’s possible that a field could be present in one part of the response, but *explicitly null* in another. + +```graphql +{ + me { + id: 1 + name: "Alice" + } + me_elsewhere_in_the_graph { + id: 1 # Note this is the same as the `me` field above... + name: null + } +} +``` + +In this case, Relay first learns that user 1’s `name` is Alice, but later in the query finds that user 1’s `name` has now `null`. Because Relay stores data in a normalized store, user 1 can only have one value for `name` and Relay will end in a state where user 1’s `name` is `null`. + +**🕵️‍♀️ How to tell:** Relay is smart enough to detect when this has happened, and will [log an error to the console](https://github.com/facebook/relay/blob/2b9876fcbf0845cd23728d4d720712525ff424c4/packages/relay-runtime/store/RelayResponseNormalizer.js#L505) in dev that looks like: “RelayResponseNormalizer: Invalid record. The record contains two instances of the same id: 1 with conflicting field, name and its values: Alice and null.". 
Additionally, you can manually inspect the query response. + +Note that if the unstable field is a linked field (edge to another object), this type of bug can cause a Graph Relationship Change (described above) to occur *within a single response*. For example, if a user with the same `id` appears in two places in the response, but their `best_friend` is different in those two locations. + +**🕵️‍♀️ How to tell:** Relay is also smart enough to detect this case, and will show a [similar console warning](https://github.com/facebook/relay/blob/2b9876fcbf0845cd23728d4d720712525ff424c4/packages/relay-runtime/store/RelayResponseNormalizer.js#L844) in dev. + + + +:::note +Because these errors existing in the code base and can cause noisy console outout, these warnings are [disabled](https://www.internalfb.com/code/www/[5b26a6bd37e8]/html/shared/core/WarningFilter.js?lines=559) in dev mode. +::: + + + + +## Client-side Deletion or Incomplete Update + +Imperative store updates, or optimistic updates could have deleted the record or field. If an imperative store update, or optimistic update, writes a new record to the store, it may not supply a value for a field which you expected to be able to read. This is a fundamental problem, since an updater cannot statically know all the data that might be accessed off of a new object. + +**🕵️‍♀️ How to tell:** Due to React and Relay’s batching, it’s not always possible to associate a component update with the store update that triggered it. Here, your best bet is to set a breakpoint in your component for when your value is null, and then use the Relay Dev Tools to look at the last few updates. + +This can happen due to a newly created object which did not supply a specific field or, as mentioned above, an update which causes a new or changed relationship in the graph. In this case, use the “How do tell” tip from that section. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/editor-support.md b/website/versioned_docs/version-v15.0.0/editor-support.md new file mode 100644 index 0000000000000..0c4e7810f7c79 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/editor-support.md @@ -0,0 +1,55 @@ +--- +id: editor-support +title: Editor Support +slug: /editor-support/ +keywords: +- LSP +- Language Server Protocol +- VS Code +- VSCode +--- + +import useBaseUrl from '@docusaurus/useBaseUrl'; + +*TL;DR: We have a [VS Code Extension](https://marketplace.visualstudio.com/items?itemName=meta.relay)* + +--- + +The Relay compiler has a rich understanding of the GraphQL embedded in your code. We want to use that understanding to imporve the developer experience of writing apps with Relay. So, starting in [v14.0.0](https://github.com/facebook/relay/releases/tag/v14.0.0), the new Rust Relay compiler can provide language features directly in your code editor. This means: + +#### Relay compiler errors surface as red squiggles directly in your editor + + + +#### Autocomplete throughout your GraphQL tagged template literals + + + +#### Hover to see type information and documentation about Relay-specific features + + + +#### `@deprecated` fields are rendered using ~~strikethrough~~ + + + +#### Click-to-definition for fragments, fields and types + + + +#### Quick fix suggestions for common errors + + + +## Language Server + +The editor support is implemented using the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) which means it can be used by a variety of editors, but in tandem with this release, [Terence Bezman](https://twitter.com/b_ez_man) from [Coinbase](https://www.coinbase.com/) has contributed an official VS Code extension. + +[**Find it here!**](https://marketplace.visualstudio.com/items?itemName=meta.relay) + +## Why Have a Relay-Specific Editor Extension? 
+ +The GraphQL foundation has an official language server and [VS Code extension](https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql) which provides editor support for GraphQL generically. This can provide a good baseline experience, but for Relay users, getting this information directly from the Relay compiler offers a number of benefits: + +* Relay compiler errors can surface directly in the editor as “problems”, often with suggested quick fixes +* Hover information is aware Relay-specific features and directives and can link out to relevant documentation diff --git a/website/versioned_docs/version-v15.0.0/error-reference/unknown-field.md b/website/versioned_docs/version-v15.0.0/error-reference/unknown-field.md new file mode 100644 index 0000000000000..e8e7099bf5f23 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/error-reference/unknown-field.md @@ -0,0 +1,36 @@ +--- +id: unknown-field +title: "Why Is My Field Not Found?" +slug: /error-reference/unknown-field/ +description: Get help figuring out why a given field is not found. +keywords: +- no such field on type +- missing +- field +- type +- compilation +- error +--- + +import {FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import DocsRating from '@site/src/core/DocsRating'; + +## [ERROR] Error in the project `some_project`: ✖︎ The type `Some_Type` has no field `some_unknown_field`. + +### Field name typo + +In case of a missing field in a type, the Relay compiler tries to find and suggest a field replacement. For example: ```Error in the project `some_project`: ✖︎ The type `UserInfo` has no field `mail`. Did you mean `email`?``` + + + +### The field exists in another schema + +Relay Compiler uses schemas in order to resolve types and their fields. The type's schema is determined by the file path and the mapping from file path to schema, which is configured in the "schema" or "schemaDir" properties of your Relay compiler config. 
If you expect this field to exist, make sure you're using the right schema. + +:::note +At meta there are various project config files that are listed [here](https://www.internalfb.com/intern/wiki/Relay-team/Rust_compiler_resources/#where-are-the-project-co). +::: + + + + diff --git a/website/versioned_docs/version-v15.0.0/getting-started/installation-and-setup.md b/website/versioned_docs/version-v15.0.0/getting-started/installation-and-setup.md new file mode 100644 index 0000000000000..57a685e534981 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/getting-started/installation-and-setup.md @@ -0,0 +1,150 @@ +--- +id: installation-and-setup +title: Installing in a Project +slug: /getting-started/installation-and-setup/ +description: Relay installation and setup guide +keywords: +- installation +- setup +- compiler +- babel-plugin-relay +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +# Installation + +In many situations, the easiest way to install Relay is with the `create-relay-app` package written by Tobias Tengler. Contrary to what the name suggests, this package *installs* Relay on your existing app. + +It currently supports apps built on Next, Vite, and Create React App. If you aren't on one of those platforms, or if it doesn't work for you for some reason, proceed to the manual steps below. + +To run it, make sure you have a clean working directory and run: + +``` +npm create @tobiastengler/relay-app +``` + +(or use `yarn` or `pnpm` instead of `npm` as you prefer). + +When it's done it will print some "Next Steps" for you to follow. + +More details about this script can be found at its [GitHub repository](https://github.com/tobias-tengler/create-relay-app). 
+ +* * * + +# Manual Installation + +Install React and Relay using `yarn` or `npm`: + +```sh +yarn add react react-dom react-relay +``` + +## Set up the compiler + +Relay's ahead-of-time compilation requires the [Relay Compiler](../../guides/compiler/), which you can install via `yarn` or `npm`: + +```sh +yarn add --dev relay-compiler +``` + +This installs the bin script `relay-compiler` in your node_modules folder. It's recommended to run this from a `yarn`/`npm` script by adding a script to your `package.json` file: + +```js +"scripts": { + "relay": "relay-compiler" +} +``` + +## Compiler configuration + +Create the configuration file: + +```javascript +// relay.config.js +module.exports = { + // ... + // Configuration options accepted by the `relay-compiler` command-line tool and `babel-plugin-relay`. + src: "./src", + language: "javascript", // "javascript" | "typescript" | "flow" + schema: "./data/schema.graphql", + excludes: ["**/node_modules/**", "**/__mocks__/**", "**/__generated__/**"], +} +``` + +This configuration also can be specified in `"relay"` section of the `package.json` file. +For more details, and configuration options see: [Relay Compiler Configuration](https://github.com/facebook/relay/tree/main/packages/relay-compiler) + + +## Set up babel-plugin-relay + +Relay requires a Babel plugin to convert GraphQL to runtime artifacts: + +```sh +yarn add --dev babel-plugin-relay graphql +``` + +Add `"relay"` to the list of plugins your `.babelrc` file: + +```javascript +{ + "plugins": [ + "relay" + ] +} +``` + +Please note that the `"relay"` plugin should run before other plugins or +presets to ensure the `graphql` template literals are correctly transformed. See +Babel's [documentation on this topic](https://babeljs.io/docs/plugins/#pluginpreset-ordering). + +Alternatively, instead of using `babel-plugin-relay`, you can use Relay with [babel-plugin-macros](https://github.com/kentcdodds/babel-plugin-macros). 
After installing `babel-plugin-macros` and adding it to your Babel config: + +```javascript +const graphql = require('babel-plugin-relay/macro'); +``` + +## Running the compiler + +After making edits to your application files, just run the `relay` script to generate new compiled artifacts: + +```sh +yarn run relay +``` + +Alternatively, you can pass the `--watch` option to watch for file changes in your source code and automatically re-generate the compiled artifacts (**Note:** Requires [watchman](https://facebook.github.io/watchman) to be installed): + +```sh +yarn run relay --watch +``` + +For more details, check out our [Relay Compiler docs](../../guides/compiler/). + +## JavaScript environment requirements + +The Relay packages distributed on NPM use the widely-supported ES5 +version of JavaScript to support as many browser environments as possible. + +However, Relay expects modern JavaScript global types (`Map`, `Set`, +`Promise`, `Object.assign`) to be defined. If you support older browsers and +devices which may not yet provide these natively, consider including a global +polyfill in your bundled application, such as [core-js][] or +[@babel/polyfill](https://babeljs.io/docs/usage/polyfill/). 
+ +A polyfilled environment for Relay using [core-js][] to support older browsers +might look like: + +```javascript +require('core-js/es6/map'); +require('core-js/es6/set'); +require('core-js/es6/promise'); +require('core-js/es6/object'); + +require('./myRelayApplication'); +``` + +[core-js]: https://github.com/zloirock/core-js + + + diff --git a/website/versioned_docs/version-v15.0.0/getting-started/prerequisites.md b/website/versioned_docs/version-v15.0.0/getting-started/prerequisites.md new file mode 100644 index 0000000000000..2c8cdd98a88c6 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/getting-started/prerequisites.md @@ -0,0 +1,49 @@ +--- +id: prerequisites +title: Prerequisites +slug: /getting-started/prerequisites/ +description: Prerequisites for setting up Relay +keywords: +- prerequisites +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + +Before getting started with Relay, bear in mind that we assume that the following infrastructure has already been set up, as well as some level of familiarity with the topics below. + +## JavaScript + +Relay is a framework built in JavaScript, so we assume familiarity with the JavaScript language. + +## React + +Relay is a framework for data management with the primary supported binding for React applications, so we assume that you are already familiar with [React](https://reactjs.org/). + +## GraphQL + +We also assume basic understanding of [GraphQL](http://graphql.org/learn/). In order to start using Relay, you will also need: + +### A GraphQL Schema + +A description of your data model with an associated set of resolve methods that know how to fetch any data your application could ever need. + +GraphQL is designed to support a wide range of data access patterns. In order to understand the structure of an application's data, Relay requires that you follow certain conventions when defining your schema. 
These are documented in the [GraphQL Server Specification](../../guides/graphql-server-specification). + +- **[graphql-js](https://github.com/graphql/graphql-js)** on [npm](https://www.npmjs.com/package/graphql) + + General-purpose tools for building a GraphQL schema using JavaScript + +- **[graphql-relay-js](https://github.com/graphql/graphql-relay-js)** on [npm](https://www.npmjs.com/package/graphql-relay) + + JavaScript helpers for defining connections between data, and mutations, in a way that smoothly integrates with Relay. + +### A GraphQL Server + +Any server can be taught to load a schema and speak GraphQL. Our [examples](https://github.com/relayjs/relay-examples) use Express. + +- **[express-graphql](https://github.com/graphql/express-graphql)** on [npm](https://www.npmjs.com/package/express-graphql) + + + diff --git a/website/versioned_docs/version-v15.0.0/getting-started/step-by-step-guide.md b/website/versioned_docs/version-v15.0.0/getting-started/step-by-step-guide.md new file mode 100644 index 0000000000000..33b519abb5284 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/getting-started/step-by-step-guide.md @@ -0,0 +1,314 @@ +--- +id: step-by-step-guide +title: Server Setup Example +slug: /getting-started/step-by-step-guide/ +description: Step-by-step guide for setting up Relay +keywords: +- setup +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + +Relay is a framework for managing and declaratively fetching GraphQL data. It allows developers to declare *what* data each component needs via GraphQL, and then aggregate these dependencies and efficiently fetch the data in fewer round trips. In this guide we'll introduce the key concepts for using Relay in a React app one at a time. + +## Step 1: Create React App + +For this example we'll start with a standard install of [Create React App](https://create-react-app.dev). 
Create React App makes it easy to get a fully configured React app up and running and also supports configuring Relay. To get started, create a new app with: + +```bash +# NPM +npx create-react-app your-app-name + +# Yarn +yarn create react-app your-app-name +``` + +At this point we should be able to change to the app's directory and run it: + +```bash +# NPM +cd your-app-name +npm start + +# Yarn +cd your-app-name +yarn start +``` + +For troubleshooting and more setup options, see the [Create React App documentation](https://create-react-app.dev/docs/getting-started). + +## Step 2: Fetch GraphQL (without Relay) + +If you're exploring using GraphQL with Relay, we highly recommend starting with a basic approach and using as few libraries as possible. GraphQL servers can generally be accessed using plain-old HTTP requests, so we can use the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) to request some data from our server. For this guide we'll use GitHub's GraphQL API as the server, but if you already have a GraphQL server feel free to use that instead. + +### 2.1: GitHub GraphQL Authentication + +To start we'll need an authentication token for the GitHub API (if you're using your own GraphQL endpoint, you can skip this step): + +* Open [github.com/settings/tokens](https://github.com/settings/tokens). +* Ensure that at least the `repo` scope is selected. +* Generate a token +* Create a file `./your-app-name/.env.local` and add the following contents, replacing `` with your authentication token: + +``` +# your-app-name/.env.local +REACT_APP_GITHUB_AUTH_TOKEN= +``` + +### 2.2: A fetchGraphQL Helper + +Next let's update the home screen of our app to show the name of the Relay repository. We'll start with a common approach to fetching data in React, calling our fetch function after the component mounts (note: later we'll see some limitations of this approach and a better alternative that works with React Concurrent Mode and Suspense). 
First we'll create a helper function to load data from the server. Again, this example will use the GitHub API, but feel free to replace it with the appropriate URL and authentication mechanism for your own GraphQL server: + +```javascript +// your-app-name/src/fetchGraphQL.js +async function fetchGraphQL(text, variables) { + const REACT_APP_GITHUB_AUTH_TOKEN = process.env.REACT_APP_GITHUB_AUTH_TOKEN; + + // Fetch data from GitHub's GraphQL API: + const response = await fetch('https://api.github.com/graphql', { + method: 'POST', + headers: { + Authorization: `bearer ${REACT_APP_GITHUB_AUTH_TOKEN}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + query: text, + variables, + }), + }); + + // Get the response as JSON + return await response.json(); +} + +export default fetchGraphQL; +``` + +### 2.3: Fetching GraphQL From React + +Now we can use our `fetchGraphQL` function to fetch some data in our app. Open `src/App.js` and edit it as follows: + +```javascript +// your-app-name/src/App.js +import React from 'react'; +import './App.css'; +import fetchGraphQL from './fetchGraphQL'; + +const { useState, useEffect } = React; + +function App() { + // We'll load the name of a repository, initially setting it to null + const [name, setName] = useState(null); + + // When the component mounts we'll fetch a repository name + useEffect(() => { + let isMounted = true; + fetchGraphQL(` + query RepositoryNameQuery { + # feel free to change owner/name here + repository(owner: "facebook" name: "relay") { + name + } + } + `).then(response => { + // Avoid updating state if the component unmounted before the fetch completes + if (!isMounted) { + return; + } + const data = response.data; + setName(data.repository.name); + }).catch(error => { + console.error(error); + }); + + return () => { + isMounted = false; + }; + }, []); + + // Render "Loading" until the query completes + return ( +
+
+

+ {name != null ? `Repository: ${name}` : "Loading"} +

+
+
+ ); +} + +export default App; +``` + +## Step 3: When To Use Relay + +At this point we can fetch data with GraphQL and render it with React. This is a reasonable solution that can be sufficient for many apps. However, this approach doesn't necessarily scale. As our app grows in size and complexity, or the number of people working on the app grows, a simple approach like this can become limiting. Relay provides a number of features designed to help keep applications fast and reliable even as they grow in size and complexity: collocating data dependencies in components with GraphQL fragments, data consistency, mutations, etc. Check out [Thinking in GraphQL](../../principles-and-architecture/thinking-in-graphql/) and [Thinking in Relay](../../principles-and-architecture/thinking-in-relay/) for an overview of how Relay makes it easier to work with GraphQL. + + +## Step 4: Adding Relay To Our Project + +In this guide we'll demonstrate installing the *experimental* release of Relay Hooks, a new, hooks-based Relay API that supports React Concurrent Mode and Suspense. + +First we'll add the necessary packages. Note that Relay is comprised of three key pieces: a compiler (which is used at build time), a core runtime (that is React agnostic), and a React integration layer. + +```bash +# NPM Users +npm install --save relay-runtime react-relay +npm install --save-dev relay-compiler babel-plugin-relay + +# Yarn Users +yarn add relay-runtime react-relay +yarn add --dev relay-compiler babel-plugin-relay +``` + +### 4.1 Configure Relay Compiler + +Next let's configure Relay compiler. We'll need a copy of the schema as a `.graphql` file. 
If you're using the GitHub GraphQL API, you can download a copy directly from the Relay example app: + +```bash +cd your-app-name +curl https://raw.githubusercontent.com/relayjs/relay-examples/main/issue-tracker/schema/schema.graphql > schema.graphql +``` +*Note:* On Windows, the `.graphql` file has to be explicitly saved with UTF-8 encoding, not the default UTF-16. See this [issue](https://github.com/prisma-labs/get-graphql-schema/issues/30) for more details. + +If you're using your own API we suggest using the [`get-graphql-schema`](https://www.npmjs.com/package/get-graphql-schema) utility to download your schema into a `.graphql` file. + +Now that we have a schema we can modify `package.json` to run the compiler first whenever we build or start our app: + +```json +// your-app-name/package.json +{ + ... + "scripts": { + ... + "start": "yarn run relay && react-scripts start", + "build": "yarn run relay && react-scripts build", + "relay": "yarn run relay-compiler" + ... + }, + "relay": { + "src": "./src/", + "schema": "./schema.graphql", + "language": "javascript" + } + ... +} +``` + +At this point, you should be able to run the following successfully: + +```bash +cd your-app-name +yarn start +``` + +If it works, your app will open at [localhost:3000](http://localhost:3000). Now when we write GraphQL in our app, Relay will detect it and generate code to represent our queries in `your-app-name/src/__generated__/`. We recommend checking in these generated files to source control. + +### 4.2 Configure Relay Runtime + +Now that the compiler is configured we can set up the runtime - we have to tell Relay how to connect to our GraphQL server. We'll reuse the `fetchGraphQL` utility we built above. Assuming you haven't modified it (or at least that it still takes `text` and `variables` as arguments), we can now define a Relay `Environment`. An `Environment` encapsulates how to talk to our server (a Relay `Network`) with a cache of data retrieved from that server. 
We'll create a new file, `src/RelayEnvironment.js` and add the following: + +```javascript +// your-app-name/src/RelayEnvironment.js +import {Environment, Network, RecordSource, Store} from 'relay-runtime'; +import fetchGraphQL from './fetchGraphQL'; + +// Relay passes a "params" object with the query name and text. So we define a helper function +// to call our fetchGraphQL utility with params.text. +async function fetchRelay(params, variables) { + console.log(`fetching query ${params.name} with ${JSON.stringify(variables)}`); + return fetchGraphQL(params.text, variables); +} + +// Export a singleton instance of Relay Environment configured with our network function: +export default new Environment({ + network: Network.create(fetchRelay), + store: new Store(new RecordSource()), +}); +``` + +## Step 5: Fetching a Query With Relay + +Now that Relay is installed and configured we can change `App.js` to use it instead. We'll prepare our data as the app starts, and wait for it to be ready in ``. Replace the contents of `src/App.js` with the following: + +```javascript +import React from 'react'; +import './App.css'; +import graphql from 'babel-plugin-relay/macro'; +import { + RelayEnvironmentProvider, + loadQuery, + usePreloadedQuery, +} from 'react-relay/hooks'; +import RelayEnvironment from './RelayEnvironment'; + +const { Suspense } = React; + +// Define a query +const RepositoryNameQuery = graphql` + query AppRepositoryNameQuery { + repository(owner: "facebook", name: "relay") { + name + } + } +`; + +// Immediately load the query as our app starts. For a real app, we'd move this +// into our routing configuration, preloading data as we transition to new routes. +const preloadedQuery = loadQuery(RelayEnvironment, RepositoryNameQuery, { + /* query variables */ +}); + +// Inner component that reads the preloaded query results via `usePreloadedQuery()`. +// This works as follows: +// - If the query has completed, it returns the results of the query. 
+// - If the query is still pending, it "suspends" (indicates to React that the +// component isn't ready to render yet). This will show the nearest +// fallback. +// - If the query failed, it throws the failure error. For simplicity we aren't +// handling the failure case here. +function App(props) { + const data = usePreloadedQuery(RepositoryNameQuery, props.preloadedQuery); + + return ( +
+    <div className="App">
+      <header className="App-header">
+        <p>{data.repository.name}</p>
+      </header>
+    </div>
+  );
+}
+
+// The above component needs to know how to access the Relay environment, and we
+// need to specify a fallback in case it suspends:
+// - <RelayEnvironmentProvider> tells child components how to talk to the current
+//   Relay Environment instance
+// - <Suspense> specifies a fallback in case a child suspends.
+function AppRoot(props) {
+  return (
+    <RelayEnvironmentProvider environment={RelayEnvironment}>
+      <Suspense fallback={'Loading...'}>
+        <App preloadedQuery={preloadedQuery} />
+      </Suspense>
+    </RelayEnvironmentProvider>
+  );
+}
+
+export default AppRoot;
+```
+
+Note that you'll have to restart the app - `yarn start` - so that Relay compiler can see the new query and generate code for it. See the [Relay Compiler setup docs](../installation-and-setup/#set-up-relay-compiler) for how to run Relay Compiler in watch mode, to regenerate code as you modify queries.
+
+## Step 6: Explore!
+
+At this point we have an app configured to use Relay. We recommend checking out the following for information and ideas about where to go next:
+
+* The [Guided Tour](../../guided-tour/) describes how to implement many common use-cases.
+* The [API Reference](../../api-reference/use-fragment/) has full details on the Relay Hooks APIs.
+* The [Example App](https://github.com/relayjs/relay-examples/tree/main/issue-tracker) is a more sophisticated version of what we've started building here. It includes routing integration and uses React Concurrent Mode and Suspense for smooth transitions between pages.
+
+<DocsRating />
+
diff --git a/website/versioned_docs/version-v15.0.0/glossary/glossary.md b/website/versioned_docs/version-v15.0.0/glossary/glossary.md
new file mode 100644
index 0000000000000..07c3b7816cd26
--- /dev/null
+++ b/website/versioned_docs/version-v15.0.0/glossary/glossary.md
@@ -0,0 +1,967 @@
+---
+id: glossary
+title: Glossary
+slug: /glossary/
+description: Relay terms glossary
+keywords:
+- glossary
+---
+
+import DocsRating from '@site/src/core/DocsRating';
+import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal';
+
+## 3D
+
+Data-Driven Dependencies. 
Facebook's way of including the code to render a particular component if and only if it will actually be rendered. Canonical use cases are + +* **Fields that are typically null**, and which are only rendered when not null. +* **Unions**. For example, the core news feed item has many different variants, each of which is a separate React component. Which one we render depends on the data (i.e. is "data-driven"). On a given feed, it is likely that most variants will not be rendered, and need not be downloaded. +* **Component can have different rendering strategies, depending on the data.** + + + +See the [@match](#match) directive, [@module](#module) directive and [the 3D guide](../guides/data-driven-dependencies). + + + + + +See the [@match](#match) directive and the [@module](#module) directive. + + + +## Abstract Type + +GraphQL unions and interfaces are abstract types. See [interface](#interface-graphql). + +## Abstract Type Refinement + +See [type refinement](#type-refinement). If type refinement is a way of conditionally including fields if a type implements a particular concrete type (such as `... on User { name }`), abstract type refinement refers to conditionally including fields if a type implements a particular abstract type (i.e. interface). So, `... on Actor { field }`. + +## @arguments + +A [directive](#directive) that modifies a [fragment spread](#fragment-spread) and is used to pass arguments (defined with [`@argumentDefinitions`](#argumentdefinitions)) to that fragment. + +```graphql +...Story_story @arguments(storyId: "1234") +``` + +## @argumentDefinitions + +A directive that modifies a fragment definition and defines the names of the local arguments that the fragment can take, as well as their type. 
+
+```graphql
+fragment Story_story on Story
+  @argumentDefinitions(storyId: {type: "ID!"}) {
+  # etc
+}
+```
+
+If a variable is used in a fragment but not included in an `@argumentDefinitions` directive, Relay will require that the fragment is only spread in queries which declare these variables, or in fragments which ultimately are spread in such a query.
+
+Compare with [variables](#variables) and see the [relevant section](../guided-tour/rendering/variables) in the guided tour.
+
+## Artifact
+
+Files that are generated by the Relay compiler, typically ending in `.graphql.js`.
+
+<FbInternalOnly>
+
+See [a guide to Relay artifacts](https://www.internalfb.com/intern/wiki/Relay-team/generated-artifacts/).
+
+</FbInternalOnly>
+
+## AST
+
+Abstract Syntax Tree. In Relay, there are two types of ASTs, [normalization](#normalization-ast) and [reader](#reader-ast) ASTs.
+
+The default export of a `.graphql.js` file is an AST.
+
+The Relay compiler parses and transforms GraphQL literals, and generates Relay ASTs (see [artifact](#artifact)). Doing this work at compile time allows the Relay runtime to be faster.
+
+## Availability
+
+The concept of availability refers to whether there is enough non-stale, non-invalidated data in the store to fulfill a particular request immediately, or whether a request to server needs to be made in order to fulfill that request.
+
+## Babel Transform
+
+A build-time transformation of the Javascript codebase, which turns calls to
+
+```javascript
+graphql`...`
+```
+
+into `require(NAME_OF_GENERATED_ARTIFACT)` calls.
+
+## Client Schema Extension
+
+[The GraphQL spec](https://spec.graphql.org/June2018/#sec-Schema-Extension) allows you to define new types, new fields on types, new directives, etc. locally.
+
+Relay supports adding types and fields in client schema extension files. Developers use this feature to add fields that contain purely local state that is associated with items on the graph. For example, an `is_selected` field on a `User`. 
+ +## CacheConfig + +A value used to control how a query's response may be cached. Ultimately passed to `environment.execute`. + +## Check + +One of the core functions of the store. Given an operation, determines whether the store has all of the data necessary to render that operation. Calls `DataChecker.check`, which synchronously starts with the root node associated with the operation and walks the data in the store. + +In practice, exposed as a method on `environment`. + +In conjunction with the fetch policy, used by `loadQuery` (and other methods) to determine whether it is necessary to make a network request call to fulfill a query. + +## Commit + +After receiving a network response, the payload is committed, or written to the store. + +Commit is also the verb used to describe initiating a mutation and writing its data to the store. + +## Compiler + +The piece of code which scans your Javascript files for `graphql` tagged nodes and generates the appropriate files (`.graphql.js` files, `$Parameters.js` files, etc.) + +The generated output from the compiler is committed and checked into the repository. + +## Concrete Request + +An Abstract Syntax Tree representing a query, subscription or mutation. + +The default export of a `.graphql.js` file corresponding to a query, subscription or mutation. + +In addition, calls to `graphql`...`` are turned into concrete requests at build time via the Relay Babel transform. + +**See the important safety notes at Preloadable Concrete Request.** + +## Config + +A file or javascript object which controls, among other things, which files are scanned by the Relay [compiler](#compiler) for your project. + +## @connection + +A directive which declares that a field implements the [connection](#connection) spec. + +## Connection + + +A field implementing the connection spec. See here for more details on the spec, and the section of the guided tour on rendering list data and pagination. + + + +A field implementing the connection spec. 
See the section of the guided tour on rendering list data and pagination.
+
+
+See also [`usePaginationFragment`](../api-reference/use-pagination-fragment).
+
+## Container
+
+A term for a higher order component that provided a child component with the data from queries and fragments. Associated with Relay Modern.
+
+You should use the Relay hooks API when possible.
+
+## Data Checker
+
+A class exposing a single method, `check`, which synchronously starts with the root node associated with the operation and walks the data in the store. It determines whether the data in the store suffices to fulfill a given operation.
+
+Called by `store.check`.
+
+## DataID
+
+The globally-unique identifier of a record. Can be generated on the client with [missing field handlers](#missing-field-handler). Usually corresponds to an Ent's ID (if available), but guaranteed to equal the value of the `__id` field.
+
+[`updater`](#updater) and [`optimisticUpdater`](#optimisticupdater) functions are passed instances of [`RelaySourceSelectorProxy`](#recordproxy). Calling `.get(id)` with the DataID on a `RelaySourceSelectorProxy` will look up that item in the store, and return a proxy of it.
+
+## Data Masking
+
+Refers to the idea that a component should not be able to access any data it does not declare in its fragment or query, even inadvertently. This prevents accidental coupling of data between components, and means that every component can be refactored in isolation. It negates the risk that removing a field in a child component will accidentally break a different component, allowing components to *move fast, with stable infrastructure*.
+
+Also refers to the practice of hiding the data of child components from their parents, in keeping with the idea. 
+ +In Relay, a query declared like `query FooQuery { viewer { ...subcomponent_``viewer_name } }` will not be able to access the data declared by `subcomponent_viewer_name` without access to the `ReaderFragment` representing the `subcomponent_viewer_name` fragment. + +See the [Thinking in Relay guide](../principles-and-architecture/thinking-in-relay#data-masking). + +## @defer + +A directive which can be added to a fragment spread or inline fragment to avoid blocking on that fragment's data. + + +See the [documentation](https://www.internalfb.com/intern/wiki/Relay/Web/incremental-data-delivery-defer-stream/#defer). + + +## Definition + +In the compiler, a definition refers to the text within a GraphQL literal where an operation or fragment was defined. + +## Descriptor + +Can refer to an `OperationDescriptor` or `RequestDescriptor`. Descriptors are types used internally to the Relay codebase, and generally, refer to an object containing the minimum amount of information needed to uniquely identify an operation or request, such as (for a `RequestIdentifier`), a node, identifier and variables. + +## DevTools + +An awesome Chrome extension for debugging Relay network requests, the Relay store and Relay events. Helpful for answering questions like "Why am I not seeing the data I expect to see?" "Why did this component suspend?" etc. + +See the [documentation](https://relay.dev/docs/debugging/relay-devtools/). + +## Document + +In the compiler, a Document refers to a GraphQL literal that contains one or more operation or fragment [definitions](#definition). Relay requires that GraphQL literals in JavaScript files contain a single definition. + +## Directive + +A special instruction, starting with `@` and contained in a `graphql` literal or graphql file, which provides special instructions to the relay compiler or to the server. Examples include `@defer`, `@stream` and `@match`. 
+ +## Disposable + +Any object which contains a `.dispose` method which takes no parameters and provides no return value. Many objects in Relay (such query references and entrypoint references) and the return value of many methods (such as calls to `.subscribe` or `.retain`) are disposables. + +## Entrypoint + +A lightweight object containing information on the components which need to be loaded (as in the form of calls to `JSResource`) and which queries need to be loaded (in the form of preloadable concrete requests) before a particular route, popover or other piece of conditionally loaded UI can be rendered. + +All queries which are required for the initial rendering of a piece of UI should be included in that UI's entrypoint. + +Entrypoints can contain queries and other entrypoints. + +See also [preloadable concrete request](#preloadable-concrete-request) and [JSResource](#jsresource). + +## Environment + +An object bringing together many other Relay objects, most importantly a store and a network. Also, includes a publish queue, operation loader, scheduler and [missing fields handlers](#missing-field-handler). + +Set using a `RelayEnvironmentProvider` and passed down through React context. + +All non-internal Relay hooks require being called within a Relay environment context. + +## Execute + +Executing a query, mutation or subscription (collectively, an operation) roughly means "create a lazy observable that, when subscribed to, will make a network request fulfilling the operation and write the returned data to the store." + +A variety of `execute` methods are exposed on the Relay environment. + +## Fetch Policy + +A string that determines in what circumstances to make a network request in which circumstances to fulfill the query using data in the store, if available. Either `network-only`, `store-and-network`, `store-or-network` or `store-only`. (Some methods do not accept all fetch policies.) 
+ +## Field + +Basically, anything you can select using a query, mutation, subscription or fragment. For example, `viewer`, `comment_create(input: $CommentCreateData)` and `name` are all fields. + +The GraphQL schema comprises many fields. + +## Fragment + +Fragment is an overloaded term, and has at least two distinct meanings in Relay. + +### Fragments as a GraphQL concept + +The fundamental reusable unit of GraphQL. Unlike queries, subscriptions and mutations, fragments cannot be queried on their own and must be embedded within a request. + +Fragments can be spread into queries, mutations, subscriptions and other fragments. + +Fragments can be standalone (as in `fragment Component_user on User { name }`) or inline, as in the `... on User { name }` in `query MyQuery { node(id: $id) { ... on User { name } } }`. + +Fragments are always defined on a particular type (`User` in the example), which defines what fields can be selected within it. + +### Fragments within Relay + +Within Relay, a fragment refers to the fields that are read out for a given fragment/operation. The term is also used colloquially to refer to reader ASTs. So, e.g. the following query and fragment might have identical reader ASTs: + +```graphql +query Foo { + actor { name } +} +``` + +``` +fragment Bar on Query { + actor { name } +} +``` + +## Fragment Identifier + +A string, providing enough information to provide the data for a particular fragment. For example: + +`1234{"scale":2}/Story_story/{"scale":2}/"4567"` + +This identifies by its persist ID (`1234`), followed by the variables it accepts, followed by the `Story_story` fragment (which does not have a persist id) and the variables it uses, followed by the Data ID (likely, the `id` field) of whatever Story happened to be referenced. + +## Fragment Reference + +A parameter passed to `useFragment`. Obtained by accessing the value onto which a fragment was spread in another [query](#query), fragment, subscription or mutation. 
For example, + +```javascript +const queryData = usePreloadedQuery( + graphql`query ComponentQuery { viewer { account_user { ...Component_name } } }`, + {}, +); + +// queryData.viewer is the FragmentReference +// Though this would usually happen in another file, you can +// extract the value of Component_name as follows: +const fragmentData = useFragment( + graphql`fragment Component_name on User { name }`, + queryData?.viewer?.account_user, +); +``` + +Just like a query reference and a graphql tagged literal describing a query (i.e. a concrete request) can be used to access the data from a query, a fragment reference and a graphql tagged literal describing a fragment (i.e. a reader fragment) can be used to access the data referenced from a fragment. + +## Fragment Resource + +An internal class supporting lazily loaded queries. Exposes two important methods: + +* `read`, which is meant to be called during a component's render phase. It will attempt to fulfill a query from the store (by calling `environment.lookup`) and suspend if the data is not available. It furthermore writes the results from the attempted read (whether a promise, error or result) to an internal cache, and updates that cached value when the promise resolves or rejects. +* `subscribe`, which is called during the commit phase, and establishes subscriptions to the relay store. + +If the component which calls `.read` successfully loads a query, but suspends on a subsequent hook before committing, the data from that query can be garbage collected before the component ultimately renders. Thus, components which rely on `FragmentResource` are at risk of rendering null data. + +Compare to [query resource](#query-resource). + +## Fragment Spec Resolver + +TODO + +## Fragment Spread + +A fragment spread is how one fragment is contained in a query, subscription, mutation or other fragment. 
In the following example, `...Component_name` is a fragment spread: + +```graphql +query ComponentQuery { + viewer { + account_user { + ...Component_name + } + } +} +``` + +In order for a fragment to be spread in a particular location, the types must match. For example, if `Component_name` was defined as follows: `fragment Component_name on User { name }`, this spread would be valid, as `viewer.account_user` has type `User`. + +## Garbage Collection + +Relay can periodically garbage collect data from queries which are no longer being retained. + +See more information in the [guided tour](https://relay.dev/docs/guided-tour/reusing-cached-data/presence-of-data/#garbage-collection-in-relay). + +## GraphQLTaggedNode + +This is the type of the call to + +```js +graphql`...` +``` + +It is the union of ReaderFragment, ReaderInlineDataFragment, ConcreteRequest, and ConcreteUpdatableQuery. + + + +Note that Flow can be configured to understand that the type of a GraphQL literal is the type of the default export of the generated `.graphql.js` file. + + + + + +Note that Flow is configured to understand that the type of a GraphQL literal is the type of the default export of the generated `.graphql.js` file. + + + +## Handler + +TODO + +## ID + +Relay treats ids specially. In particular, it does the following two things: + +* The compiler automatically adds a selection of the `id` field on every type where the `id` field has type `ID` or `ID!`. +* When [normalizing](#normalization) data, if an object has an `id` property, that field is used as its ID in the store. + +There are types in the schema where the `id` field does not have type `ID` or `ID!` (e.g. has the type `string` or `number`). If a user selects this field themselves, this field is used as an id. This is unexpected and incorrect behavior. + +## @include + +A directive that is added to fields, inline fragments and fragment spreads, and allows for conditional inclusion. 
It is the opposite of the [`@skip`](#skip) directive.
+
+In the compiler, the `@include`/`@skip` directives are treated specially, and produce `Condition` nodes.
+
+## @inline
+
+A directive that applies to fragments which enables developers to pass masked data to functions that are executed outside of the React render phase.
+
+Normally, data is read out using `useFragment`. However, this function can only be called during the render phase. If store data is needed outside of the render phase, a developer has several options:
+
+* read that data during the render phase, and pass it to the function/have the function close over that data. (See also [#relay])
+* pass a reference to an `@inline` fragment, which can then be accessed (outside of the render phase) using the `readInlineData` directive.
+
+This directive causes them to be read out when the parent fragment is read out, and unmasked by the call to `readInlineData`.
+
+## Interface (GraphQL)
+
+An *Interface* is an abstract type that includes a certain set of fields that a type must include to implement the interface.
+
+You can spread a fragment on an interface onto a concrete type (for example `query MyQuery { viewer { account_user { ...on Actor { can_viewer_message } } }`) or a fragment on a concrete type onto an interface (for example `query MyQuery { node(id: 4) { ... on User { name } } }`). You are no longer allowed to spread a fragment on an interface onto an interface.
+
+See also abstract type refinement.
+
+## Invalidation
+
+In certain cases, it is easy to determine the outcome of a mutation. For example, if you "like" a Feedback, the like count will increment and `viewer_did_like` will be set to true. However, in other cases, such as when you are blocking another user, the full impact on the data in your store is hard to determine. 
+
+For situations like these, Relay allows you to invalidate a record (or the whole store), which will cause the data to be re-fetched the next time it is rendered.
+
+See the [section in the guide](https://relay.dev/docs/guided-tour/reusing-cached-data/staleness-of-data/).
+
+## JSResource
+
+A lightweight API for specifying that a React component should be loaded on demand, instead of being bundled with the first require (as would be the case if you imported or required it directly.)
+
+This API is safe to use in entrypoint files.
+
+
+See [the npm module](https://www.npmjs.com/package/jsresource).
+
+
+## Lazy Loading
+
+A query or entry point is lazy loaded if the request for the data occurs at render time.
+
+Lazy loaded queries and entry points have performance downsides, are vulnerable to being over- and under-fetched, and can result in components being rendered with null data. They should be avoided.
+
+## Linked Record
+
+A linked record is a record that is directly accessible from another record. For example, in the query `query MyQuery { viewer { account_user { active_instant_game { id } } } }`, `active_instant_game` (which has the type `Application`) is a linked record of `account_user`.
+
+A linked record cannot be queried by itself, but must be queried by selecting subfields on it.
+
+Compare to [value](#value).
+
+## Literal
+
+A GraphQL literal is a call to
+
+```javascript
+graphql`...`
+```
+
+in your code. These are pre-processed, and replaced at build time with a [GraphQLTaggedNode](#graphqltaggednode) containing an [AST](#ast) representation of the contents of the literal.
+
+## Lookup
+
+One of the main methods exposed by the Relay store. Using a [reader selector](#reader-selector), traverses the data in the store and returns a [snapshot](#snapshot), which contains the data being read, as well as information about whether data is missing and other pieces of information. Also exposed via the Relay environment. 
+ +Calls [`Reader.read`](#reader). + +## @match + +A directive that, when used in combination with [@module](#module), allows users to download specific JS components alongside the rest of the GraphQL payload if the field decorated with @match has a certain type. See [3D](#3d). + +## MatchContainer + +A component that renders the component returned in conjunction with a field decorated with the [@match](#match) directive. See [3D](#3d). + +## Missing Field Handler + +A function that provides a [DataID](#dataid) for a field (for singular and plural linked fields) and default values (for scalar fields). + +For example, you may have already fetched an item with id: 4, and are executing a query which selects `node(id: 4)`. Without a missing field handler, Relay would not know that the item with id: 4 will be returned by `node(id: 4)`, and would thus attempt to fetch this data over the network. Providing a missing field handler can inform Relay that the results of this selection are present at id: 4, thus allowing Relay to avoid a network request. + +`getRelayFBMissingFieldHandlers.js` provides this and other missing field handlers. + +## @module + +A directive that, when used in combination with [@match](#match), allows users to specify which JS components to download if the field decorated with @match has a certain type. See [3D](#3d). + +## Module + +TODO + +## Mutation + +A mutation is a combination of two things: a mutation on the backend, followed by query against updated data. + + +See the [guide on mutations](../guided-tour/updating-data/graphql-mutations), and [this article](https://www.internalfb.com/intern/wiki/Graphql-for-hack-developers/mutation-root-fields/) on defining mutations in your hack code. + + + +See the [guide on mutations](../guided-tour/updating-data/graphql-mutations). + + +## Mutation Root Query + +The root object of a mutation query. 
In an `updater` or `optimisticUpdater`, calling `store.getRootField('field_name')` will return the object from the mutation root query named `field_name`. + +The fields exposed on this object are **not** the same as those available for queries, and differ across mutations. + +## Network + +Relay environments contain a `network` object, which exposes a single `execute` function. All network requests initiated by Relay will go through this piece of code. + +This provides a convenient place to handle cross-cutting concerns, like authentication and authorization. + +## Node + +TODO + +## Normalization + +Normalization is the process of turning nested data (such as the server response) and turning it into flat data (which is how Relay stores it in the store.) + +See the [response normalizer](#response-normalizer). + +## Normalization AST + +An [AST](#ast) that is associated with an [operation](#operation) that (in combination with [variables](#variables)) can be used to: +* write a network payload to the store, +* write an optimistic response to the store, +* determine whether a query can be fulfilled from data in the store, and +* determine which records in the store are reachable (used in [garbage collection](#garbage-collection)). + +Unlike the [reader AST](#reader-ast), the normalization AST includes information on the contents of nested fragments. + +The generated artifact associated with an operation (e.g. `FooQuery.graphql.js`) contains both a normalization AST and a reader AST. + +## Normalization Selector + +A selector defines the starting point for a traversal into the graph for the purposes of targeting a subgraph, combining a GraphQL fragment, variables, and the Data ID for the root object from which traversal should progress. + +## Notify + +A method exposed by the store which will notify each [subscriber](#subscribe) whose data has been modified. Causes components which are rendering data that has been modified to re-render with new data. 
+ +## Observable + +The fundamental abstraction in Relay for representing data that may currently be present, but may also only be available in the future. + +Observables differ from promises in that if the data in an observable has already been loaded, you can access it synchronously as follows: + +```javascript +const completedObservable = Observable.from("Relay is awesome!"); +let valueFromObservable; +observable.subscribe({ + next: (value) => { + valueFromObservable = value; + /* this will execute in the same tick */ + }, +}); +console.log(valueFromObservable); // logs out "Relay is awesome!" +``` + +This is advantageous, as it allows Relay hooks to not suspend if data is already present in the store. + +In Relay, observables are a partial implementation of [RxJS Observables](https://rxjs-dev.firebaseapp.com/guide/observable). + +## Operation + +In [GraphQL](https://spec.graphql.org/June2018/#sec-Language.Operations), a query, subscription or mutation. + +In Relay, every operation also has an associated [fragment](#fragments-within-relay). So, an accurate mental model is that operations are fragments whose [type condition](#type-condition) is that they are on [Query/Mutation/Subscription](#root-type) and for which Relay knows how to make a network request. + +## Operation Descriptor + +Colloquially, an operation descriptor is an operation and variables. + +The operation descriptor flowtype contains the three pieces of information that Relay needs to work with the data: [a reader selector](#reader-selector), a [normalization selector](#normalization-selector) and a [request descriptor](#request-descriptor). + +The variables are filtered to exclude unneeded variables and are populated to include default values for missing variables, thus ensuring that requests that differ in irrelevant ways are cached using the same request ID. + +## Operation Mock Resolver + +A function taking an operation descriptor and returning a network response or error, used when testing. 
+ +## Operation Tracker + +TODO + +## Optimistic Update + +TODO + +## Optimistic Updater + +TODO + +## Pagination + +Querying a list of data (a [connection](#connection)) in parts is known as pagination. + +See the [graphql docs](https://graphql.org/learn/pagination/) and our [guided tour](../guided-tour/list-data/pagination). + +## Payload + +The value returned from the GraphQL server as part of the response to a request. + +## Plural Field + +A field for which the value is an array of [values](#value) or [records](#record). + +## @preloadable + +A directive that modifies queries and which causes relay to generate `$Parameters.js` files and preloadable concrete requests. Required if the query is going to be used as part of an entry point. + +## Preloadable Concrete Request + +A small, lightweight object that provides enough information to initiate the query and fetch the full query AST (the `ConcreteRequest`.) This object will only be generated if the query is annotated with `@preloadable`, and is the default export of `$parameters.js` files. It is only generated for queries which are annotated with `@preloadable`. + +Unlike concrete requests (the default export of `.graphql.js` files), preloadable concrete requests are extremely light weight. + +Note that entrypoints accept either preloadable concrete requests or concrete requests in the `.queries[queryName].parameters` position. However, ***because a concrete request is not a lightweight object, you should only include preloadable concrete requests here.*** + +Note also that preloadable queries have `id` fields, whereas other queries do not. + +## Preloadable Query Registry + +A central registry which will execute callbacks when a particular Query AST (concrete request) is loaded. + +Required because of current limitations on dynamically loading components in React Native. + +## Project + +For Relay to process a file with a GraphQL literal, it must be included in a project. 
A project specifies the folders to which it applies and the schema against which to evaluate GraphQL literals, and includes other information needed by the Relay compiler. + + +Projects are defined in a single [config](#config) file, found [here](https://www.internalfb.com/intern/diffusion/WWW/browse/master/scripts/relay/compiler-rs/config.www.json) and [here](https://www.internalfb.com/intern/diffusion/FBS/browse/master/xplat/relay/compiler-rs/config.xplat.json). + + +## Profiler + +TODO + +## Publish + +One of the main methods exposed by the `store`. Accepts a [record source](#record-source), from which the records in the store are updated. Also updates the mapping of which records in the store have been updated as a result of publishing. + +One or more calls to `publish` should be followed by a call to [`notify`](#notify). + +## Publish Queue + +A class used internally by the environment to keep track of, apply and revert pending (optimistic) updates; commit client updates; and commit server responses. + +Exposes mutator methods like `commitUpdate` that only add or remove updates from the queue, as well as a `run` method that actually performs these updates and calls `store.publish` and `store.notify`. + +## Query + +A [GraphQL query](https://graphql.org/learn/queries/) is a request that can be sent to a GraphQL server in combination with a set of [variables](../guided-tour/rendering/variables), in order to fetch some data. It consists of a [selection](#selection) of fields, and potentially includes other [fragments](#fragment). + +## Query Executor + +A class that normalizes and publishes optimistic responses and network responses from a network observable to the store. + +After each response is published to the store, `store.notify` is called, updating all components that need to re-render. + +Used by `environment` in methods such as `execute`, `executeWithSource` and `executeMutation`, among others. 
+ +## Query Reference + +TODO + +## Query Resource + +A class for helping with lazily loaded queries and exposing two important methods: `prepare` and `retain`. + +* `prepare` is called during a component's render method, and will either read an existing cached value for the query, or fetch the query and suspend. It also stores the results of the attempted read (whether the data, a promise for the data or an error) in a local cache. +* `retain` is called after the component has successfully rendered. + +If the component which calls `.prepare` successfully loads a query, but suspends on a subsequent hook before committing, the data from that query can be garbage collected before the component ultimately renders. Thus, components which rely on `QueryResource` are at risk of rendering null data. + +Compare to [fragment resource](#fragment-resource). + +## `@raw_response_type` + +A directive added to queries which tells Relay to generate types that cover the `optimisticResponse` parameter to `commitMutation`. + + +See the [documentation](../guided-tour/updating-data/local-data-updates) for more. + +## Reader + +TODO this section + +## Reader AST + +An [AST](#AST) that is used to read the data selected in a given fragment. + +Both [operations](#operation) and [fragments](#fragment) have reader ASTs. + +A reader AST contains information about which fragments are spread at a given location, but unlike a [normalization AST](#normalization-ast), does not include information about the fields selected within these fragments. + +## Reader Fragment + +TODO + +See [GraphlQLTaggedNode](#graphqltaggednode). + +## Reader Selector + +An object containing enough information for the store to traverse its data and construct an object represented by a query or fragment. Intuitively, this "selects" a portion of the object graph. + +See also [lookup](#lookup). + +## Record + +A record refers to any item in the Relay [store](#store) that is stored by [ID](#id). 
[Values](#value) are not records; most everything else is. + +## Record Source + +An abstract interface for storing [records](#record), keyed by [DataID](#dataid), used both for representing the store's cache for updates to it. + +## Record Source Selector Proxy + +See [record proxy](#record-proxy). + +## Record Proxy + +See the [store documentation](../api-reference/store). + +## Ref Counting + +The pattern of keeping track of how many other objects can access a particular object, and cleaning it up or disposing of it when that number reaches zero. This pattern is implemented throughout the Relay codebase. + +## Reference Marker + +TODO + +## @refetchable + +A directive that modifies a fragment, and causes Relay to generate a query for that fragment. + +This yields efficiency gains. The fragment can be loaded as part of a single, larger query initially (thus requiring only a single request to fetch all of the data), and yet refetched independently. + +## @relay + +A directive that allows you to turn off data masking and is used on plural types. + +See [the documentation](../api-reference/graphql-and-directives/#relaymask-boolean). + +## Relay Classic + +An even older version of Relay. + +## Relay Hooks + +The easiest-to-use, safest Relay API. It relies on suspense, and is safe to use in React concurrent mode. + +You should not write new code using Relay Classic or Relay Modern. + +## Relay Modern + +An older version of Relay. This version of Relay had an API that was heavily focused on Containers. + +## Relay Resolvers + +Relay Resolvers is an experimental Relay feature which enables modeling derived state as client-only fields in Relay’s GraphQL graph. + +See also [the Relay Resolvers guide](../guides/relay-resolvers). + +## Release Buffer + +As queries are released (no longer [retained](#retain)), their root nodes are stored in a release buffer of fixed size, and only evicted by newly released queries when there isn't enough space in the release buffer. 
When Relay runs garbage collection, queries that are present in the release buffer are not disposed. + +The size of the release buffer is configured with the `gcReleaseBufferSize` parameter. + +## `@required` + +A Relay directive that makes handling potentially `null` values more ergonomic. + +See also [the `@required` guide](../guides/required-directive/). + +## Request + +A request refers to an API call made over the network to access or mutate some data, or both. + +A query, when initiated, may or may not involve making a request, depending on whether the query can be fulfilled from the store or not. + +## Request Descriptor + +An object associating a [concrete request](#concrete-request) and [variables](#variables), as well as a pre-computed request ID. The variables should be filtered to exclude unneeded variables and are populated to include default values for missing variables, thus ensuring that requests that differ in irrelevant ways are cached using the same request ID. + +## Resolver + +An overloaded term, mostly referring to virtual fields, but also occasionally referring to other things. + +### When describing a field + +A resolver field is a "virtual" field that is backed by a function from a fragment reference on the same type to some arbitrary value. + +A live resolver is a "virtual" field that is backed by an external data source. e.g. one might use an external resolver to expose some state that is stored in local storage, or in an external Flux store. + +### Other meanings + +It can also be a [fragment spec resolver](#fragment-spec-resolver) or an [operation mock resolver](#operation-mock-resolver). + +## Response + +TODO + +## Response Normalizer + +A class, exposing a single method `normalize`. This will traverse the denormalized response from the API request, normalize it and write the normalized results into a given `MutableRecordSource`. It is called from the query executor. 
+ +## Restore + +TODO + +## Retain + +TODO + +## Render Policy + +TODO + +## Revert + +TODO + +## Root Field + +TODO + +## Root Type + +The [GraphQL spec](https://spec.graphql.org/June2018/#sec-Root-Operation-Types) defines three special root types: Query, Mutation and Subscription. Queries must select fields off of the Query root type, etc. + +## Root + +Outermost React Component for a given page or screen. Can be associated with an entrypoint. + +Roots for entrypoints are referred to by the [`JSResource`](#JSResource) to the root React component module. + +## Scalar + +TODO + +## Scheduler + +TODO + +## Schema + +A collection of all of the GraphQL types that are known to Relay, for a given [project](#project). + + +## Schema Sync + +The GraphQL [schema](#schema) is derived from annotations on Hack classes in the www repository. + +Periodically, those changes are synced to fbsource in a schema sync diff. If the updated schema would break relay on fbsource, these schema sync diffs will not land. + +If a field is removed from www, but is only used in fbsource, the application developer may not notice that the field cannot be removed. This is a common source of schema breakages. + +For more info, look [here](https://www.internalfb.com/intern/wiki/GraphQL/Build_Infra/Schema_Sync/) and [here](https://www.internalfb.com/intern/wiki/Relay-team/GraphQL_Schema_Sync/). + + +## Schema Extension + +TODO + +## Selection + +A "selection of fields" refers to the fields you are requesting on an object that you are accessing, as part of a query, mutation, subscription or fragment. + +## Selector + +See [normalization selector](#normalization-selector). + +## @skip + +A directive that is added to fields, inline fragments and fragment spreads, and allows for conditional inclusion. It is the opposite of the [`@include`](#include) directive. + +## Snapshot + +The results of running a reader selector against the data currently in the store. See [lookup](#lookup). 
+ +## Stale + +TODO + +## Store + +TODO + +## @stream + +TODO + +## @stream_connection + +TODO + +## Subscribe + +A method exposed by the Relay store. Accepts a callback and a snapshot (see [lookup](#lookup)). The relay store will call this callback when [`notify`](#notify) is called, if the data referenced by that snapshot has been updated or invalidated. + +## Subscription + +[GraphQL Subscriptions](../guided-tour/updating-data/graphql-subscriptions) are a mechanism which allow clients to subscribe to changes in a piece of data from the server, and get notified whenever that data changes. + +A GraphQL Subscription looks very similar to a query, with the exception that it uses the subscription keyword: + +```graphql +subscription FeedbackLikeSubscription($input: FeedbackLikeSubscribeData!) { + feedback_like_subscribe(data: $input) { + feedback { + id + like_count + } + } +} +``` + + + +See also [the guide](../guides/writing-subscriptions). + + + +## Transaction ID + +A unique id for a given instance of a call to `network.execute`. This ID will be consistent for the entire duration of a network request. It can be consumed by custom log functions passed to `RelayModernEnvironment`. + +## Traversal + +There are four tree traversals that are core to understanding the internal behavior of Relay. + +* Using the normalization AST: + * When Relay normalizes the payload it receives from the GraphQL server in the Response Normalizer; + * When Relay determines whether there is enough data to fulfill an operation, in the Data Checker; and + * When Relay determines what data is no longer accessible during garbage collection, in the Reference Marker. +* Using the reader AST: + * When Relay reads data for rendering, in the Reader. + +## Type + +The GraphQL type of a field is a description of a field on a schema, in terms of what subfields it has, or what its representation is (String, number, etc.). 
+ +See also [interface](#interface-graphql), [abstract type](#abstract-type) and [the GraphQL docs](https://graphql.org/learn/schema/#type-language) for more info. + +## Type Refinement + +The inclusion of a fragment of particular type in a location only known to potentially implement that type. This allows us to select fields if and only if they are defined on that particular object, and return null otherwise. + +For example, `node(id: 4) { ... on User { name } }`. In this case, we do not know ahead of time whether `node(id: 4)` is a User. If it is, this fragment will include the user name. + +See also [abstract type refinement](#abstract-type-refinement). + +## Updater + +A callback passed to `commitMutation`, which provides the application developer with imperative control over the data in the store. + + +See [the documentation](../guided-tour/updating-data/) and also optimistic updater. + +## Value + +A single value on a record, such as `has_viewer_liked`, or `name`. + +Compare with [linked record](#linked-record). + +## Variables + +GraphQL variables are a construct that allows referencing dynamic values inside a GraphQL query. They must be provided when the query is initiated, and can be used throughout nested fragments. + +See the [variables section of the guided tour](../guided-tour/rendering/variables) and compare with [@argumentDefinitions](#argumentdefinitions). 
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/introduction.md b/website/versioned_docs/version-v15.0.0/guided-tour/introduction.md new file mode 100644 index 0000000000000..a99a99075d522 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/introduction.md @@ -0,0 +1,57 @@ +--- +id: introduction +title: Introduction +slug: /guided-tour/ +description: Relay guided tour +keywords: +- guided tour +- relay +- graphql +- documentation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +import FbCrashCourse from './fb/FbCrashCourse.md'; + +In this guided tour, we're going to go over how to use Relay to build out some of the more common use cases in apps. If you're interested in a detailed reference of our APIs, check out our **[API Reference](../api-reference/relay-environment-provider/)**. + + +## Before you read + +Before getting started, bear in mind that we assume some level of familiarity with: + + + +* [Javascript](https://our.internmc.facebook.com/intern/wiki/JavaScript/) +* [React](https://our.internmc.facebook.com/intern/wiki/ReactGuide/) +* [GraphQL](https://our.internmc.facebook.com/intern/wiki/GraphQL/) and our internal [GraphQL Server](https://our.internmc.facebook.com/intern/wiki/Graphql-for-hack-developers/) + + + + + +* [Javascript](https://felix-kling.de/jsbasics/) +* [React](https://reactjs.org/docs/getting-started.html) +* [GraphQL](https://graphql.org/learn/) + + + +## On to the Tutorial + + + +* [Tutorial](https://www.internalfb.com/intern/staticdocs/relay/docs/tutorial/intro/) + + + + + +* [Tutorial](https://relay.dev/docs/tutorial/intro/) + + + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/advanced-pagination.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/advanced-pagination.md new file mode 100644 index 0000000000000..84e7594eb00cb --- /dev/null +++ 
b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/advanced-pagination.md @@ -0,0 +1,200 @@ +--- +id: advanced-pagination +title: Advanced Pagination +slug: /guided-tour/list-data/advanced-pagination/ +description: Relay guide for advanced pagination +keywords: +- pagination +- usePaginationFragment +- prefetching +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +In this section we're going to cover how to implement more advanced pagination use cases than the default cases covered by `usePaginationFragment`. + + +## Pagination Over Multiple Connections + +If you need to paginate over multiple connections within the same component, you can use `usePaginationFragment` multiple times: + +```js +import type {CombinedFriendsListComponent_user$key} from 'CombinedFriendsListComponent_user.graphql'; +import type {CombinedFriendsListComponent_viewer$key} from 'CombinedFriendsListComponent_viewer.graphql'; + +const React = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + +type Props = { + user: CombinedFriendsListComponent_user$key, + viewer: CombinedFriendsListComponent_viewer$key, +}; + +function CombinedFriendsListComponent(props: Props) { + + const {data: userData, ...userPagination} = usePaginationFragment( + graphql` + fragment CombinedFriendsListComponent_user on User { + name + friends + @connection( + key: "CombinedFriendsListComponent_user_friends_connection" + ) { + edges { + node { + name + age + } + } + } + } + `, + props.user, + ); + + const {data: viewerData, ...viewerPagination} = usePaginationFragment( + graphql` + fragment CombinedFriendsListComponent_viewer on Viewer { + actor { + ... 
on User { + name + friends + @connection( + key: "CombinedFriendsListComponent_viewer_friends_connection" + ) { + edges { + node { + name + age + } + } + } + } + } + } + `, + props.viewer, + ); + + return (...); +} +``` + +However, we recommend trying to keep a single connection per component, to keep the components easier to follow. + + + +## Bi-directional Pagination + +In the [Pagination](../pagination/) section we covered how to use `usePaginationFragment` to paginate in a single *"forward"* direction. However, connections also allow paginating in the opposite *"backward"* direction. The meaning of *"forward"* and *"backward"* directions will depend on how the items in the connection are sorted, for example *"forward"* could mean *more recent*, and *"backward"* could mean *less recent*. + +Regardless of the semantic meaning of the direction, Relay also provides the same APIs to paginate in the opposite direction, using `usePaginationFragment`, as long as the `before` and `last` connection arguments are also used along with `after` and `first`: + +```js +import type {FriendsListComponent_user$key} from 'FriendsListComponent_user.graphql'; + +const React = require('React'); +const {Suspense} = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + +type Props = { + userRef: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + const { + data, + loadPrevious, + hasPrevious, + // ... forward pagination values + } = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User { + name + friends(after: $after, before: $before, first: $first, last: $last) + @connection(key: "FriendsListComponent_user_friends_connection") { + edges { + node { + name + age + } + } + } + } + `, + userRef, + ); + + return ( + <> +

Friends of {data.name}:

+ edge.node)}> + {node => { + return ( +
+ {node.name} - {node.age} +
+ ); + }} +
+ + {hasPrevious ? ( + + ) : null} + + {/* Forward pagination controls can go simultaneously here */} + + ); +} +``` + +* The APIs for both *"forward"* and *"backward"* are exactly the same, they're only named differently. When paginating forward, then the `after` and `first` connection arguments will be used, when paginating backward, the `before` and `last` connection arguments will be used. +* Note that the primitives for both *"forward"* and *"backward"* pagination are exposed from a single call to `usePaginationFragment`, so both *"forward"* and *"backward"* pagination can be performed simultaneously in the same component. + + + +## Custom Connection State + +By default, when using `usePaginationFragment` and `@connection`, Relay will *append* new pages of items to the connection when paginating *"forward",* and *prepend* new pages of items when paginating *"backward"*. This means that your component will always render the *full* connection, with *all* of the items that have been accumulated so far via pagination, and/or items that have been added or removed via mutations or subscriptions. + +However, it is possible that you'd need different behavior for how to merge and accumulate pagination results (or other updates to the connection), and/or derive local component state from changes to the connection. Some examples of this might be: + +* Keeping track of different *visible* slices or windows of the connection. +* Visually separating each *page* of items. This requires knowledge of the exact set of items inside each page that has been fetched. +* Displaying different ends of the same connection simultaneously, while keeping track of the "gaps" between them, and being able to merge results when preforming pagination between the gaps. 
For example, imagine rendering a list of comments where the oldest comments are displayed at the top, then a "gap" that can be interacted with to paginate, and then a section at the bottom which shows the most recent comments that have been added by the user or by real-time subscriptions. + + +To address these more complex use cases, Relay is still working on a solution: + + +> TBD + + + + +## Refreshing connections + +> TBD + + + + +## Prefetching Pages of a Connection + +> TBD + + + + +## Rendering One Page of Items at a Time + +> TBD + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/connections.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/connections.md new file mode 100644 index 0000000000000..de0a92fde3023 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/connections.md @@ -0,0 +1,23 @@ +--- +id: connections +title: Connections +slug: /guided-tour/list-data/connections/ +description: Relay guide for connections +keywords: +- pagination +- connections +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + +There are several scenarios in which we'll want to query a list of data from the GraphQL server. Often times we don't want to query the *entire* set of data up front, but rather discrete sub-parts of the list, incrementally, usually in response to user input or other events. Querying a list of data in discrete parts is usually known as [Pagination](https://graphql.org/learn/pagination/). + + +Specifically in Relay, we do this via GraphQL fields known as [Connections](https://graphql.org/learn/pagination/#complete-connection-model). 
Connections are GraphQL fields that take a set of arguments to specify which "slice" of the list to query, and include in their response both the "slice" of the list that was requested, as well as information to indicate if there is more data available in the list and how to query it; this additional information can be used in order to perform pagination by querying for more "slices" or pages on the list. + +More specifically, we perform *cursor-based pagination,* in which the input used to query for "slices" of the list is a `cursor` and a `count`. Cursors are essentially opaque tokens that serve as markers or pointers to a position in the list. If you're curious to learn more about the details of cursor-based pagination and connections, check out the spec. + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/pagination.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/pagination.md new file mode 100644 index 0000000000000..ca2d662221436 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/pagination.md @@ -0,0 +1,141 @@ +--- +id: pagination +title: Pagination +slug: /guided-tour/list-data/pagination/ +description: Relay guide to pagination +keywords: +- pagination +- usePaginationFragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbPaginationUsingUseTransition from './fb/FbPaginationUsingUseTransition.md'; + +To actually perform pagination over the connection, we need to use the `loadNext` function to fetch the next page of items, which is available from `usePaginationFragment`: + + + + + + + +```js +import type {FriendsListComponent_user$key} from 'FriendsList_user.graphql'; + +const React = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + +const {Suspense} = require('React'); + +type Props = { + user: FriendsListComponent_user$key, +}; + +function 
FriendsListComponent(props: Props) { + const {data, loadNext} = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User + @refetchable(queryName: "FriendsListPaginationQuery") { + name + friends(first: $count, after: $cursor) + @connection(key: "FriendsList_user_friends") { + edges { + node { + name + age + } + } + } + } + `, + props.user, + ); + + return ( + <> +

Friends of {data.name}:

+
+ {(data.friends?.edges ?? []).map(edge => { + const node = edge.node; + return ( + }> + + + ); + })} +
+ + + + ); +} + +module.exports = FriendsListComponent; +``` + +Let's distill what's happening here: + +* `loadNext` takes a count to specify how many more items in the connection to fetch from the server. In this case, when `loadNext` is called we'll fetch the next 10 friends in the friends list of our currently rendered `User`. +* When the request to fetch the next items completes, the connection will be automatically updated and the component will re-render with the latest items in the connection. In our case, this means that the `friends` field will always contain *all* of the friends that we've fetched so far. By default, *Relay will automatically append new items to the connection upon completing a pagination request,* and will make them available to your fragment component*.* If you need a different behavior, check out our [Advanced Pagination Use Cases](../advanced-pagination/) section. +* `loadNext` may cause the component or new children components to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)). This means that you'll need to make sure that there's a `Suspense` boundary wrapping this component from above. + +
+ + +Often, you will also want to access information about whether there are more items available to load. To do this, you can use the `hasNext` value, also available from `usePaginationFragment`: + +```js +import type {FriendsListPaginationQuery} from 'FriendsListPaginationQuery.graphql'; +import type {FriendsListComponent_user$key} from 'FriendsList_user.graphql'; + +const React = require('React'); +const {Suspense} = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + +type Props = { + user: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + // ... + const { + data, + loadNext, + hasNext, + } = usePaginationFragment( + graphql`...`, + props.user, + ); + + return ( + <> +

Friends of {data.name}:

+ {/* ... */} + + {/* Only render button if there are more friends to load in the list */} + {hasNext ? ( + + ) : null} + + ); +} + +module.exports = FriendsListComponent; +``` + +* `hasNext` is a boolean which indicates if the connection has more items available. This information can be useful for determining if different UI controls should be rendered. In our specific case, we only render the `Button` if there are more friends available in the connection. + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/refetching-connections.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/refetching-connections.md new file mode 100644 index 0000000000000..b30bbf7dec66f --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/refetching-connections.md @@ -0,0 +1,210 @@ +--- +id: refetching-connections +title: Refetching Connections (Using and Changing Filters) +slug: /guided-tour/list-data/refetching-connections/ +description: Relay guide to refetching connections +keywords: +- pagination +- refetching +- connection +- useRefetchableFragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRefetchingConnectionsUsingUseTransition from './fb/FbRefetchingConnectionsUsingUseTransition.md'; + +Often times when querying for a list of data, you can provide different values in the query which serve as filters that change the result set, or sort it differently. + +Some examples of this are: + +* Building a search typeahead, where the list of results is a list filtered by the search term entered by the user. +* Changing the ordering mode of the list comments currently displayed for a post, which could produce a completely different set of comments from the server. +* Changing the way News Feed is ranked and sorted. 
+ + +Specifically, in GraphQL, connection fields can accept arguments to sort or filter the set of queried results: + +```graphql +fragment UserFragment on User { + name + friends(order_by: DATE_ADDED, search_term: "Alice", first: 10) { + edges { + node { + name + age + } + } + } +} +``` + + +In Relay, we can pass those arguments as usual using GraphQL [variables](../../rendering/variables/) + +```js +type Props = { + userRef: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + const userRef = props.userRef; + + const {data, ...} = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User { + name + friends( + order_by: $orderBy, + search_term: $searchTerm, + after: $cursor, + first: $count, + ) @connection(key: "FriendsListComponent_user_friends_connection") { + edges { + node { + name + age + } + } + } + } + `, + userRef, + ); + + return (...); +} +``` + + +When paginating, the original values for those filters will be preserved: + +```js +type Props = { + userRef: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + const userRef = props.userRef; + + const {data, loadNext} = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User { + name + friends(order_by: $orderBy, search_term: $searchTerm) + @connection(key: "FriendsListComponent_user_friends_connection") { + edges { + node { + name + age + } + } + } + } + `, + userRef, + ); + + return ( + <> +

Friends of {data.name}:

+ {...} + + {/* + Loading the next items will use the original order_by and search_term + values used for the initial query + */ } + + + ); +} +``` +* Note that calling `loadNext` will use the original `order_by` and `search_term` values used for the initial query. During pagination, these value won't (*and shouldn't*) change. + +If we want to refetch the connection with *different* variables, we can use the `refetch` function provided by `usePaginationFragment`, similarly to how we do so when [Refetching Fragments with Different Data](../../refetching/refetching-fragments-with-different-data/): + + + + + + + +```js +/** + * FriendsListComponent.react.js + */ +import type {FriendsListComponent_user$key} from 'FriendsListComponent_user.graphql'; + +const React = require('React'); +const {useState, useEffect} = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + + +type Props = { + searchTerm?: string, + user: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + const searchTerm = props.searchTerm; + const {data, loadNext, refetch} = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User { + name + friends( + order_by: $orderBy, + search_term: $searchTerm, + after: $cursor, + first: $count, + ) @connection(key: "FriendsListComponent_user_friends_connection") { + edges { + node { + name + age + } + } + } + } + `, + props.user, + ); + + useEffect(() => { + // When the searchTerm provided via props changes, refetch the connection + // with the new searchTerm + refetch({first: 10, search_term: searchTerm}, {fetchPolicy: 'store-or-network'}); + }, [searchTerm]) + + return ( + <> +

Friends of {data.name}:

+ + {/* When the button is clicked, refetch the connection but sorted differently */} + + + ... + + + ); +} +``` + +Let's distill what's going on here: + +* Calling `refetch` and passing a new set of variables will fetch the fragment again *with the newly provided variables*. The variables you need to provide are a subset of the variables that the generated query expects; the generated query will require an `id`, if the type of the fragment has an `id` field, and any other variables that are transitively referenced in your fragment. + * In our case, we need to pass the count we want to fetch as the `first` variable, and we can pass different values for our filters, like `orderBy` or `searchTerm`. +* This will re-render your component and may cause it to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)) if it needs to send and wait for a network request. If `refetch` causes the component to suspend, you'll need to make sure that there's a `Suspense` boundary wrapping this component from above. +* Conceptually, when we call refetch, we're fetching the connection *from scratch*. In other words, we're fetching it again from the *beginning* and *"resetting"* our pagination state. For example, if we fetch the connection with a different `search_term`, our pagination information for the previous `search_term` no longer makes sense, since we're essentially paginating over a new list of items. 
+ + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/rendering-connections.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/rendering-connections.md new file mode 100644 index 0000000000000..377838ea5d99e --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/rendering-connections.md @@ -0,0 +1,112 @@ +--- +id: rendering-connections +title: Rendering Connections +slug: /guided-tour/list-data/rendering-connections/ +description: Relay guide to rendering connections +keywords: +- pagination +- usePaginationFragment +- connection +--- + +import DocsRating from '@site/src/core/DocsRating'; +import FbSuspenseListAlternative from './fb/FbSuspenseListAlternative.md'; +import FbRenderingConnectionsUsingSuspenseList from './fb/FbRenderingConnectionsUsingSuspenseList.md'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +In Relay, in order to display a list of data that is backed by a GraphQL connection, first you need to declare a fragment that queries for a connection: + +```js +const {graphql} = require('RelayModern'); + +const userFragment = graphql` + fragment UserFragment on User { + name + friends(after: $cursor, first: $count) + @connection(key: "UserFragment_friends") { + edges { + node { + ...FriendComponent + } + } + } + } +`; +``` + +* In the example above, we're querying for the `friends` field, which is a connection; in other words, it adheres to the connection spec. Specifically, we can query the `edges` and `node`s in the connection; the `edges` usually contain information about the relationship between the entities, while the `node`s are the actual entities at the other end of the relationship; in this case, the `node`s are objects of type `User` representing the user's friends. +* In order to indicate to Relay that we want to perform pagination over this connection, we need to mark the field with the `@connection` directive. 
We must also provide a *static* unique identifier for this connection, known as the `key`. We recommend the following naming convention for the connection key: `_`. +* We will go into more detail later as to why it is necessary to mark the field as a `@connection` and give it a unique `key` in our [Updating Connections](../updating-connections/) section. + + +In order to render this fragment which queries for a connection, we can use the `usePaginationFragment` Hook: + + + + + + + +```js +import type {FriendsListComponent_user$key} from 'FriendsList_user.graphql'; + +const React = require('React'); +const {Suspense} = require('React'); + +const {graphql, usePaginationFragment} = require('react-relay'); + +type Props = { + user: FriendsListComponent_user$key, +}; + +function FriendsListComponent(props: Props) { + const {data} = usePaginationFragment( + graphql` + fragment FriendsListComponent_user on User + @refetchable(queryName: "FriendsListPaginationQuery") { + name + friends(first: $count, after: $cursor) + @connection(key: "FriendsList_user_friends") { + edges { + node { + ...FriendComponent + } + } + } + } + `, + props.user, + ); + + + return ( + <> + {data.name != null ?

<h1>Friends of {data.name}:</h1>

: null} + +
+ {/* Extract each friend from the resulting data */} + {(data.friends?.edges ?? []).map(edge => { + const node = edge.node; + return ( + }> + + + ); + })} +
+ + ); +} + +module.exports = FriendsListComponent; +``` + + +* `usePaginationFragment` behaves the same way as a `useFragment` (see the [Fragments](../../rendering/fragments/) section), so our list of friends is available under `data.friends.edges.node`, as declared by the fragment. However, it also has a few additions: + * It expects a fragment that is a connection field annotated with the `@connection` directive + * It expects a fragment that is annotated with the `@refetchable` directive. Note that `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are on `Viewer`, on `Query`, on any type that implements `Node` (i.e. a type that has an `id` field), or on a `@fetchable` type. For more info on `@fetchable` types, see [this post](https://fb.workplace.com/groups/graphql.fyi/permalink/1539541276187011/). +* It takes two Flow type parameters: the type of the generated query (in our case `FriendsListPaginationQuery`), and a second type which can always be inferred, so you only need to pass underscore (`_`). + +
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/streaming-pagination.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/streaming-pagination.md new file mode 100644 index 0000000000000..2cb8795be20b4 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/streaming-pagination.md @@ -0,0 +1,87 @@ +--- +id: streaming-pagination +title: Streaming Pagination +slug: /guided-tour/list-data/streaming-pagination/ +description: Relay guide to streaming pagination +keywords: +- pagination +- usePaginationFragment +- connection +- streaming +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + +Additionally, we can combine `usePaginationFragment` with Relay's [Incremental Data Delivery](../../../guides/incremental-data-delivery/) capabilities in order to fetch a connection and incrementally receive each item in the connection as it becomes ready, instead of waiting for the whole list of items to be returned in a single payload. This can be useful when for example computing each item in the connection is an expensive operation in the server, and we want to be able to show the first item(s) in the list as soon as possible without blocking on *all* the items that we need to become available; for example, on News Feed a user could ideally see and start interacting with the first story while additional stories loaded in below. + + + + + +Additionally, we can combine `usePaginationFragment` with Relay's Incremental Data Delivery capabilities in order to fetch a connection and incrementally receive each item in the connection as it becomes ready, instead of waiting for the whole list of items to be returned in a single payload. 
This can be useful when, for example, computing each item in the connection is an expensive operation on the server,
For example, consider a product that today makes an initial fetch for 2 items and then *immediately* issues a pagination query to fetch 3 more. With streaming, this product could instead choose to fetch 5 items in the initial query with initial_count=2, in order to fetch the 2 items quickly while avoiding a round trip for the subsequent 3 items. +* As with regular usage of `usePaginationFragment`, the connection will be automatically updated as new items are streamed in from the server, and the component will re-render each time with the latest items in the connection. + + + + +For more information, see our docs on [Incremental Data Delivery](../../../guides/incremental-data-delivery/#stream_connection). + + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/list-data/updating-connections.md b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/updating-connections.md new file mode 100644 index 0000000000000..af82fb204c3f5 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/list-data/updating-connections.md @@ -0,0 +1,603 @@ +--- +id: updating-connections +title: Updating Connections +slug: /guided-tour/list-data/updating-connections/ +description: Relay guide to updating connections +keywords: +- pagination +- usePaginationFragment +- updating +- connection +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Usually when you're rendering a connection, you'll also want to be able to add or remove items to/from the connection in response to user actions. + +As explained in our [Updating Data](../../updating-data/) section, Relay holds a local in-memory store of normalized GraphQL data, where records are stored by their IDs. 
When creating mutations, subscriptions, or local data updates with Relay, you must provide an [`updater`](../../updating-data/graphql-mutations/#updater-functions) function, inside which you can access and read records, as well as write and make updates to them. When records are updated, any components affected by the updated data will be notified and re-rendered. + + +## Connection Records + +In Relay, connection fields that are marked with the `@connection` directive are stored as special records in the store, and they hold and accumulate *all* of the items that have been fetched for the connection so far. In order to add or remove items from a connection, we need to access the connection record using the connection `key`, which was provided when declaring a `@connection`; specifically, this allows us to access a connection inside an [`updater`](../../updating-data/graphql-mutations/#updater-functions) function using the `ConnectionHandler` APIs. + +For example, given the following fragment that declares a `@connection`, we can access the connection record inside an `updater` function in a few different ways: + +```js +const {graphql} = require('react-relay'); + +const storyFragment = graphql` + fragment StoryComponent_story on Story { + comments @connection(key: "StoryComponent_story_comments_connection") { + nodes { + body { + text + } + } + } + } +`; +``` + +### Accessing connections using `__id` + +We can query for a connection's `__id` field, and then use that `__id` to access the record in the store: + +```js +const fragmentData = useFragment( + graphql` + fragment StoryComponent_story on Story { + comments @connection(key: "StoryComponent_story_comments_connection") { + # Query for the __id field + __id + + # ... 
+ } + } + `, + props.story, +); + +// Get the connection record id +const connectionID = fragmentData?.comments?.__id; +``` + +Then use it to access the record in the store: + +```js +function updater(store: RecordSourceSelectorProxy) { + // connectionID is passed as input to the mutation/subscription + const connection = store.get(connectionID); + + // ... +} +``` + +:::note +The `__id` field is **NOT** something that your GraphQL API needs to expose. Instead, it's an identifier that Relay automatically adds to identify the connection record. +::: + +### Accessing connections using `ConnectionHandler.getConnectionID` + +If we have access to the ID of the parent record that holds the connection, we can access the connection record by using the `ConnectionHandler.getConnectionID` API: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + // Get the connection ID + const connectionID = ConnectionHandler.getConnectionID( + storyID, // passed as input to the mutation/subscription + 'StoryComponent_story_comments_connection', + ); + + // Get the connection record + const connectionRecord = store.get(connectionID); + + // ... +} +``` + +### Accessing connections using `ConnectionHandler.getConnection` + +If we have access to the parent record that holds the connection, we can access the connection record via the parent, by using the `ConnectionHandler.getConnection` API: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + // Get parent story record + // storyID is passed as input to the mutation/subscription + const storyRecord = store.get(storyID); + + // Get the connection record from the parent + const connectionRecord = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + ); + + // ... 
+} +``` + +## Adding edges + +There are a couple of alternatives for adding edges to a connection: + +### Using declarative directives + +Usually, mutation or subscription payloads will expose the new edges that were added on the server as a field with a single edge or list of edges. If your mutation or subscription exposes an edge or edges field that you can query for in the response, then you can use the `@appendEdge` and `@prependEdge` declarative mutation directives on that field in order to add the newly created edges to the specified connections (note that these directives also work on queries). + +Alternatively, mutation or subscription payloads might expose the new nodes that were added on the server as a field with a single node or list of nodes. If your mutation or subscription exposes a node or nodes field that you can query for in the response, then you can use the `@appendNode` and `@prependNode` declarative mutation directives on that field in order to add the newly created nodes, wrapped inside edges, to the specified connections (note that these directives also work on queries). + +These directives accept a `connections` parameter, which needs to be a GraphQL variable containing an array of connection IDs. Connection IDs can be obtained either by using the [`__id` field on connections](#accessing-connections-using-__id) or using the [`ConnectionHandler.getConnectionID`](#accessing-connections-using-connectionhandlergetconnectionid) API. + + +#### `@appendEdge` / `@prependEdge` + +These directives work on a field with a single edge or list of edges. `@prependEdge` will add the selected edges to the beginning of each connection defined in the `connections` array, whereas `@appendEdge` will add the selected edges to the end of each connection in the array. + +**Arguments:** +- `connections`: An array of connection IDs. 
Connection IDs can be obtained either by using the [`__id` field on connections](#accessing-connections-using-__id) or using the [`ConnectionHandler.getConnectionID`](#accessing-connections-using-connectionhandlergetconnectionid) API. + + +**Example:** + +```js +// Get the connection ID using the `__id` field +const connectionID = fragmentData?.comments?.__id; + +// Or get it using `ConnectionHandler.getConnectionID()` +const connectionID = ConnectionHandler.getConnectionID( + '', + 'StoryComponent_story_comments_connection', +); + +// ... + +// Mutation +commitMutation(environment, { + mutation: graphql` + mutation AppendCommentMutation( + # Define a GraphQL variable for the connections array + $connections: [ID!]! + $input: CommentCreateInput + ) { + commentCreate(input: $input) { + # Use @appendEdge or @prependEdge on the edge field + feedbackCommentEdge @appendEdge(connections: $connections) { + cursor + node { + id + } + } + } + } + `, + variables: { + input, + // Pass the `connections` array + connections: [connectionID], + }, +}); +``` + + +#### `@appendNode` / `@prependNode` + +These directives work on a field with a single node or list of nodes, and will create edges with the specified `edgeTypeName`. `@prependNode` will add edges containing the selected nodes to the beginning of each connection defined in the `connections` array, whereas `@appendNode` will add edges containing the selected nodes to the end of each connection in the array. + +**Arguments:** +- `connections`: An array of connection IDs. Connection IDs can be obtained either by using the [`__id` field on connections](#accessing-connections-using-__id) or using the [`ConnectionHandler.getConnectionID`](#accessing-connections-using-connectionhandlergetconnectionid) API. +- `edgeTypeName`: The type name of the edge that contains the node, corresponding to the edge type argument in `ConnectionHandler.createEdge`. 
+ +**Example:** +```js +// Get the connection ID using the `__id` field +const connectionID = fragmentData?.comments?.__id; + +// Or get it using `ConnectionHandler.getConnectionID()` +const connectionID = ConnectionHandler.getConnectionID( + '', + 'StoryComponent_story_comments_connection', +); + +// ... + +// Mutation +commitMutation(environment, { + mutation: graphql` + mutation AppendCommentMutation( + # Define a GraphQL variable for the connections array + $connections: [ID!]! + $input: CommentCreateInput + ) { + commentCreate(input: $input) { + # Use @appendNode or @prependNode on the node field + feedbackCommentNode @appendNode(connections: $connections, edgeTypeName: "CommentsEdge") { + id + } + } + } + `, + variables: { + input, + // Pass the `connections` array + connections: [connectionID], + }, +}); +``` + + +#### Order of execution + +For all of these directives, they will be executed in the following order within the mutation or subscription, as per the [order of execution of updates](../../updating-data/graphql-mutations/#order-of-execution-of-updater-functions): + +* When the mutation is initiated, after the optimistic response is handled, and after the optimistic updater function is executed, the `@prependEdge`, `@appendEdge`, `@prependNode`, and `@appendNode` directives will be applied to the optimistic response. +* If the mutation succeeds, after the data from the network response is merged with the existing values in the store, and after the updater function is executed, the `@prependEdge`, `@appendEdge`, `@prependNode`, and `@appendNode` directives will be applied to the data in the network response. +* If the mutation failed, the updates from processing the `@prependEdge`, `@appendEdge`, `@prependNode`, and `@appendNode` directives will be rolled back. 
+ + +### Manually adding edges + +The directives described [above](#using-declarative-directives) largely remove the need to manually add and remove items from a connection, however, they do not provide as much control as you can get with manually writing an updater, and may not fulfill every use case. + +In order to write an updater to modify the connection, we need to make sure we have access to the [connection record](#connection-record). Once we have the connection record, we also need a record for the new edge that we want to add to the connection. Usually, mutation or subscription payloads will contain the new edge that was added; if not, you can also construct a new edge from scratch. + +For example, in the following mutation we can query for the newly created edge in the mutation response: + +```js +const {graphql} = require('react-relay'); + +const createCommentMutation = graphql` + mutation CreateCommentMutation($input: CommentCreateData!) { + comment_create(input: $input) { + comment_edge { + cursor + node { + body { + text + } + } + } + } + } +`; +``` + +* Note that we also query for the `cursor` for the new edge; this isn't strictly necessary, but it is information that will be required if we need to perform pagination based on that `cursor`. 
+ + +Inside an [`updater`](../../updating-data/graphql-mutations/#updater-functions), we can access the edge inside the mutation response using Relay store APIs: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + const connectionRecord = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + ); + + // Get the payload returned from the server + const payload = store.getRootField('comment_create'); + + // Get the edge inside the payload + const serverEdge = payload.getLinkedRecord('comment_edge'); + + // Build edge for adding to the connection + const newEdge = ConnectionHandler.buildConnectionEdge( + store, + connectionRecord, + serverEdge, + ); + + // ... +} +``` + +* The mutation payload is available as a root field on that store, which can be read using the `store.getRootField` API. In our case, we're reading `comment_create`, which is the root field in the response. +* Note that we need to construct the new edge from the edge received from the server using `ConnectionHandler.buildConnectionEdge` before we can add it to the connection. + + +If you need to create a new edge from scratch, you can use `ConnectionHandler.createEdge`: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + const connectionRecord = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + ); + + // Create a new local Comment record + const id = `client:new_comment:${randomID()}`; + const newCommentRecord = store.create(id, 'Comment'); + + // Create new edge + const newEdge = ConnectionHandler.createEdge( + store, + connectionRecord, + newCommentRecord, + 'CommentEdge', /* GraphQl Type for edge */ + ); + + // ... 
+} +``` + + +Once we have a new edge record, we can add it to the the connection using `ConnectionHandler.insertEdgeAfter` or `ConnectionHandler.insertEdgeBefore`: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + const connectionRecord = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + ); + + const newEdge = (...); + + // Add edge to the end of the connection + ConnectionHandler.insertEdgeAfter( + connectionRecord, + newEdge, + ); + + // Add edge to the beginning of the connection + ConnectionHandler.insertEdgeBefore( + connectionRecord, + newEdge, + ); +} +``` + +* Note that these APIs will *mutate* the connection in place + +:::note +Check out our complete [Relay Store APIs](../../../api-reference/store/). +::: + +## Removing edges + +### Using the declarative deletion directive + +Similarly to the [directives to add edges](#using-declarative-directives), we can use the `@deleteEdge` directive to delete edges from connections. If your mutation or subscription exposes a field with the ID or IDs of the nodes that were deleted that you can query for in the response, then you can use the `@deleteEdge` directive on that field to delete the respective edges from the connection (note that this directive also works on queries). + +#### `@deleteEdge` + +Works on GraphQL fields that return an `ID` or `[ID]`. Will delete the edges with nodes that match the `id` from each connection defined in the `connections` array. + +**Arguments:** +- `connections`: An array of connection IDs. Connection IDs can be obtained either by using the [`__id` field on connections](#accessing-connections-using-__id) or using the [`ConnectionHandler.getConnectionID`](#accessing-connections-using-connectionhandlergetconnectionid) API. 
+ + +**Example:** + +```js +// Get the connection ID using the `__id` field +const connectionID = fragmentData?.comments?.__id; + +// Or get it using `ConnectionHandler.getConnectionID()` +const connectionID = ConnectionHandler.getConnectionID( + '', + 'StoryComponent_story_comments_connection', +); + +// ... + +// Mutation +commitMutation(environment, { + mutation: graphql` + mutation DeleteCommentsMutation( + # Define a GraphQL variable for the connections array + $connections: [ID!]! + $input: CommentsDeleteInput + ) { + commentsDelete(input: $input) { + deletedCommentIds @deleteEdge(connections: $connections) + } + } + `, + variables: { + input, + // Pass the `connections` array + connections: [connectionID], + }, +}); +``` + +### Manually removing edges + +`ConnectionHandler` provides a similar API to remove an edge from a connection, via `ConnectionHandler.deleteNode`: + +```js +const {ConnectionHandler} = require('RelayModern'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + const connectionRecord = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + ); + + // Remove edge from the connection, given the ID of the node + ConnectionHandler.deleteNode( + connectionRecord, + commentIDToDelete, + ); +} +``` + +* In this case `ConnectionHandler.deleteNode` will remove an edge given a *`node` ID*. This means it will look up which edge in the connection contains a node with the provided ID, and remove that edge. +* Note that this API will *mutate* the connection in place. + + +:::note +Remember: when performing any of the operations described here to mutate a connection, any fragment or query components that are rendering the affected connection will be notified and re-render with the latest version of the connection. +::: + + +## Connection identity with filters + +In our previous examples, our connections didn't take any arguments as filters. 
If you declared a connection that takes arguments as filters, the values used for the filters will be part of the connection identifier. In other words, *each of the values passed in as connection filters will be used to identify the connection in the Relay store.* + +:::note +Note that this excludes pagination arguments, i.e. it excludes `first`, `last`, `before`, and `after`. +::: + + +For example, let's say the `comments` field took the following arguments, which we pass in as GraphQL [variables](../../rendering/variables/): + +```js +const {graphql} = require('RelayModern'); + +const storyFragment = graphql` + fragment StoryComponent_story on Story { + comments( + order_by: $orderBy, + filter_mode: $filterMode, + language: $language, + ) @connection(key: "StoryComponent_story_comments_connection") { + edges { + nodes { + body { + text + } + } + } + } + } +`; +``` + +In the example above, this means that whatever values we used for `$orderBy`, `$filterMode` and `$language` when we queried for the `comments` field will be part of the connection identifier, and we'll need to use those values when accessing the connection record from the Relay store. 
+ +In order to do so, we need to pass a third argument to `ConnectionHandler.getConnection`, with concrete filter values to identify the connection: + +```js +const {ConnectionHandler} = require('RelayModern'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + + // Get the connection instance for the connection with comments sorted + // by the date they were added + const connectionRecordSortedByDate = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + {order_by: '*DATE_ADDED*', filter_mode: null, language: null} + ); + + // Get the connection instance for the connection that only contains + // comments made by friends + const connectionRecordFriendsOnly = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + {order_by: null, filter_mode: '*FRIENDS_ONLY*', langugage: null} + ); +} +``` + +This implies that by default, *each combination of values used for filters will produce a different record for the connection.* + +When making updates to a connection, you will need to make sure to update all of the relevant records affected by a change. 
For example, if we were to add a new comment to our example connection, we'd need to make sure *not* to add the comment to the `FRIENDS_ONLY` connection, if the new comment wasn't made by a friend of the user: + +```js +const {ConnectionHandler} = require('relay-runtime'); + +function updater(store: RecordSourceSelectorProxy) { + const storyRecord = store.get(storyID); + + // Get the connection instance for the connection with comments sorted + // by the date they were added + const connectionRecordSortedByDate = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + {order_by: '*DATE_ADDED*', filter_mode: null, language: null} + ); + + // Get the connection instance for the connection that only contains + // comments made by friends + const connectionRecordFriendsOnly = ConnectionHandler.getConnection( + storyRecord, + 'StoryComponent_story_comments_connection', + {order_by: null, filter_mode: '*FRIENDS_ONLY*', language: null} + ); + + const newComment = (...); + const newEdge = (...); + + ConnectionHandler.insertEdgeAfter( + connectionRecordSortedByDate, + newEdge, + ); + + if (isMadeByFriend(storyRecord, newComment) { + // Only add new comment to friends-only connection if the comment + // was made by a friend + ConnectionHandler.insertEdgeAfter( + connectionRecordFriendsOnly, + newEdge, + ); + } +} +``` + + + +_Managing connections with many filters:_ + +As you can see, just adding a few filters to a connection can make the complexity and number of connection records that need to be managed explode. In order to more easily manage this, Relay provides 2 strategies: + +1) Specify exactly *which* filters should be used as connection identifiers. + +By default, *all* non-pagination filters will be used as part of the connection identifier. 
However, when declaring a `@connection`, you can specify the exact set of filters to use for connection identity: + +```js +const {graphql} = require('relay-runtime'); + +const storyFragment = graphql` + fragment StoryComponent_story on Story { + comments( + order_by: $orderBy + filter_mode: $filterMode + language: $language + ) + @connection( + key: "StoryComponent_story_comments_connection" + filters: ["order_by", "filter_mode"] + ) { + edges { + nodes { + body { + text + } + } + } + } + } +`; +``` + +* By specifying `filters` when declaring the `@connection`, we're indicating to Relay the exact set of filter values that should be used as part of connection identity. In this case, we're excluding `language`, which means that only values for `order_by` and `filter_mode` will affect connection identity and thus produce new connection records. +* Conceptually, this means that we're specifying which arguments affect the output of the connection from the server, or in other words, which arguments are *actually* *filters*. If one of the connection arguments doesn't actually change the set of items that are returned from the server, or their ordering, then it isn't really a filter on the connection, and we don't need to identify the connection differently when that value changes. In our example, changing the `language` of the comments we request doesn't change the set of comments that are returned by the connection, so it is safe to exclude it from `filters`. +* This can also be useful if we know that any of the connection arguments will never change in our app, in which case it would also be safe to exclude from `filters`. 
+ + + +2) An easier API alternative to manage multiple connections with multiple filter values is still pending + + +> TBD + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/prefetching-queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/prefetching-queries.md new file mode 100644 index 0000000000000..46cb0f1a9a054 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/prefetching-queries.md @@ -0,0 +1,10 @@ +--- +id: prefetching-queries +title: Prefetching Queries +slug: /guided-tour/accessing-data-without-react/prefetching-queries/ +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-fragments.md b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-fragments.md new file mode 100644 index 0000000000000..50932e7d0da96 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-fragments.md @@ -0,0 +1,12 @@ +--- +id: reading-fragments +title: Reading Fragments +slug: /guided-tour/accessing-data-without-react/reading-fragments/ +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-queries.md new file mode 100644 index 0000000000000..ee479755318f3 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/reading-queries.md @@ -0,0 +1,12 @@ +--- +id: reading-queries +title: Reading Queries +slug: 
/guided-tour/accessing-data-without-react/reading-queries/ +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/retaining-queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/retaining-queries.md new file mode 100644 index 0000000000000..e0e865a20020e --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/retaining-queries.md @@ -0,0 +1,51 @@ +--- +id: retaining-queries +title: Retaining Queries +slug: /guided-tour/accessing-data-without-react/retaining-queries/ +description: Relay guide to retaining queries +keywords: +- retaining +- query +- environment +- garbage collection +- gc +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +In order to manually retain a query so that the data it references isn’t garbage collected by Relay, we can use the `environment.retain` method: + +```js +const { + createOperationDescriptor, + getRequest, + graphql, +} = require('relay-runtime') + +// Query graphql object +const query = graphql`...`; + +// Construct Relay's internal representation of the query +const queryRequest = getRequest(query); +const queryDescriptor = createOperationDescriptor( + queryRequest, + variables +); + +// Retain query; this will prevent the data for this query and +// variables from being garbage collected by Relay +const disposable = environment.retain(queryDescriptor); + +// Disposing of the disposable will release the data for this query +// and variables, meaning that it can be deleted at any moment +// by Relay's garbage collection if it hasn't been retained elsewhere +disposable.dispose(); +``` + +:::note +Relay automatically manages the query data retention based on any mounted
query components that are rendering the data, so you usually should not need to call retain directly within product code. For any advanced or special use cases, query data retention should usually be handled within infra-level code, such as a Router. +::: + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/subscribing-to-queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/subscribing-to-queries.md new file mode 100644 index 0000000000000..24321e5e076ba --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/managing-data-outside-react/subscribing-to-queries.md @@ -0,0 +1,12 @@ +--- +id: subscribing-to-queries +title: Subscribing to Queries +slug: /guided-tour/accessing-data-without-react/subscribing-to-queries/ +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/OssAvoidSuspenseNote.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/OssAvoidSuspenseNote.md new file mode 100644 index 0000000000000..27fd6885b1f5d --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/OssAvoidSuspenseNote.md @@ -0,0 +1,3 @@ +:::note +In future versions of React when concurrent rendering is supported, React will provide an option to support this case and avoid hiding already rendered content with a Suspense fallback when suspending. 
+::: diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/introduction.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/introduction.md new file mode 100644 index 0000000000000..08c2e1948b5ea --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/introduction.md @@ -0,0 +1,17 @@ +--- +id: introduction +title: Introduction +slug: /guided-tour/refetching/ +description: Relay guide to refetching +keywords: +- refetching +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +After an app has been initially rendered, there are various scenarios in which you might want to refetch and show *new* or *different* data (e.g. change the currently displayed item), or maybe refresh the currently rendered data with the latest version from the server (e.g. refreshing a count), usually as a result of an event or user interaction. + +In this section we'll cover some of the most common scenarios and how to build them with Relay. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-fragments-with-different-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-fragments-with-different-data.md new file mode 100644 index 0000000000000..81e954561a270 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-fragments-with-different-data.md @@ -0,0 +1,171 @@ +--- +id: refetching-fragments-with-different-data +title: Refetching Fragments with Different Data +slug: /guided-tour/refetching/refetching-fragments-with-different-data/ +description: Relay guide to refetching fragments with different data +keywords: +- refetching +- fragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRefetchingFragments from './fb/FbRefetchingFragments.md'; +import FbAvoidSuspenseCaution from './fb/FbAvoidSuspenseCaution.md'; +import OssAvoidSuspenseNote from './OssAvoidSuspenseNote.md'; + +When referring to **"refetching a fragment"**, we mean fetching a *different* version of the data than the one that was originally rendered by the fragment. For example, this might be to change a currently selected item, to render a different list of items than the one being shown, or more generally to transition the currently rendered content to show new or different content. + +Conceptually, this means fetching and rendering the currently rendered fragment again, but under a new query with *different variables*; or in other words, rendering the fragment under a new query root. Remember that *fragments can't be fetched by themselves: they need to be part of a query,* so we can't just "fetch" the fragment again by itself.
+ +## Using `useRefetchableFragment` + +To do so, we can also use the [`useRefetchableFragment`](../../../api-reference/use-refetchable-fragment/) Hook in combination with the `@refetchable` directive, which will automatically generate a query to refetch the fragment under, and which we can fetch using the `refetch` function: + + + + + + + +```js +import type {CommentBody_comment$key} from 'CommentBody_comment.graphql'; + +type Props = { + comment: CommentBody_comment$key, +}; + +function CommentBody(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment CommentBody_comment on Comment + # @refetchable makes it so Relay autogenerates a query for + # fetching this fragment + @refetchable(queryName: "CommentBodyRefetchQuery") { + body(lang: $lang) { + text + } + } + `, + props.comment, + ); + + const refetchTranslation = () => { + // We call refetch with new variables, + // which will refetch the @refetchable query with the + // new variables and update this component with the + // latest fetched data. + refetch({lang: 'SPANISH'}); + }; + + return ( + <> +

{data.body?.text}

+ + + ); +} +``` + +Let's distill what's happening in this example: + +* `useRefetchableFragment` behaves similarly to [`useFragment`](../../../api-reference/use-fragment/) (see the [Fragments](../../rendering/fragments/) section), but with a few additions: + * It expects a fragment that is annotated with the `@refetchable` directive. Note that `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are on `Viewer`, on `Query`, on any type that implements `Node` (i.e. a type that has an `id` field), or on a [`@fetchable`](https://fb.workplace.com/groups/graphql.fyi/permalink/1539541276187011/) type. +* It returns a `refetch` function, which is already Flow-typed to expect the query variables that the generated query expects. +* It takes two Flow type parameters: the type of the generated query (in our case `CommentBodyRefetchQuery`), and a second type which can always be inferred, so you only need to pass underscore (`_`). +* We're calling the `refetch` function with 2 main inputs: + * The first argument is the set of variables to fetch the fragment with. In this case, calling `refetch` and passing a new set of variables will fetch the fragment again *with the newly provided variables*. The variables you need to provide are a subset of the variables that the `@refetchable` query expects; the query will require an `id`, if the type of the fragment has an `id` field, and any other variables that are transitively referenced in your fragment. + * In this case we're passing the current comment `id` and a new value for the `translationType` variable to fetch the translated comment body. + * We are not passing a second options argument in this case, which means that we will use the default `fetchPolicy` of `'store-or-network'`, which will skip the network request if the new data for that fragment is already cached (as we covered in [Reusing Cached Data For Render](../../reusing-cached-data/)). 
+* Calling `refetch` will re-render the component and may cause `useRefetchableFragment` to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)). This means that you'll need to make sure that there's a `Suspense` boundary wrapping this component from above in order to show a fallback loading state. + +
+ +:::info +Note that this same behavior also applies to using the `refetch` function from [`usePaginationFragment`](../../../api-reference/use-pagination-fragment). +::: + +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +import type {CommentBody_comment$key} from 'CommentBody_comment.graphql'; + +type Props = { + comment: CommentBody_comment$key, +}; + +function CommentBody(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment CommentBody_comment on Comment + # @refetchable makes it so Relay autogenerates a query for + # fetching this fragment + @refetchable(queryName: "CommentBodyRefetchQuery") { + body(lang: $lang) { + text + } + } + `, + props.comment, + ); + + const [isRefetching, setIsRefreshing] = useState(false) + const refetchTranslation = () => { + if (isRefetching) { return; } + setIsRefreshing(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefreshing(false); + + // *After* the query has been fetched, we call + // refetch again to re-render with the updated data. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + refetch({lang: 'SPANISH'}, {fetchPolicy: 'store-only'}); + }, + error: () => { + setIsRefreshing(false); + } + }); + }; + + return ( + <> +

{data.body?.text}

+ + + ); +} +``` + +Let's distill what's going on here: + +* When refetching, we now keep track of our own `isRefetching` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI in our component, *without* hiding the content. +* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. When the `fetchQuery` network request completes, we call `refetch` so that we render the updated data, similar to the previous example. +* At this point, when `refetch` is called, the data for the fragment should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-queries-with-different-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-queries-with-different-data.md new file mode 100644 index 0000000000000..6640c5a9c101c --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refetching-queries-with-different-data.md @@ -0,0 +1,344 @@ +--- +id: refetching-queries-with-different-data +title: Refetching Queries with Different Data +slug: /guided-tour/refetching/refetching-queries-with-different-data/ +description: Relay guide to refetching queries with different data +keywords: +- refetching +- query +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRefetchingQueriesUsingUseQueryLoader from './fb/FbRefetchingQueriesUsingUseQueryLoader.md'; +import FbRefetchingQueriesUsingUseLazyLoadQuery from './fb/FbRefetchingQueriesUsingUseLazyLoadQuery.md'; +import FbAvoidSuspenseCaution from './fb/FbAvoidSuspenseCaution.md'; +import OssAvoidSuspenseNote from './OssAvoidSuspenseNote.md'; + +When referring to **"refetching a query"**, 
we mean fetching the query again for *different* data than was originally rendered by the query. For example, this might be to change a currently selected item, to render a different list of items than the one being shown, or more generally to transition the currently rendered content to show new or different content. + +## When using `useQueryLoader` / `loadQuery` + +Similarly to [Refreshing Queries with `useQueryLoader`](../refreshing-queries/#when-using-usequeryloader--loadquery), we can also use the `useQueryLoader` Hook described in our [Fetching Queries for Render](../../rendering/queries/#fetching-queries-for-render) section, but this time passing *different query variables*: + + + + + + + +```js +/** + * App.react.js + */ +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const variables = {id: '4'}; + const [queryRef, loadQuery] = useQueryLoader( + AppQuery, + props.appQueryRef /* initial query ref */ + ); + + const refetch = useCallback(() => { + // Load the query again using the same original variables. + // Calling loadQuery will update the value of queryRef. + loadQuery({id: 'different-id'}); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +```js +/** + * MainContent.react.js + */ + +// Renders the preloaded query, given the query reference +function MainContent(props) { + const {refetch, queryRef} = props; + const data = usePreloadedQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + friends { + count + } + } + } + `, + queryRef, + ); + + return ( + <> +

{data.user?.name}

+
Friends count: {data.user?.friends?.count}
+ + + ); +} +``` + +Let's distill what's going on here: + +* We call `loadQuery` in the event handler for refetching, so the network request starts immediately, and then pass the `queryRef` to `usePreloadedQuery`, so it renders the updated data. +* We are not passing a `fetchPolicy` to `loadQuery`, meaning that it will use the default value of `'store-or-network'`. We could provide a different policy in order to specify whether to use locally cached data (as we covered in [Reusing Cached Data For Render](../../reusing-cached-data/)). +* Calling `loadQuery` will re-render the component and may cause `usePreloadedQuery` to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)). This means that we'll need to make sure that there's a `Suspense` boundary wrapping the `MainContent` component, in order to show a fallback loading state. + +
+ + +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +/** + * App.react.js + */ +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const environment = useRelayEnvironment(); + const [queryRef, loadQuery] = useQueryLoader( + AppQuery, + props.appQueryRef /* initial query ref */ + ); + const [isRefetching, setIsRefetching] = useState(false) + + const refetch = useCallback(() => { + if (isRefetching) { return; } + setIsRefetching(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefetching(false); + + // *After* the query has been fetched, we call + // loadQuery again to re-render with a new + // queryRef. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + loadQuery({id: 'different-id'}, {fetchPolicy: 'store-only'}); + }, + error: () => { + setIsRefetching(false); + } + }); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +Let's distill what's going on here: + +* When refetching, we now keep track of our own `isRefetching` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI inside the `MainContent` component, *without* hiding the `MainContent`. +* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. 
When the `fetchQuery` network request completes, we call `loadQuery` so that we obtain an updated `queryRef` that we then pass to `usePreloadedQuery` in order render the updated data, similar to the previous example. +* At this point, when `loadQuery` is called, the data for the query should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + +## When using `useLazyLoadQuery` + +Similarly to [Refreshing Queries with `useLazyLoadQuery`](../refreshing-queries/#when-using-uselazyloadquery), we can also use the [`useLazyLoadQuery`](../../../api-reference/use-lazy-load-query/) Hook described in our [Lazily Fetching Queries during Render](../../rendering/queries/#lazily-fetching-queries-during-render) section, but this time passing *different query variables*: + + + + + + + +```js +/** + * App.react.js + */ +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const [queryArgs, setQueryArgs] = useState({ + options: {fetchKey: 0}, + variables: {id: '4'}, + }); + + const refetch = useCallback(() => { + // Trigger a re-render of useLazyLoadQuery with new variables, + // *and* an updated fetchKey. + // The new fetchKey will ensure that the query is fully + // re-evaluated and refetched. + setQueryArgs(prev => ({ + options: { + fetchKey: (prev?.options.fetchKey ?? 0) + 1, + }, + variables: {id: 'different-id'} + })); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +```js +/** + * MainContent.react.js + */ +// Fetches and renders the query, given the fetch options +function MainContent(props) { + const {refetch, queryArgs} = props; + const data = useLazyLoadQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + friends { + count + } + } + } + `, + queryArgs.variables, + queryArgs.options, + ); + + return ( + <> +

{data.user?.name}

+
Friends count: {data.user.friends?.count}
+ + + ); +} +``` + +Let's distill what's going on here: + +* We update the component in the event handler for refreshing by setting new query args in state. This will cause the `MainContent` component that uses `useLazyLoadQuery` to re-render with the new `variables` and `fetchKey`, and refetch the query upon rendering. +* We are passing a new value of `fetchKey` which we increment on every update. Passing a new `fetchKey` to `useLazyLoadQuery` on every update will ensure that the query is fully re-evaluated and refetched. +* We are not passing a new `fetchPolicy` to `useLazyLoadQuery`, meaning that it will use the default value of `'store-or-network'`. We could provide a different policy in order to specify whether to use locally cached data (as we covered in [Reusing Cached Data For Render](../../reusing-cached-data/)). +* The state update in `refetch` will re-render the component and may cause the component to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)). This means that we'll need to make sure that there's a `Suspense` boundary wrapping the `MainContent` component, in order to show a fallback loading state. + + +
+ +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +/** + * App.react.js + */ +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const environment = useRelayEnvironment(); + const [isRefreshing, setIsRefreshing] = useState(false) + const [queryArgs, setQueryArgs] = useState({ + options: {fetchKey: 0, fetchPolicy: 'store-or-network'}, + variables: {id: '4'}, + }); + + const refetch = useCallback(() => { + if (isRefreshing) { return; } + setIsRefreshing(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefreshing(false); + + // *After* the query has been fetched, we update + // our state to re-render with the new fetchKey + // and fetchPolicy. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + setQueryArgs(prev => ({ + options: { + fetchKey: (prev?.options.fetchKey ?? 0) + 1, + fetchPolicy: 'store-only', + }, + variables: {id: 'different-id'} + })); + }, + error: () => { + setIsRefreshing(false); + } + }); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +Let's distill what's going on here: + +* When refetching, we now keep track of our own `isRefetching` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI inside the `MainContent` component, *without* hiding the `MainContent`. 
+* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. When the `fetchQuery` network request completes, we update our state so that we re-render with an updated `fetchKey` and `fetchPolicy` that we then pass to `useLazyLoadQuery` in order to render the updated data, similar to the previous example. +* At this point, when we update the state, the data for the query should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-fragments.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-fragments.md new file mode 100644 index 0000000000000..937af7feb3495 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-fragments.md @@ -0,0 +1,191 @@ +--- +id: refreshing-fragments +title: Refreshing Fragments +slug: /guided-tour/refetching/refreshing-fragments/ +description: Relay guide to refreshing fragments +keywords: +- refreshing +- fragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRefreshingUsingRealTimeFeatures from './fb/FbRefreshingUsingRealTimeFeatures.md'; +import FbRefreshingFragments from './fb/FbRefreshingFragments.md'; +import FbAvoidSuspenseCaution from './fb/FbAvoidSuspenseCaution.md'; +import OssAvoidSuspenseNote from './OssAvoidSuspenseNote.md'; + +When referring to **"refreshing a fragment"**, we mean fetching the *exact* same data that was originally rendered by the fragment, in order to get the most up-to-date version of that data from the server.
+ +## Using real-time features + + + + + +If we want to keep our data up to date with the latest version from the server, the first thing to consider is if it is appropriate to use any real-time features, which can make it easier to automatically keep the data up to date without manually refreshing the data periodically. + +One example of this is using [GraphQL Subscriptions](https://relay.dev/docs/guided-tour/updating-data/graphql-subscriptions/), which will require additional configuration on your server and [network layer](https://relay.dev/docs/guided-tour/updating-data/graphql-subscriptions/#configuring-the-network-layer). + + +## Using `useRefetchableFragment` + +In order to manually refresh the data for a fragment, we need a query to refetch the fragment under; remember, *fragments can't be fetched by themselves: they need to be part of a query,* so we can't just "fetch" the fragment again by itself. + +To do so, we can also use the [`useRefetchableFragment`](../../../api-reference/use-refetchable-fragment/) Hook in combination with the `@refetchable` directive, which will automatically generate a query to refetch the fragment under, and which we can fetch using the `refetch` function: + + + + + + + +```js +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment UserComponent_user on User + # @refetchable makes it so Relay autogenerates a query for + # fetching this fragment + @refetchable(queryName: "UserComponentRefreshQuery") { + id + name + friends { + count + } + } + `, + props.user, + ); + + const refresh = useCallback(() => { + // We call refetch with empty variables: `{}`, + // which will refetch the @refetchable query with the same + // original variables the fragment was fetched with, and update + // this component with the latest fetched data.
+ // The fetchPolicy ensures we always fetch from the server and skip + // the local data cache. + refetch({}, {fetchPolicy: 'network-only'}); + }, [/* ... */]); + + return ( + <> +

{data.name}

+
Friends count: {data.friends?.count}
+ + + ); +} +``` + +Let's distill what's happening in this example: + +* `useRefetchableFragment` behaves similarly to [`useFragment`](../../../api-reference/use-fragment/) (see the [Fragments](../../rendering/fragments/) section), but with a few additions: + * It expects a fragment that is annotated with the `@refetchable` directive. Note that `@refetchable` directive can only be added to fragments that are "refetchable", that is, on fragments that are on `Viewer`, on `Query`, on any type that implements `Node` (i.e. a type that has an `id` field). +* It returns a `refetch` function, which is already Flow-typed to expect the query variables that the generated query expects +* It takes two Flow type parameters: the type of the generated query (in our case `UserComponentRefreshQuery`), and a second type which can always be inferred, so you only need to pass underscore (`_`). +* We're calling the `refetch` function with 2 main inputs: + * The first argument is the set of variables to fetch the fragment with. In this case, calling `refetch` and passing an empty set of variables will fetch the fragment again *with the exact same variables the fragment was originally fetched with,* which is what we want for a refresh. + * In the second argument we are passing a `fetchPolicy` of `'network-only'` to ensure that we always fetch from the network and skip the local data cache. +* Calling `refetch` will re-render the component and cause `useRefetchableFragment` to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)), since a network request will be required due to the `fetchPolicy` we are using. This means that you'll need to make sure that there's a `Suspense` boundary wrapping this component from above in order to show a fallback loading state. + +
+ +:::info +Note that this same behavior also applies to using the `refetch` function from [`usePaginationFragment`](../../../api-reference/use-pagination-fragment). +::: + +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment UserComponent_user on User + # @refetchable makes it so Relay autogenerates a query for + # fetching this fragment + @refetchable(queryName: "UserComponentRefreshQuery") { + id + name + friends { + count + } + } + `, + props.user, + ); + + const [isRefreshing, setIsRefreshing] = useState(false); + const refresh = useCallback(() => { + if (isRefreshing) { return; } + setIsRefreshing(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefreshing(false); + + // *After* the query has been fetched, we call + // refetch again to re-render with the updated data. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + refetch({}, {fetchPolicy: 'store-only'}); + }, + error: () => { + setIsRefreshing(false); + } + }); + }, [/* ... */]); + + return ( + <> +

{data.name}

+
Friends count: {data.friends?.count}
+ + + ); +} +``` + +Let's distill what's going on here: + +* When refreshing, we now keep track of our own `isRefreshing` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI in our component, *without* hiding the content. +* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. When the `fetchQuery` network request completes, we call `refetch` so that we render the updated data, similar to the previous example. +* At this point, when `refetch` is called, the data for the fragment should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-queries.md new file mode 100644 index 0000000000000..41af90583486d --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/refetching/refreshing-queries.md @@ -0,0 +1,357 @@ +--- +id: refreshing-queries +title: Refreshing Queries +slug: /guided-tour/refetching/refreshing-queries/ +description: Relay guide to refreshing queries +keywords: +- refreshing +- queries +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRefreshingUsingRealTimeFeatures from './fb/FbRefreshingUsingRealTimeFeatures.md'; +import FbRefreshingQueriesUsingUseQueryLoader from './fb/FbRefreshingQueriesUsingUseQueryLoader.md'; +import FbAvoidSuspenseCaution from './fb/FbAvoidSuspenseCaution.md'; +import FbRefreshingQueriesUsingUseLazyLoadQuery from './fb/FbRefreshingQueriesUsingUseLazyLoadQuery.md'; +import OssAvoidSuspenseNote from './OssAvoidSuspenseNote.md'; + +When referring to **"refreshing a query"**, we mean fetching the *exact* same data that was 
originally rendered by the query, in order to get the most up-to-date version of that data from the server. + +## Using real-time features + + + + + + +If we want to keep our data up to date with the latest version from the server, the first thing to consider is if it appropriate to use any real-time features, which can make it easier to automatically keep the data up to date without manually refreshing the data periodically. + +One example of this is using [GraphQL Subscriptions](https://relay.dev/docs/guided-tour/updating-data/graphql-subscriptions), which will require additional configuration on your server and [network layer](https://relay.dev/docs/guided-tour/updating-data/graphql-subscriptions/#configuring-the-network-layer). + + +## When using `useQueryLoader` / `loadQuery` + +To refresh a query using the [`useQueryLoader`](../../../api-reference/use-query-loader/) Hook described in our [Fetching Queries for Render](../../rendering/queries/#fetching-queries-for-render) section, we only need to call `loadQuery` again: + + + + + + + +```js +/** + * App.react.js + */ + +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const [queryRef, loadQuery] = useQueryLoader( + AppQuery, + props.appQueryRef /* initial query ref */ + ); + + const refresh = useCallback(() => { + // Load the query again using the same original variables. + // Calling loadQuery will update the value of queryRef. + // The fetchPolicy ensures we always fetch from the server and skip + // the local data cache. + const {variables} = props.appQueryRef; + loadQuery(variables, {fetchPolicy: 'network-only'}); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +```js +/** + * MainContent.react.js + */ + +// Renders the preloaded query, given the query reference +function MainContent(props) { + const {refresh, queryRef} = props; + const data = usePreloadedQuery( + graphql` + query AppQuery($id: ID!) 
{ + user(id: $id) { + name + friends { + count + } + } + } + `, + queryRef, + ); + + return ( + <> +

      <h1>{data.user?.name}</h1>

+
      <div>Friends count: {data.user.friends?.count}</div>
+ + + ); +} +``` + +Let's distill what's going on here: + +* We call `loadQuery` in the event handler for refreshing, so the network request starts immediately, and then pass the updated `queryRef` to the `MainContent` component that uses `usePreloadedQuery`, so it renders the updated data. +* We are passing a `fetchPolicy` of `'network-only'` to ensure that we always fetch from the network and skip the local data cache. +* Calling `loadQuery` will re-render the component and cause `usePreloadedQuery` to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)), since a network request will always be made due to the `fetchPolicy` we are using. This means that we'll need to make sure that there's a `Suspense` boundary wrapping the `MainContent` component in order to show a fallback loading state. + +
+ +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +/** + * App.react.js + */ + +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const environment = useRelayEnvironment(); + const [queryRef, loadQuery] = useQueryLoader( + AppQuery, + props.appQueryRef /* initial query ref */ + ); + const [isRefreshing, setIsRefreshing] = useState(false) + + const refresh = useCallback(() => { + if (isRefreshing) { return; } + const {variables} = props.appQueryRef; + setIsRefreshing(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefreshing(false); + + // *After* the query has been fetched, we call + // loadQuery again to re-render with a new + // queryRef. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + loadQuery(variables, {fetchPolicy: 'store-only'}); + } + error: () => { + setIsRefreshing(false); + } + }); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +Let's distill what's going on here: + +* When refreshing, we now keep track of our own `isRefreshing` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI inside the `MainContent` component, *without* hiding the `MainContent`. +* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. 
When the `fetchQuery` network request completes, we call `loadQuery` so that we obtain an updated `queryRef` that we then pass to `usePreloadedQuery` in order render the updated data, similar to the previous example. +* At this point, when `loadQuery` is called, the data for the query should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + + +## When using `useLazyLoadQuery` + +To refresh a query using the [`useLazyLoadQuery`](../../../api-reference/use-lazy-load-query/) Hook described in our [Lazily Fetching Queries during Render](../../rendering/queries/#lazily-fetching-queries-during-render) section, we can do the following: + + + + + + + +```js +/** + * App.react.js + */ +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const variables = {id: '4'}; + const [refreshedQueryOptions, setRefreshedQueryOptions] = useState(null); + + const refresh = useCallback(() => { + // Trigger a re-render of useLazyLoadQuery with the same variables, + // but an updated fetchKey and fetchPolicy. + // The new fetchKey will ensure that the query is fully + // re-evaluated and refetched. + // The fetchPolicy ensures that we always fetch from the network + // and skip the local data cache. + setRefreshedQueryOptions(prev => ({ + fetchKey: (prev?.fetchKey ?? 0) + 1, + fetchPolicy: 'network-only', + })); + }, [/* ... */]); + + return ( + + + + ); +``` + +```js +/** + * MainContent.react.js + */ + +// Fetches and renders the query, given the fetch options +function MainContent(props) { + const {refresh, queryOptions, variables} = props; + const data = useLazyLoadQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + friends { + count + } + } + } + `, + variables, + queryOptions, + ); + + return ( + <> +

      <h1>{data.user?.name}</h1>

+
      <div>Friends count: {data.user.friends?.count}</div>
+ + + ); +} +``` + +Let's distill what's going on here: + +* We update the component in the event handler for refreshing by setting new options in state. This will cause the `MainContent` component that uses `useLazyLoadQuery` to re-render with the new `fetchKey` and `fetchPolicy`, and refetch the query upon rendering. +* We are passing a new value of `fetchKey` which we increment on every update. Passing a new `fetchKey` to `useLazyLoadQuery` on every update will ensure that the query is fully re-evaluated and refetched. +* We are passing a `fetchPolicy` of `'network-only'` to ensure that we always fetch from the network and skip the local data cache. +* The state update in `refresh` will cause the component to suspend (as explained in [Loading States with Suspense](../../rendering/loading-states/)), since a network request will always be made due to the `fetchPolicy` we are using. This means that we'll need to make sure that there's a `Suspense` boundary wrapping the `MainContent` component in order to show a fallback loading state. + +
+ +### If you need to avoid Suspense + +In some cases, you might want to avoid showing a Suspense fallback, which would hide the already rendered content. For these cases, you can use [`fetchQuery`](../../../api-reference/fetch-query/) instead, and manually keep track of a loading state: + + + + + + + + + +```js +/** + * App.react.js + */ +import type {AppQuery as AppQueryType} from 'AppQuery.graphql'; + +const AppQuery = require('__generated__/AppQuery.graphql'); + +function App(props: Props) { + const variables = {id: '4'} + const environment = useRelayEnvironment(); + const [refreshedQueryOptions, setRefreshedQueryOptions] = useState(null); + const [isRefreshing, setIsRefreshing] = useState(false) + + const refresh = useCallback(() => { + if (isRefreshing) { return; } + setIsRefreshing(true); + + // fetchQuery will fetch the query and write + // the data to the Relay store. This will ensure + // that when we re-render, the data is already + // cached and we don't suspend + fetchQuery(environment, AppQuery, variables) + .subscribe({ + complete: () => { + setIsRefreshing(false); + + // *After* the query has been fetched, we update + // our state to re-render with the new fetchKey + // and fetchPolicy. + // At this point the data for the query should + // be cached, so we use the 'store-only' + // fetchPolicy to avoid suspending. + setRefreshedQueryOptions(prev => ({ + fetchKey: (prev?.fetchKey ?? 0) + 1, + fetchPolicy: 'store-only', + })); + } + error: () => { + setIsRefreshing(false); + } + }); + }, [/* ... */]); + + return ( + + + + ); +} +``` + +Let's distill what's going on here: + +* When refreshing, we now keep track of our own `isRefreshing` loading state, since we are avoiding suspending. We can use this state to render a busy spinner or similar loading UI inside the `MainContent` component, *without* hiding the `MainContent`. +* In the event handler, we first call `fetchQuery`, which will fetch the query and write the data to the local Relay store. 
When the `fetchQuery` network request completes, we update our state so that we re-render an updated `fetchKey` and `fetchPolicy` that we then pass to `useLazyLoadQuery` in order render the updated data, similar to the previous example. +* At this point, when we update the state, the data for the query should already be cached in the local Relay store, so we use `fetchPolicy` of `'store-only'` to avoid suspending and only read the already cached data. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/environment.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/environment.md new file mode 100644 index 0000000000000..e4fd6516fc560 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/environment.md @@ -0,0 +1,59 @@ +--- +id: environment +title: Environment +slug: /guided-tour/rendering/environment/ +description: Relay guide to the environment +keywords: +- environment +- RelayEnvironmentProvider +- useRelayEnvironment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbActorsAndEnvironments from './fb/FbActorsAndEnvironments.md'; +import FbEnvironmentSetup from './fb/FbEnvironmentSetup.md'; + +## Relay Environment Provider + +In order to render Relay components, you need to render a `RelayEnvironmentProvider` component at the root of the app: + +```js +// App root + +const {RelayEnvironmentProvider} = require('react-relay'); +const Environment = require('MyEnvironment'); + +function Root() { + return ( + + {/*... */} + + ); +} +``` + +* The `RelayEnvironmentProvider` takes an environment, which it will make available to all descendant Relay components, and which is necessary for Relay to function. 
+ + + +## Accessing the Relay Environment + +If you want to access the *current* Relay Environment within a descendant of a `RelayEnvironmentProvider` component, you can use the `useRelayEnvironment` Hook: + +```js +const {useRelayEnvironment} = require('react-relay'); + +function UserComponent(props: Props) { + const environment = useRelayEnvironment(); + + return (...); +} +``` + + + + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/error-states.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/error-states.md new file mode 100644 index 0000000000000..c07745359f189 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/error-states.md @@ -0,0 +1,295 @@ +--- +id: error-states +title: Error States with ErrorBoundaries +slug: /guided-tour/rendering/error-states/ +description: Relay guide to rendering error states +keywords: +- rendering +- error +- boundary +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbErrorBoundary from './fb/FbErrorBoundary.md'; + + + +As you may have noticed, we mentioned that using `usePreloadedQuery` will render data from a query that was (or is) being fetched from the server, but we didn't elaborate on how to render UI to show an error if an error occurred during fetch. We will cover that in this section. + +We can use [Error Boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) components to catch errors that occur during render (due to a network error, or any kind of error), and render an alternative error UI when that occurs. The way it works is similar to how `Suspense` works, by wrapping a component tree in an error boundary, we can specify how we want to react when an error occurs, for example by rendering a fallback UI. 
+ +[Error boundaries](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary) are simply components that implement the static `getDerivedStateFromError` method: + +```js +const React = require('React'); + +type State = {error: ?Error}; + +class ErrorBoundary extends React.Component { + static getDerivedStateFromError(error): State { + // Set some state derived from the caught error + return {error: error}; + } +} +``` + +```js +/** + * App.react.js + */ + +const ErrorBoundary = require('ErrorBoundary'); +const React = require('React'); + +const MainContent = require('./MainContent.react'); +const SecondaryContent = require('./SecondaryContent.react'); + +function App() { + return ( + // Render an ErrorSection if an error occurs within + // MainContent or Secondary Content + }> + + + + ); +} +``` + +* We can use the Error Boundary to wrap subtrees and show a different UI when an error occurs within that subtree. When an error occurs, the specified `fallback` will be rendered instead of the content inside the boundary. +* Note that we can also control the granularity at which we render error UIs, by wrapping components at different levels with error boundaries. In this example, if any error occurs within `MainContent` or `SecondaryContent`, we will render an `ErrorSection` in place of the entire app content. 
+ + + +## Retrying after an Error + +### When using `useQueryLoader` / `loadQuery` + +When using `useQueryLoader`/`loadQuery` to fetch a query, in order to retry after an error has occurred, you can call `loadQuery` again and pass the *new* query reference to `usePreloadedQuery`: + +```js +/** + * ErrorBoundaryWithRetry.react.js + */ + +const React = require('React'); + +// NOTE: This is NOT actual production code; +// it is only used to illustrate example +class ErrorBoundaryWithRetry extends React.Component { + state = {error: null}; + + static getDerivedStateFromError(error): State { + return {error: error}; + } + + _retry = () => { + // This ends up calling loadQuery again to get and render + // a new query reference + this.props.onRetry(); + this.setState({ + // Clear the error + error: null, + }); + } + + render() { + const {children, fallback} = this.props; + const {error} = this.state; + if (error) { + if (typeof fallback === 'function') { + return fallback({error, retry: this._retry}); + } + return fallback; + } + return children; + } +} +``` +* When an error occurs, we render the provided `fallback`. +* When `retry` is called, we will clear the error, and call `loadQuery` again. This will fetch the query again and provide us a new query reference, which we can then pass down to `usePreloadedQuery`. + +```js +/** + * App.react.js + */ + +const ErrorBoundaryWithRetry = require('ErrorBoundaryWithRetry'); +const React = require('React'); + +const MainContent = require('./MainContent.react'); + +const query = require('__generated__/MainContentQuery.graphql'); + +// NOTE: This is NOT actual production code; +// it is only used to illustrate example +function App(props) { + // E.g., initialQueryRef provided by router + const [queryRef, loadQuery] = useQueryLoader(query, props.initialQueryRef); + + return ( + loadQuery(/* ... 
*/)} + fallback={({error, retry}) => + <> + + {/* Render a button to retry; this will attempt to re-render the + content inside the boundary, i.e. the query component */} + + + }> + {/* The value of queryRef will be updated after calling + loadQuery again */} + + + ); +} + +/** + * MainContent.react.js + */ +function MainContent(props) { + const data = usePreloadedQuery( + graphql`...`, + props.queryRef + ); + + return (/* ... */); +} +``` +* The sample Error Boundary in this example code will provide a `retry` function to the `fallback` which we can use to clear the error, re-load the query, and re-render with a new query ref that we can pass to the component that uses `usePreloadedQuery`. That component will consume the new query ref and suspend if necessary on the new network request. + + +### When using `useLazyLoadQuery` + +When using `useLazyLoadQuery` to fetch a query, in order to retry after an error has occurred, you can attempt to re-mount *and* re-evaluate the query component by passing it a new `fetchKey`: + +```js +/** + * ErrorBoundaryWithRetry.react.js + */ + +const React = require('React'); + +// NOTE: This is NOT actual production code; +// it is only used to illustrate example +class ErrorBoundaryWithRetry extends React.Component { + state = {error: null, fetchKey: 0}; + + static getDerivedStateFromError(error): State { + return {error: error, fetchKey: 0}; + } + + _retry = () => { + this.setState(prev => ({ + // Clear the error + error: null, + // Increment and set a new fetchKey in order + // to trigger a re-evaluation and refetching + // of the query using useLazyLoadQuery + fetchKey: prev.fetchKey + 1, + })); + } + + render() { + const {children, fallback} = this.props; + const {error, fetchKey} = this.state; + if (error) { + if (typeof fallback === 'function') { + return fallback({error, retry: this._retry}); + } + return fallback; + } + return children({fetchKey}); + } +} +``` +* When an error occurs, we render the provided `fallback`. 
+* When `retry` is called, we will clear the error, and increment our `fetchKey` which we can then pass down to `useLazyLoadQuery`. This will make it so we re-render the component that uses `useLazyLoadQuery` with a new `fetchKey`, ensuring that the query is refetched upon the new call to `useLazyLoadQuery`. + +```js +/** + * App.react.js + */ + +const ErrorBoundaryWithRetry = require('ErrorBoundaryWithRetry'); +const React = require('React'); + +const MainContent = require('./MainContent.react'); + +// NOTE: This is NOT actual production code; +// it is only used to illustrate example +function App() { + return ( + + <> + + {/* Render a button to retry; this will attempt to re-render the + content inside the boundary, i.e. the query component */} + + + }> + {({fetchKey}) => { + // If we have retried, use the new `retryQueryRef` provided + // by the Error Boundary + return ; + }} + + ); +} + +/** + * MainContent.react.js + */ +function MainContent(props) { + const data = useLazyLoadQuery( + graphql`...`, + variables, + {fetchKey: props.fetchKey} + ); + + return (/* ... */); +} +``` +* The sample Error Boundary in this example code will provide a `retry` function to the `fallback` which we can use to clear the error and re-render `useLazyLoadQuery` with a new `fetchKey`. This will cause the query to be re-evaluated and refetched, and `useLazyLoadQuery` start a new network request and suspend. + + + +## Accessing errors in GraphQL Responses + + + + +By default, internally at fb, Relay will *only* surface errors to React that are returned in the top-level [`errors` field](https://graphql.org/learn/validation/) if they are ether: + +* of `CRITICAL` severity, +* *or* if the top-level `data` field wasn't returned in the response. + + + + +If you wish to access error information in your application to display user friendly messages, the recommended approach is to model and expose the error information as part of your GraphQL schema. 
+ +For example, you could expose a field in your schema that returns either the expected result, or an Error object if an error occurred while resolving that field (instead of returning null): + + +```js +type Error { + # User friendly message + message: String! +} + +type Foo { + bar: Result | Error +} +``` + + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/fragments.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/fragments.md new file mode 100644 index 0000000000000..300913a407fce --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/fragments.md @@ -0,0 +1,354 @@ +--- +id: fragments +title: Fragments +slug: /guided-tour/rendering/fragments/ +description: Relay guide to rendering fragments +keywords: +- useFragment +- rendering +- fragment +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +The main building block for declaring data dependencies for React Components in Relay are [GraphQL Fragments](https://graphql.org/learn/queries/#fragments). Fragments are reusable units in GraphQL that represent a set of data to query from a GraphQL type exposed in the [schema](https://graphql.org/learn/schema/). 
+ +In practice, they are a selection of fields on a GraphQL Type: + +```graphql +fragment UserFragment on User { + name + age + profile_picture(scale: 2) { + uri + } +} +``` + + +In order to declare a fragment inside your JavaScript code, you must use the `graphql` tag: + +```js +const {graphql} = require('react-relay'); + +const userFragment = graphql` + fragment UserFragment_user on User { + name + age + profile_picture(scale: 2) { + uri + } + } +`; +``` + +## Rendering Fragments + +In order to *render* the data for a fragment, you can use the `useFragment` Hook: + +```js +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +const React = require('React'); + +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const data = useFragment( + graphql` + fragment UserComponent_user on User { + name + profile_picture(scale: 2) { + uri + } + } + `, + props.user, + ); + + return ( + <> +

      <h1>{data.name}</h1>

+
+ +
+ + ); +} + +module.exports = UserComponent; +``` + +Let's distill what's going on here: + +* `useFragment` takes a fragment definition and a *fragment reference*, and returns the corresponding `data` for that fragment and reference. + * This is similar to `usePreloadedQuery`, which takes a query definition and a query reference. +* A *fragment reference* is an object that Relay uses to *read* the data declared in the fragment definition; as you can see, the `UserComponent_user` fragment itself just declares fields on the `User` type, but we need to know *which* specific user to read those fields from; this is what the fragment reference corresponds to. In other words, a fragment reference is like *a pointer to a specific instance of a type* that we want to read data from. +* Note that *the component is automatically subscribed to updates to the fragment data*: if the data for this particular `User` is updated anywhere in the app (e.g. via fetching new data, or mutating existing data), the component will automatically re-render with the latest updated data. +* Relay will automatically generate Flow types for any declared fragments when the compiler is run, so you can use these types to declare the type for your Component's `props`. + * The generated Flow types include a type for the fragment reference, which is the type with the `$key` suffix: `$key`, and a type for the shape of the data, which is the type with the `$data` suffix: `$data`; these types are available to import from files that are generated with the following name: `.graphql.js`. + * We use our [lint rule](https://github.com/relayjs/eslint-plugin-relay) to enforce that the type of the fragment reference prop is correctly declared when using `useFragment`. By using a properly typed fragment reference as input, the type of the returned `data` will automatically be Flow-typed without requiring an explicit annotation. 
+ * In our example, we're typing the `user` prop as the fragment reference we need for `useFragment`, which corresponds to the `UserComponent_user$key` imported from `UserComponent_user.graphql`, which means that the type of `data` above would be: `{ name: ?string, profile_picture: ?{ uri: ?string } }`. +* Fragment names need to be globally unique. In order to easily achieve this, we name fragments using the following convention based on the module name followed by an identifier: `_`. This makes it easy to identify which fragments are defined in which modules and avoids name collisions when multiple fragments are defined in the same module. + + +If you need to render data from multiple fragments inside the same component, you can use `useFragment` multiple times: + +```js +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; +import type {UserComponent_viewer$key} from 'UserComponent_viewer.graphql'; + +const React = require('React'); +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UserComponent_user$key, + viewer: UserComponent_viewer$key, +}; + +function UserComponent(props: Props) { + const userData = useFragment( + graphql` + fragment UserComponent_user on User { + name + profile_picture(scale: 2) { + uri + } + } + `, + props.user, + ); + + const viewerData = useFragment( + graphql` + fragment UserComponent_viewer on Viewer { + actor { + name + } + } + `, + props.viewer, + ); + + return ( + <> +

      <h1>{userData.name}</h1>

+
+ + Acting as: {viewerData.actor?.name ?? 'Unknown'} +
+ + ); +} + +module.exports = UserComponent; +``` + +## Composing Fragments + +In GraphQL, fragments are reusable units, which means they can include *other* fragments, and consequently a fragment can be included within other fragments or [queries](../queries/): + +```graphql +fragment UserFragment on User { + name + age + profile_picture(scale: 2) { + uri + } + ...AnotherUserFragment +} + +fragment AnotherUserFragment on User { + username + ...FooUserFragment +} +``` + + +With Relay, you can compose fragment components in a similar way, using both component composition and fragment composition. Each React component is responsible for fetching the data dependencies of its direct children - just as it has to know about its children's props in order to render them correctly. This pattern means that developers are able to reason locally about components - what data they need, what components they render - but Relay is able to derive a global view of the data dependencies of an entire UI tree. + +```js +/** + * UsernameSection.react.js + * + * Child Fragment Component + */ + +import type {UsernameSection_user$key} from 'UsernameSection_user.graphql'; + +const React = require('React'); +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UsernameSection_user$key, +}; + +function UsernameSection(props: Props) { + const data = useFragment( + graphql` + fragment UsernameSection_user on User { + username + } + `, + props.user, + ); + + return
<div>{data.username ?? 'Unknown'}</div>
; +} + +module.exports = UsernameSection; +``` + +```js +/** + * UserComponent.react.js + * + * Parent Fragment Component + */ + +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +const React = require('React'); +const {graphql, useFragment} = require('react-relay'); + +const UsernameSection = require('./UsernameSection.react'); + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const user = useFragment( + graphql` + fragment UserComponent_user on User { + name + age + profile_picture(scale: 2) { + uri + } + + # Include child fragment: + ...UsernameSection_user + } + `, + props.user, + ); + + return ( + <> +

      <h1>{user.name}</h1>

+
+ + {user.age} + + {/* Render child component, passing the _fragment reference_: */} + +
+ + ); +} + +module.exports = UserComponent; +``` + +There are a few things to note here: + +* `UserComponent` both renders `UsernameSection`, *and* includes the fragment declared by `UsernameSection` inside its own `graphql` fragment declaration. +* `UsernameSection` expects a *fragment reference* as the `user` prop. As we've mentioned before, a fragment reference is an object that Relay uses to *read* the data declared in the fragment definition; as you can see, the child `UsernameSection_user` fragment itself just declares fields on the `User` type, but we need to know *which* specific user to read those fields from; this is what the fragment reference corresponds to. In other words, a fragment reference is like *a pointer to a specific instance of a type* that we want to read data from. +* Note that in this case the `user` passed to `UsernameSection`, i.e. the fragment reference, *doesn't actually contain any of the data declared by the child `UsernameSection` component*; instead, `UsernameSection` will use the fragment reference to read the data *it* declared internally, using `useFragment`. + * This means that the parent component will not receive the data selected by a child component (unless that parent explicitly selected the same fields). Likewise, child components will not receive the data selected by their parents (again, unless the child selected those same fields). + * This prevents separate components from *even accidentally* having implicit dependencies on each other. If this wasn't the case, modifying a component could break other components! + * This allows us to reason locally about our components and modify them without worrying about affecting other components. + * This is known as [*data masking*](../../../principles-and-architecture/thinking-in-relay/). +* The *fragment reference* that the child (i.e. `UsernameSection`) expects is the result of reading a parent fragment that *includes* the child fragment. 
In our particular example, that means the result of reading a fragment that includes `...UsernameSection_user` will be the fragment reference that `UsernameSection` expects. In other words, the data obtained as a result of reading a fragment via `useFragment` also serves as the fragment reference for any child fragments included in that fragment. + + +## Composing Fragments into Queries + +Fragments in Relay allow declaring data dependencies for a component, but they ***can't be fetched by themselves***. Instead, they need to be included in a query, either directly or transitively. This means that *all fragments must belong to a query when they are rendered*, or in other words, they must be "rooted" under some query. Note that a single fragment can still be included by multiple queries, but when rendering a specific *instance* of a fragment component, it must have been included as part of a specific query request. + +To fetch and render a query that includes a fragment, you can compose them in the same way fragments are composed, as shown in the [Composing Fragments](#composing-fragments) section: + +```js +/** + * UserComponent.react.js + * + * Fragment Component + */ + +import type {UserComponent_user$key} from 'UserComponent_user.graphql'; + +const React = require('React'); +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UserComponent_user$key, +}; + +function UserComponent(props: Props) { + const data = useFragment( + graphql`...`, + props.user, + ); + + return (...); +} + +module.exports = UserComponent; +``` + +```js +/** + * App.react.js + * + * Query Component + */ + +import type {AppQuery} from 'AppQuery.graphql'; +import type {PreloadedQuery} from 'react-relay'; + +const React = require('React'); +const {graphql, usePreloadedQuery} = require('react-relay'); + +const UserComponent = require('./UserComponent.react'); + +type Props = { + appQueryRef: PreloadedQuery, +} + +function App({appQueryRef}) { + const data = 
usePreloadedQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + + # Include child fragment: + ...UserComponent_user + } + } + `, + appQueryRef, + ); + + return ( + <> +

{data.user?.name}

+ {/* Render child component, passing the fragment reference: */} + + + ); +} +``` + +Note that: + +* The *fragment reference* that `UserComponent` expects is the result of reading a parent query that includes its fragment, which in our case means a query that includes `...UsernameSection_user`. In other words, the `data` obtained as a result of `usePreloadedQuery` also serves as the fragment reference for any child fragments included in that query. +* As mentioned previously, *all fragments must belong to a query when they are rendered,* which means that all fragment components *must* be descendants of a query. This guarantees that you will always be able to provide a fragment reference for `useFragment`, by starting from the result of reading a root query with `usePreloadedQuery`. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/loading-states.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/loading-states.md new file mode 100644 index 0000000000000..0c52b41bd5781 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/loading-states.md @@ -0,0 +1,257 @@ +--- +id: loading-states +title: Loading States with Suspense +slug: /guided-tour/rendering/loading-states/ +description: Relay guide to loading states +keywords: +- suspense +- loading +- glimmer +- fallback +- spinner +--- + +import DocsRating from '@site/src/core/DocsRating'; +import FbSuspensePlaceholder from '../../fb/FbSuspensePlaceholder.md'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbSuspenseDefinition from './fb/FbSuspenseDefinition.md'; +import FbSuspenseMoreInfo from './fb/FbSuspenseMoreInfo.md'; +import FbSuspenseTransitionsAndUpdatesThatSuspend from './fb/FbSuspenseTransitionsAndUpdatesThatSuspend.md'; +import FbSuspenseInRelayTransitions from './fb/FbSuspenseInRelayTransitions.md'; +import FbSuspenseInRelayFragments from './fb/FbSuspenseInRelayFragments.md'; + + +As you may have 
noticed, we mentioned that using `usePreloadedQuery` and `useLazyLoadQuery` will render data from a query that was being fetched from the server, but we didn't elaborate on how to render a loading UI (such as a glimmer) while that data is still being fetched. We will cover that in this section. + + + + + + + +To render loading states while a query is being fetched, we rely on [React Suspense](https://reactjs.org/docs/concurrent-mode-suspense.html). Suspense is a new feature in React that allows components to interrupt or *"suspend"* rendering in order to wait for some asynchronous resource (such as code, images or data) to be loaded; when a component "suspends", it indicates to React that the component isn't *"ready"* to be rendered yet, and won't be until the asynchronous resource it's waiting for is loaded. When the resource finally loads, React will try to render the component again. + + + +This capability is useful for components to express asynchronous dependencies like data, code, or images that they require in order to render, and lets React coordinate rendering the loading states across a component tree as these asynchronous resources become available. More generally, the use of Suspense give us better control to implement more deliberately designed loading states when our app is loading for the first time or when it's transitioning to different states, and helps prevent accidental flickering of loading elements (such as spinners), which can commonly occur when loading sequences aren't explicitly designed and coordinated. + + + + + + + + +:::caution +Note that this **DOES NOT** mean that "Suspense for Data Fetching" is ready for general implementation and adoption yet. **Support, general guidance, and requirements for usage of Suspense for Data Fetching are still not ready**, and the React team is still defining what this guidance will be for upcoming React releases. 
+ +Even though there will be some limitations when Suspense is used in React 17, Relay Hooks are stable and on the trajectory for supporting upcoming releases of React. + +For more information, see our **[Suspense Compatibility](../../../migration-and-compatibility/suspense-compatibility/)** guide. +::: + + + +## Loading fallbacks with Suspense Boundaries + +When a component is suspended, we need to render a *fallback* in place of the component while we wait for it to become *"ready"*. In order to do so, we use the `Suspense` component provided by React: + +```js +const React = require('React'); +const {Suspense} = require('React'); + +function App() { + return ( + // Render a fallback using Suspense as a wrapper + }> + + + ); +} +``` + + +`Suspense` components can be used to wrap any component; if the target component suspends, `Suspense` will render the provided fallback until all its descendants become *"ready"* (i.e. until *all* of the suspended components within the subtree resolve). Usually, the fallback is used to render fallback loading states such as a glimmers and placeholders. + +Usually, different pieces of content in our app might suspend, so we can show loading state until they are resolved by using `Suspense` : + +```js +/** + * App.react.js + */ + +const React = require('React'); +const {Suspense} = require('React'); + +function App() { + return ( + // LoadingGlimmer is rendered via the Suspense fallback + }> + {/* MainContent may suspend */} + + ); +} +``` + + + + + +Let's distill what's going on here: + +* If `MainContent` suspends because it's waiting on some asynchronous resource (like data), the `Suspense` component that wraps `MainContent` will detect that it suspended, and will render the `fallback` element (i.e. the `LoadingGlimmer` in this case) up until `MainContent` is ready to be rendered. Note that this also transitively includes descendants of `MainContent`, which might also suspend. 
+ + +What's nice about Suspense is that you have granular control about how to accumulate loading states for different parts of your component tree: + +```js +/** + * App.react.js + */ + +const React = require('React'); +const {Suspense} = require('React'); + +function App() { + return ( + // A LoadingGlimmer for all content is rendered via the Suspense fallback + }> + + {/* SecondaryContent can also suspend */} + + ); +} +``` + + + +* In this case, both `MainContent` and `SecondaryContent` may suspend while they load their asynchronous resources; by wrapping both in a `Suspense`, we can show a single loading state up until they are *all* ready, and then render the entire content in a single paint, after everything has successfully loaded. +* In fact, `MainContent` and `SecondaryContent` may suspend for different reasons other than fetching data, but the same `Suspense` component can be used to render a fallback up until *all* components in the subtree are ready to be rendered. Note that this also transitively includes descendants of `MainContent` or `SecondaryContent`, which might also suspend. + + +Conversely, you can also decide to be more granular about your loading UI and wrap Suspense components around smaller or individual parts of your component tree: + +```js +/** + * App.react.js + */ + +const React = require('React'); +const {Suspense} = require('React'); + +function App() { + return ( + <> + {/* Show a separate loading UI for the LeftHandColumn */} + }> + + + + {/* Show a separate loading UI for both the Main and Secondary content */} + }> + + + + + ); +} +``` + + + +* In this case, we're showing 2 separate loading UIs: + * One to be shown until the `LeftColumn` becomes ready + * And one to be shown until both the `MainContent` and `SecondaryContent` become ready. +* What is powerful about this is that by more granularly wrapping our components in Suspense, *we allow other components to be rendered earlier as they become ready*. 
In our example, by separately wrapping `MainContent` and `SecondaryContent` under `Suspense`, we're allowing `LeftColumn` to render as soon as it becomes ready, which might be earlier than when the content sections become ready. + + +## Transitions and Updates that Suspend + + + + + + + +`Suspense` boundary fallbacks allow us to describe our loading placeholders when initially rendering some content, but our applications will also have transitions between different content. Specifically, when switching between two components within an already mounted boundary, the new component you're switching to might not have loaded all of its async dependencies, which means that it might also suspend. + +In these cases, we would still show the `Suspense` boundary fallbacks. However, this means that we would hide existing content in favor of showing the `Suspense` fallback. In future versions of React when concurrent rendering is supported, React will provide an option to support this case and avoid hiding already rendered content with a Suspense fallback when suspending. + + + +## How We Use Suspense in Relay + +### Queries + +In our case, our query components are components that can suspend, so we use Suspense to render loading states while a query is being fetched. 
Let's see what that looks like in practice: + +Say we have the following query renderer component: + +```js +/** + * MainContent.react.js + * + * Query Component + */ + +const React = require('React'); +const {graphql, usePreloadedQuery} = require('react-relay'); + +function MainContent(props) { + // Fetch and render a query + const data = usePreloadedQuery( + graphql`...`, + props.queryRef, + ); + + return (...); +} +``` + +```js +/** + * App.react.js + */ + +const React = require('React'); +const {Suspense} = require('React'); + +function App() { + return ( + // LoadingGlimmer is rendered via the Suspense fallback + }> + {/* MainContent may suspend */} + + ); +} +``` + + + +Let's distill what's going on here: + +* We have a `MainContent` component, which is a query renderer that fetches and renders a query. `MainContent` will *suspend* rendering when it attempts to fetch the query, indicating that it isn't ready to be rendered yet, and it will resolve when the query is fetched. +* The `Suspense `component that wraps `MainContent` will detect that `MainContent` suspended, and will render the `fallback` element (i.e. the `LoadingGlimmer` in this case) up until `MainContent` is ready to be rendered; that is, up until the query is fetched. + + +### Fragments + + + + + + + +Fragments are also integrated with Suspense in order to support rendering of data that's being `@defer'`d or data that's partially available in the Relay Store (i.e. [partial rendering](../../reusing-cached-data/rendering-partially-cached-data/)). + + + +### Transitions + + + + + +Additionally, our APIs for refetching ([Refreshing and Refetching](../../refetching/)) and for [rendering connections](../../list-data/connections/) are also integrated with Suspense; for these use cases, these APIs will also suspend. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/queries.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/queries.md new file mode 100644 index 0000000000000..9cd08b286363b --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/queries.md @@ -0,0 +1,261 @@ +--- +id: queries +title: Queries +slug: /guided-tour/rendering/queries/ +description: Relay guide to queries +keywords: +- query +- usePreloadedQuery +- useLazyLoadQuery +- useQueryLoader +- loadQuery +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +import FbEntrypointsExtraInfo from './fb/FbEntrypointsExtraInfo.md'; + +A [GraphQL Query](https://graphql.org/learn/queries/) is a description of data you want to query from a GraphQL server. It consists of a set of fields (and potentially [fragments](../fragments/)) that we want to request from the GraphQL server. What we can query for will depend on the [GraphQL Schema](https://graphql.org/learn/schema/) exposed on the server, which describes the data that is available for querying. + +A query can be sent as a request over the network, along with an optional collection of [variables](../variables/) that the query uses, in order to fetch the data. The server response will be a JSON object that matches the shape of the query we sent: + +```graphql +query UserQuery($id: ID!) { + user(id: $id) { + id + name + ...UserFragment + } + viewer { + actor { + name + } + } +} + +fragment UserFragment on User { + username +} +``` + + + +[Sample response](https://fburl.com/graphiql/v5hs717f): + + + + + +Sample response: + + + +```json +{ + "data": { + "user": { + "id": "4", + "name": "Mark Zuckerberg", + "username": "zuck" + }, + "viewer": { + "actor": { + "name": "Your Name" + } + } + } +} +``` + + + +## Rendering Queries + +To *render* a query in Relay, you can use the `usePreloadedQuery` hook. 
`usePreloadedQuery` takes a query definition and a query reference, and returns the corresponding data for that query and reference. + +```js +import type {HomeTabQuery} from 'HomeTabQuery.graphql'; +import type {PreloadedQuery} from 'react-relay'; + +const React = require('React'); +const {graphql, usePreloadedQuery} = require('react-relay'); + +type Props = { + queryRef: PreloadedQuery, +}; + +function HomeTab(props: Props) { + const data = usePreloadedQuery( + graphql` + query HomeTabQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + props.queryRef, + ); + + return ( +

{data.user?.name}

+ ); +} +``` + +Lets see what's going on here: + +* `usePreloadedQuery` takes a `graphql` query and a `PreloadedQuery` reference, and returns the data that was fetched for that query. + * The `PreloadedQuery` (in this case `queryRef`) is an object that describes and references an *instance* of our query that is being (or was) fetched. + * We'll cover how to actually fetch the query in the next section below, and cover how to show loading states if the query is in-flight when we try to render it in the [Loading States with Suspense](../loading-states/) section. +* Similarly to [fragments](../fragments/), *the component is automatically subscribed to updates to the query data*: if the data for this query is updated anywhere in the app, the component will automatically re-render with the latest updated data. +* `usePreloadedQuery` also takes a Flow type parameter, which corresponds to the Flow type for the query, in this case `HomeTabQuery`. + * The Relay compiler automatically generates Flow types for any declared queries, which are available to import from the generated files with the following name format: *``*`.graphql.js`. + * Note that the `data` is already properly Flow-typed without requiring an explicit annotation, and is based on the types from the GraphQL schema. For example, the type of `data` above would be: `{ user: ?{ name: ?string } }`. +* Make sure you're providing a Relay environment using a [Relay Environment Provider](../environment/) at the root of your app before trying to render a query. + + +## Fetching Queries for Render + +Apart from *rendering* a query, we also need to fetch it from the server. Usually we want to fetch queries somewhere at the root of our app, and only have **one or a few queries that [*accumulate*](../fragments/#composing-fragments-into-queries) all the data required to render the screen**. Ideally, we'd fetch them as early as possible, before we even start rendering our app. 
In order to *fetch* a query so that it can be rendered later, you can use the `useQueryLoader` Hook:
This value will be null if we haven't fetched the query, i.e. if we haven't called `loadHomeTabQuery`. + * `loadHomeTabQuery`: A function that will *fetch* the data for this query from the server (if it isn't already cached), and given an object with the [variables](../variables/) the query expects, in this case `{id: '4'}` (we'll go into more detail about how Relay uses cached data in the [Reusing Cached Data For Render](../../reusing-cached-data/) section). Calling this function will also update the value of `homeTabQueryRef` to an instance of a `PreloadedQuery`. + * Note that the `variables` we pass to this function will be checked by Flow to ensure that you are passing values that match what the GraphQL query expects. + * Also note that we are calling this function in the event handler that causes the `HomeTab` to be rendered. This allows us to start fetching the data for the screen as early as possible, even before the new tab starts rendering. + * In fact, `loadQuery` will throw an error if it is called during React's render phase! +* Note that `useQueryLoader` will automatically dispose of all queries that have been loaded when the component unmounts. Disposing of a query means that Relay will no longer hold on to the data for that particular instance of the query in its cache (we'll cover the lifetime of query data in [Reusing Cached Data For Render](../../reusing-cached-data/) section). Additionally, if the request for the query is still in flight when disposal occurs, it will be canceled. +* Our `AppTabs` component renders the `HomeTab` component from the previous example, and passes it the corresponding query reference. Note that this parent component owns the lifetime of the data for that query, meaning that when it unmounts, it will of dispose of that query, as mentioned above. +* Finally, make sure you're providing a Relay environment using a [Relay Environment Provider](../environment/) at the root of your app before trying to use `useQueryLoader`. 
+ + +Sometimes, you want to start a fetch outside of the context of a parent component, for example to fetch the data required for the initial load of the application. For these cases, you can use the `loadQuery` API directly, without using `useQueryLoader`: + +```js +import type {HomeTabQuery as HomeTabQueryType} from 'HomeTabQuery.graphql'; + +const HomeTabQuery = require('HomeTabQuery.graphql') +const {loadQuery} = require('react-relay'); + + +const environment = createEnvironment(...); + +// At some point during app initialization +const initialQueryRef = loadQuery( + environment, + HomeTabQuery, + {id: '4'}, +); + +// ... + +// E.g. passing the initialQueryRef to the root component +render() +``` + +* In this example, we are calling the `loadQuery` function directly to obtain a `PreloadedQuery` instance that we can later pass to a component that uses `usePreloadedQuery`. +* In this case, we would expect the root `AppTabs` component to manage the lifetime of the query reference, and dispose of it at the appropriate time, if at all. +* We've left the details of "app initialization" vague in this example, since that will vary from application to application. The important thing to note here is that we should obtain a query reference before we start rendering the root component. In fact, `loadQuery` will throw an error if it is called during React's render phase! + + +### Render as you Fetch + +The examples above illustrate how to separate fetching the data from rendering it, in order to start the fetch as early as possible (as opposed to waiting until the component is rendered to start the fetch), and allow us to show content to our users a lot sooner. It also helps prevent waterfalling round trips, and gives us more control and predictability over when the fetch occurs, whereas if we fetch during render, it becomes harder to determine when the fetch will (or should) occur. 
This fits nicely with the [*"render-as-you-fetch"*](https://reactjs.org/docs/concurrent-mode-suspense.html#approach-3-render-as-you-fetch-using-suspense) pattern with [React Suspense](../loading-states/). + +This is the preferred pattern for fetching data with Relay, and it applies in several circumstances, such as the initial load of an application, during subsequent navigations, or generally when using UI elements which are initially hidden and later revealed upon an interaction (such as menus, popovers, dialogs, etc), and which also require fetching additional data. + + + + +## Lazily Fetching Queries during Render + +Another alternative for fetching a query is to lazily fetch the query when the component is rendered. However, as we've mentioned previously, the preferred pattern is to start fetching queries ahead of rendering. If lazy fetching is used without caution, it can trigger nested or waterfalling round trips, and can degrade performance. + +To fetch a query lazily, you can use the `useLazyLoadQuery` Hook: + +```js +const React = require('React'); +const {graphql, useLazyLoadQuery} = require('react-relay'); + +function App() { + const data = useLazyLoadQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + {id: '4'}, + ); + + return ( +

{data.user?.name}

+ ); +} +``` +Lets see what's going on here: + +* `useLazyLoadQuery` takes a graphql query and some variables for that query, and returns the data that was fetched for that query. The variables are an object containing the values for the [variables](../variables/) referenced inside the GraphQL query. +* Similarly to [fragments](../fragments/), the component is automatically subscribed to updates to the query data: if the data for this query is updated anywhere in the app, the component will automatically re-render with the latest updated data. +* `useLazyLoadQuery` additionally takes a Flow type parameter, which corresponds to the Flow type for the query, in this case AppQuery. + * Remember that Relay automatically generates Flow types for any declared queries, which you can import and use with `useLazyLoadQuery`. These types are available in the generated files with the following name format: `.graphql.js`. + * Note that the `variables` will be checked by Flow to ensure that you are passing values that match what the GraphQL query expects. + * Note that the data is already properly Flow-typed without requiring an explicit annotation, and is based on the types from the GraphQL schema. For example, the type of `data` above would be: `{ user: ?{ name: ?string } }`. +* By default, when the component renders, Relay will *fetch* the data for this query (if it isn't already cached), and return it as a the result of the `useLazyLoadQuery` call. We'll go into more detail about how to show loading states in the [Loading States with Suspense](../loading-states/) section, and how Relay uses cached data in the [Reusing Cached Data For Rendering](../../reusing-cached-data/) section. +* Note that if you re-render your component and pass *different query variables* than the ones originally used, it will cause the query to be fetched again with the new variables, and potentially re-render with different data. 
+* Finally, make sure you're providing a Relay environment using a [Relay Environment Provider](../../../api-reference/relay-environment-provider/) at the root of your app before trying to render a query. + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/rendering/variables.md b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/variables.md new file mode 100644 index 0000000000000..39154f6c9607a --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/rendering/variables.md @@ -0,0 +1,233 @@ +--- +id: variables +title: Variables +slug: /guided-tour/rendering/variables/ +description: Relay guide to query variables +keywords: +- query +- variables +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +You may have noticed that the query declarations in our examples above contain references to an `$id` symbol inside the GraphQL code: these are [GraphQL Variables](https://graphql.org/learn/queries/#variables). + +GraphQL variables are a construct that allows referencing dynamic values inside a GraphQL query. When fetching a query from the server, we also need to provide as input the actual set of values to use for the variables declared inside the query: + +```graphql +query UserQuery($id: ID!) { + # The value of $id is used as input to the user() call: + user(id: $id) { + id + name + } +} +``` + +In the above, `ID!` is the type of the `$id` variable. That is, it is a required ID. + +When sending a network request to fetch the query above, we need to provide both the query, and the variables to be used for this particular execution of the query. For example: + +```graphql +# Query: +query UserQuery($id: ID!) { + # ... 
+} + +# Variables: +{"id": 4} +``` + + + + +Fetching the above query and variables from the server would produce the following response, which can also be visualized in [GraphiQL](https://fburl.com/graphiql/kiuar058): + + + + + +Fetching the above query and variables from the server would produce the following response: + + + +```json +{ + "data": { + "user": { + "id": "4", + "name": "Mark Zuckerberg" + } + } +} +``` + + +* * * + +Fragments can also reference variables that have been declared by a query: + +```graphql +fragment UserFragment on User { + name + profile_picture(scale: $scale) { + uri + } +} + + +query ViewerQuery($scale: Float!) { + viewer { + actor { + ...UserFragment + } + } +} +``` + +* Even though the fragment above doesn't *declare* the `$scale` variable directly, it can still reference it. Doing so makes it so any query that includes this fragment, either directly or transitively, *must* declare the variable and its type, otherwise an error will be produced. +* In other words, *query variables are available globally by any fragment that is a descendant of the query*. +* A fragment which references a global variable can only be included (directly or transitively) in a query which defines that global variable. + + +In Relay, fragment declarations inside components can also reference query variables: + +```js +function UserComponent(props: Props) { + const data = useFragment( + graphql` + fragment UserComponent_user on User { + name + profile_picture(scale: $scale) { + uri + } + } + `, + props.user, + ); + + return (...); +} +``` + +* The above fragment could be included by multiple queries, and rendered by different components, which means that any query that ends up rendering/including the above fragment *must* declare the `$scale` variable. 
+* If any query that happens to include this fragment *doesn't* declare the `$scale` variable, an error will be produced by the Relay Compiler at build time, ensuring that an incorrect query never gets sent to the server (sending a query with missing variable declarations will also produce an error in the server). + + + +## @arguments and @argumentDefinitions + +Relay also provides a way to declare variables that are scoped locally to a fragment using the `@arguments` and `@argumentDefinitions` directives. Fragments that use local variables are easy to customize and reuse, since they do not depend on the value of global (query-level) variables. + +```js +/** + * Declare a fragment that accepts arguments with @argumentDefinitions + */ + +function TaskView(props) { + const data = useFragment( + graphql` + fragment TaskView_task on Task + @argumentDefinitions(showDetailedResults: {type: "Boolean!"}) { + name + is_completed + ... @include(if: $showDetailedResults) { + description + } + } + `, + props.task, + ); +} +``` + +```js +/** + * Include fragment using @arguments + */ + +function TaskList(props) { + const data = usePreloadedQuery( + graphql` + query TaskListQuery { + todays_tasks { + ...TaskView_task @arguments(showDetailedResults: true) + } + tomorrows_tasks { + ...TaskView_task @arguments(showDetailedResults: false) + } + } + `, + props.queryRef, + ); +} +``` + +* Locally-scoped variables also make it easier to reuse a fragment from another query. + * A query definition must list all variables that are used by any nested fragments, including in recursively-nested fragments. + * Since a fragment can potentially be accessible from many queries, modifying a fragment that uses global variables can require changes to many query definitions. + * This can also lead to awkward situations, like having multiple versions of the "same" variable, such as `$showDetailedResults` and `$showDetails`. 
+ + * Since fragments with only locally-scoped variables do not use global variables, they do not suffer from this issue. + +* Note that when passing `@arguments` to a fragment, we can pass a literal value or pass another variable. The variable can be a global query variable, a local variable declared via `@argumentDefinitions` or a literal (e.g. `42.0`). +* When we actually fetch `TaskView_task` as part of a query, the `showDetailedResults` value will depend on the argument that was provided by the parent of `TaskView_task`: + +Fragments that expect arguments can also declare default values, making the arguments optional: + +```js +/** + * Declare a fragment that accepts arguments with default values + */ + +function TaskView(props) { + const data = useFragment( + graphql` + fragment TaskView_task on Task + @argumentDefinitions(showDetailedResults: {type: "Boolean!", defaultValue: true}) { + name + is_completed + ... @include(if: $showDetailedResults) { + description + } + } + `, + props.task, + ); +} +``` + +```js +function TaskList(props) { + const data = usePreloadedQuery( + graphql` + query TaskListQuery { + todays_tasks { + ...TaskView_task + } + tomorrows_tasks { + ...TaskView_task @arguments(showDetailedResults: false) + } + } + `, + props.queryRef, + ); +} +``` + +* Not passing the argument to `TaskView_task` makes it use the default value for its locally declared `$showDetailedResult` variable. + + + +## Accessing GraphQL Variables At Runtime + + +If you want to access the variables that were set at the query root, the recommended approach is to pass the variables down the component tree in your application, using props, or your own application-specific context. + +Relay currently does not expose the resolved variables (i.e. after applying argument definitions) for a specific fragment, and you should very rarely need to do so. 
+ + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/availability-of-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/availability-of-data.md new file mode 100644 index 0000000000000..81ef071f315ea --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/availability-of-data.md @@ -0,0 +1,18 @@ +--- +id: availability-of-data +title: Availability of Data +slug: /guided-tour/reusing-cached-data/availability-of-data/ +description: Relay guide to the availability of data +keywords: +- availability +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +The behavior of the fetch policies described in the [previous section](../fetch-policies/) will depend on the availability of the data in the Relay store at the moment we attempt to evaluate a query. + +There are two factors that determine the availability of data: the [presence of data](../presence-of-data/) and [staleness of data](../staleness-of-data/). 
+ + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/fetch-policies.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/fetch-policies.md new file mode 100644 index 0000000000000..cf5a31a4c22f6 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/fetch-policies.md @@ -0,0 +1,56 @@ +--- +id: fetch-policies +title: Fetch Policies +slug: /guided-tour/reusing-cached-data/fetch-policies/ +description: Relay guide to fetch policies +keywords: +- fetch policy +- network-only +- store-only +- store-and-network +- store-or-network +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +The first step to reusing locally cached data is to pass a `fetchPolicy` to the `loadQuery` function, which can be provided by `useQueryLoader` (see the [Fetching Queries section](../../rendering/queries/)): + +```js +const React = require('React'); +const {graphql} = require('react-relay'); + +function AppTabs() { + const [ + queryRef, + loadQuery, + ] = useQueryLoader(HomeTabQuery); + + const onSelectHomeTab = () => { + loadQuery({id: '4'}, {fetchPolicy: 'store-or-network'}); + } + + // ... +} +``` + +The provided `fetchPolicy` will determine: + +* *whether* the query should be fulfilled from the local cache, and +* *whether* a network request should be made to fetch the query from the server, depending on the [availability of the data for that query in the store](../availability-of-data/). + + +By default, Relay will try to read the query from the local cache; if any piece of data for that query is [missing](../presence-of-data/) or [stale](../staleness-of-data/), it will fetch the entire query from the network. 
This default `fetchPolicy` is called "*store-or-network*". + +Specifically, `fetchPolicy` can be any of the following options: + +* "store-or-network": *(default)* *will* reuse locally cached data, and will *only* send a network request if any data for the query is [missing](../presence-of-data/) or [stale](../staleness-of-data/). If the query is fully cached, a network request will *not* be made. +* "store-and-network": *will* reuse locally cached data and will *always* send a network request, regardless of whether any data was [missing](../presence-of-data/) or [stale](../staleness-of-data/) in the store. +* "network-only": *will* *not* reuse locally cached data, and will *always* send a network request to fetch the query, ignoring any data that might be locally cached and whether it's [missing](../presence-of-data/) or [stale](../staleness-of-data/). +* "store-only": *will* *only* reuse locally cached data, and will *never* send a network request to fetch the query. In this case, the responsibility of fetching the query falls to the caller, but this policy could also be used to read and operate on data that is entirely [local](../../updating-data/local-data-updates/). + + +Note that the `refetch` function discussed in the [Fetching and Rendering Different Data](../../refetching/) section also takes a `fetchPolicy`.
+ + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/filling-in-missing-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/filling-in-missing-data.md new file mode 100644 index 0000000000000..ee183a527fe5e --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/filling-in-missing-data.md @@ -0,0 +1,102 @@ +--- +id: filling-in-missing-data +title: Filling in Missing Data (Missing Field Handlers) +slug: /guided-tour/reusing-cached-data/filling-in-missing-data/ +description: Relay guide to filling in missing data +keywords: +- missing field handler +- missing data +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +import FbMissingFieldHandlers from './fb/FbMissingFieldHandlers.md'; + +In the previous section we covered how to reuse data that is fully or partially cached, however there are cases in which Relay can't automatically tell that it can reuse some of the data it already has from other queries in order to fulfill a specific query. Specifically, Relay knows how to reuse data that is cached for a query that has been fetched before; that is, if you fetch the exact same query twice, Relay will know that it has the data cached for that query the second time it tries to evaluate it. + +However, when using different queries, there might still be cases where different queries point to the same data, which we'd want to be able to reuse. For example, imagine the following two queries: + +```js +// Query 1 + +query UserQuery { + user(id: 4) { + name + } +} +``` + +```js +// Query 2 + +query NodeQuery { + node(id: 4) { + ... on User { + name + } + } +} +``` + + +These two queries are different, but reference the exact same data. Ideally, if one of the queries was already cached in the store, we should be able to reuse that data when rendering the other query. 
However, Relay doesn't have this knowledge by default, so we need to configure it to encode the knowledge that a `node(id: 4)` *"is the same as"* `user(id: 4)`. + +To do so, we can provide `missingFieldHandlers` to the `RelayEnvironment` which specifies this knowledge. + + + +```js +const {ROOT_TYPE, Environment} = require('relay-runtime'); + +const missingFieldHandlers = [ + { + handle(field, record, argValues): ?string { + // Make sure to add a handler for the node field + if ( + record != null && + record.getType() === ROOT_TYPE && + field.name === 'node' && + argValues.hasOwnProperty('id') + ) { + return argValues.id + } + if ( + record != null && + record.getType() === ROOT_TYPE && + field.name === 'user' && + argValues.hasOwnProperty('id') + ) { + // If field is user(id: $id), look up the record by the value of $id + return argValues.id; + } + if ( + record != null && + record.getType() === ROOT_TYPE && + field.name === 'story' && + argValues.hasOwnProperty('story_id') + ) { + // If field is story(story_id: $story_id), look up the record by the + // value of $story_id. + return argValues.story_id; + } + return undefined; + }, + kind: 'linked', + }, +]; + +const environment = new Environment({/*...*/, missingFieldHandlers}); +``` + +* `missingFieldHandlers` is an array of *handlers*. Each handler must specify a `handle` function, and the kind of missing fields it knows how to handle. The 2 main types of fields that you'd want to handle are: + * *'scalar'*: This represents a field that contains a scalar value, for example a number or a string. + * *'linked'*: This represents a field that references another object, i.e. not a scalar. +* The `handle` function takes the field that is missing, the record that field belongs to, and any arguments that were passed to the field in the current execution of the query. 
+ * When handling a *'scalar'* field, the handle function should return a scalar value, to use as the value for a missing field. + * When handling a *'linked'* field, the handle function should return an *ID*, referencing another object in the store that should be used in place of the missing field. +* As Relay attempts to fulfill a query from the local cache, whenever it detects any missing data, it will run any of the provided missing field handlers that match the field type before definitively declaring that the data is missing. + + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/introduction.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/introduction.md new file mode 100644 index 0000000000000..c4ea2384f6dbf --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/introduction.md @@ -0,0 +1,22 @@ +--- +id: introduction +title: Reusing Cached Data +slug: /guided-tour/reusing-cached-data/ +description: Relay guide to reusing cached data +keywords: +- reusing +- cached +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +While an app is in use, Relay will accumulate and cache *(for some time)* the data for the multiple queries that have been fetched throughout usage of our app. Often times, we'll want to be able to reuse and immediately render this data that is locally cached instead of waiting for a network request when fulfilling a query; this is what we'll cover in this section. + +Some examples of when this might be useful are: + +* Navigating between tabs in an app, where each tab renders a query. If a tab has already been visited, re-visiting the tab should render it instantly, without having to wait for a network request to fetch the data that we've already fetched before. +* Navigating to a post that was previously rendered on a feed.
If the post has already been rendered on a feed, navigating to the post's permalink page should render the post immediately, since all of the data for the post should already be cached. + * Even if rendering the post in the permalink page requires more data than rendering the post on a feed, we'd still like to reuse and immediately render as much of the post's data that we already have available locally, without blocking render for the entire post if only a small bit of data is missing. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/presence-of-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/presence-of-data.md new file mode 100644 index 0000000000000..b057115fbfede --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/presence-of-data.md @@ -0,0 +1,93 @@ +--- +id: presence-of-data +title: Presence of Data +slug: /guided-tour/reusing-cached-data/presence-of-data/ +description: Relay guide to the presence of data +keywords: +- presence +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbGarbageCollection from './fb/FbGarbageCollection.md'; + + +An important thing to keep in mind when attempting to reuse data that is cached in the Relay store is to understand the lifetime of that data; that is, if it is present in the store, and for how long it will be. + +Data in the Relay store for a given query will generally be present after the query has been fetched for the first time, as long as that query is being rendered on the screen. If we've never fetched data for a specific query, then it will be missing from the store. + +However, even after we've fetched data for different queries, we can't keep all of the data that we've fetched indefinitely in memory, since over time it would grow to be too large and too stale. 
In order to mitigate this, Relay runs a process called *Garbage Collection*, in order to delete data that we're no longer using. + +## Garbage Collection in Relay + +Specifically, Relay runs garbage collection on the local in-memory store by deleting any data that is no longer being referenced by any component in the app. + +However, this can be at odds with reusing cached data; if the data is deleted too soon, before we try to reuse it again later, that will prevent us from reusing that data to render a screen without having to wait on a network request. To address this, this section will cover what you need to do in order to ensure that the data you want to reuse is kept cached for as long as you need it. + + +:::note +NOTE: Usually, you shouldn't need to worry about configuring garbage collection and data retention, as this should be configured by the app infrastructure at the RelayEnvironment level; however, we will cover it here for reference. +::: + + + + + +## Query Retention + +Retaining a query indicates to Relay that the data for that query and variables shouldn't be deleted (i.e. garbage collected). Multiple callers might retain a single query, and as long as there is at least one caller retaining a query, it won't be deleted from the store. + +By default, any query components using `useQueryLoader` / `usePreloadedQuery` or our other APIs will retain the query for as long as they are mounted. After they unmount, they will release the query, which means that the query might be deleted at any point in the future after that occurs. 
+ +If you need to retain a specific query outside of the component's lifecycle, you can use the [`retain`](../../accessing-data-without-react/retaining-queries/) operation: + +```js +// Retain query; this will prevent the data for this query and +// variables from being garbage collected by Relay +const disposable = environment.retain(queryDescriptor); + +// Disposing of the disposable will release the data for this query +// and variables, meaning that it can be deleted at any moment +// by Relay's garbage collection if it hasn't been retained elsewhere +disposable.dispose(); +``` + +* As mentioned, this will allow you to retain the query even after a query component has unmounted, allowing other components, or future instances of the same component, to reuse the retained data. + + +## Controlling Relay's Garbage Collection Policy + +There are currently 2 options you can provide to your Relay Store in order to control the behavior of garbage collection: + +### GC Scheduler + +The `gcScheduler` is a function you can provide to the Relay Store which will determine when a GC execution should be scheduled to run: + +```js +// Sample scheduler function +// Accepts a callback and schedules it to run at some future time. +function gcScheduler(run: () => void) { + resolveImmediate(run); +} + +const store = new Store(source, {gcScheduler}); +``` + +* By default, if a `gcScheduler` option is not provided, Relay will schedule garbage collection using the `resolveImmediate` function. +* You can provide a scheduler function to make GC scheduling less aggressive than the default, for example based on time or [scheduler](https://github.com/facebook/react/tree/main/packages/scheduler) priorities, or any other heuristic. By convention, implementations should not execute the callback immediately.
+ + +### GC Release Buffer Size + +The Relay Store internally holds a release buffer to keep a specific (configurable) number of queries temporarily retained even *after* they have been released by their original owner (which will happen by default when a component rendering that query unmounts). This makes it possible (and more likely) to be able to reuse data when navigating back to a page, tab or piece of content that has been visited before. + +In order to configure the size of the release buffer, we can specify the `gcReleaseBufferSize` option to the Relay Store: + +```js +const store = new Store(source, {gcReleaseBufferSize: 10}); +``` + +* Note that having a buffer size of 0 is equivalent to not having the release buffer, which means that queries will be immediately released and collected. +* By default, environments have a release buffer size of 10. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/rendering-partially-cached-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/rendering-partially-cached-data.md new file mode 100644 index 0000000000000..6028410b79f6b --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/rendering-partially-cached-data.md @@ -0,0 +1,175 @@ +--- +id: rendering-partially-cached-data +title: Rendering Partially Cached Data +slug: /guided-tour/reusing-cached-data/rendering-partially-cached-data/ +description: Relay guide to rendering partially cached data +keywords: +- partially cached data +- renderPolicy +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbProfilePhotoHeaderExample from './fb/FbProfileHeaderExample.md'; +import FbSuspensePlaceholder from '../../fb/FbSuspensePlaceholder.md'; + +When rendering cached data in Relay, it is possible to perform partial rendering. 
We define *"partial rendering"* as the ability to immediately render a query that is partially cached. That is, parts of the query might be missing, but parts of the query might already be cached. In these cases, we want to be able to immediately render the parts of the query that are cached, without waiting on the full query to be fetched. + +This can be useful in scenarios where we want to render a screen or a page as fast as possible, and we know that some of the data for that page is already cached so we can skip a loading state. For example, take the profile page: it is very likely that the user's name has already been cached at some point during usage of the app, so when visiting a profile page, if the name of the user is cached, we'd like to render immediately, even if the rest of the data for the profile page isn't available yet. + + +### Fragments as boundaries for partial rendering + +To do this, we rely on the ability of fragment components to *suspend* (see the [Loading States with Suspense](../../rendering/loading-states/) section). A fragment component will suspend if any of the data it declared locally is missing during render, and is currently being fetched. Specifically, it will suspend until the data it requires is fetched, that is, until the query to which it belongs (its *parent query*) is fetched. + +Let's explain what this means with an example. 
Say we have the following fragment component: + +```js +/** + * UsernameComponent.react.js + * + * Fragment Component + */ + +import type {UsernameComponent_user$key} from 'UsernameComponent_user.graphql'; + +const React = require('React'); +const {graphql, useFragment} = require('react-relay'); + +type Props = { + user: UsernameComponent_user$key, +}; + +function UsernameComponent(props: Props) { + const user = useFragment( + graphql` + fragment UsernameComponent_user on User { + username + } + `, + props.user, + ); + return (...); +} + +module.exports = UsernameComponent; +``` + + +And we have the following query component, which queries for some data, and also includes the fragment above: + +```javascript +/** + * AppTabs.react.js + * + * Query Loader Component + */ + + // .... + + const onSelectHomeTab = () => { + loadHomeTabQuery({id: '4'}, {fetchPolicy: 'store-or-network'}); + } + + // ... + +/** + * HomeTab.react.js + * + * Query Component + */ + +const React = require('React'); +const {graphql, usePreloadedQuery} = require('react-relay'); + +const UsernameComponent = require('./UsernameComponent.react'); + +function HomeTab(props: Props) { + const data = usePreloadedQuery( + graphql` + query HomeTabQuery($id: ID!) { + user(id: $id) { + name + ...UsernameComponent_user + } + } + `, + props.queryRef, + ); + + return ( + <> +

{data.user?.name}

+ + + ); +} +``` + + +Say that when this `HomeTab` component is rendered, we've already previously fetched *(_only_)* the `name` for the `User` with `{id: 4}`, and it is locally cached in the Relay store associated with our current Relay environment. + +If we attempt to render the query with a `fetchPolicy` that allows reusing locally cached data (`'store-or-network'`, or `'store-and-network'`), the following will occur: + +* The query will check if any of its locally-required data is missing. In this case, *it isn't*. Specifically, the query is only directly selecting the `name` field, and that field *is* available in the store. + * Relay considers data to be missing only if it is declared locally and missing. In other words, data that is selected within fragment spreads does not affect whether the outer query or fragment is determined to have missing data. +* Given that the query doesn't have any data missing, it will render, and then attempt to render the child `UsernameComponent`. +* When the `UsernameComponent` attempts to render the `UsernameComponent_user` fragment, Relay will notice that some of the data required to render is missing; specifically, the `username` is missing. At this point, since `UsernameComponent` has missing data, it will suspend rendering until the network request completes. Note that regardless of which `fetchPolicy` you choose, a network request will always be started if any piece of data for the full query, i.e. including fragments, is missing. + + +At this point, when `UsernameComponent` suspends due to the missing `username`, ideally we should still be able to render the `User`'s `name` immediately, since it's locally cached. However, since we aren't using a `Suspense` component to catch the fragment's suspension, the suspension will bubble up and the entire `App` component will be suspended. 
+ +In order to achieve the desired effect of rendering the `name` when it's available even if the `username` is missing, we just need to wrap the `UsernameComponent` in `Suspense,` to *allow* the other parts of `App` to continue rendering: + +```js +/** + * HomeTab.react.js + * + * Query Component + */ + +const React = require('React'); +const {Suspense} = require('React'); +const {graphql, usePreloadedQuery} = require('react-relay'); + +const UsernameComponent = require('./UsernameComponent.react'); + + +function HomeTab() { + const data = usePreloadedQuery( + graphql` + query AppQuery($id: ID!) { + user(id: $id) { + name + ...UsernameComponent_user + } + } + `, + props.queryRef, + ); + + return ( + <> +

{data.user?.name}

+ + {/* + Wrap the UserComponent in Suspense to allow other parts of the + App to be rendered even if the username is missing. + */} + <Suspense fallback={<LoadingSpinner />}> + <UsernameComponent user={data.user} /> + </Suspense> + </> + ); +} +``` + + + +The process that we described above works the same way for nested fragments (i.e. fragments included within other fragments). This means that if the data required to render a fragment is locally cached, the fragment component will be able to render, regardless of whether data for any of its child or descendant fragments is missing. If data for a child fragment is missing, we can wrap it in a `Suspense` component to allow other fragments and parts of the app to continue rendering. + +As mentioned in our motivating example, this is desirable because it allows us to skip loading states entirely. More specifically, the ability to render data that is partially available allows us to render intermediate UI states that more closely resemble the final rendered state. + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/staleness-of-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/staleness-of-data.md new file mode 100644 index 0000000000000..639a696917510 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/reusing-cached-data/staleness-of-data.md @@ -0,0 +1,116 @@ +--- +id: staleness-of-data +title: Staleness of Data +slug: /guided-tour/reusing-cached-data/staleness-of-data/ +description: Relay guide to the staleness of data +keywords: +- staleness +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbPushViews from './fb/FbPushViews.md'; + +Assuming our data is [present in the store](../presence-of-data/), we still need to consider the staleness of such data.
+ +By default, Relay will not consider data in the store to be stale (regardless of how long it has been in the cache), unless it's explicitly marked as stale using our data invalidation APIs or if it is older than the query cache expiration time. + +Marking data as stale is useful for cases when we explicitly know that some data is no longer fresh (for example after executing a [Mutation](../../updating-data/graphql-mutations/)). + +Relay exposes the following APIs to mark data as stale within an update to the store: + +## Globally Invalidating the Relay Store + +The coarsest type of data invalidation we can perform is invalidating the *whole* store, meaning that all currently cached data will be considered stale after invalidation. + +To invalidate the store, we can call `invalidateStore()` within an [updater](../../updating-data/graphql-mutations/) function: + +```js +function updater(store) { + store.invalidateStore(); +} +``` + +* Calling `invalidateStore()` will cause *all* data that was written to the store before invalidation occurred to be considered stale, and will require any query to be refetched again the next time it's evaluated. +* Note that an updater function can be specified as part of a [mutation](../../updating-data/graphql-mutations/), [subscription](../../updating-data/graphql-subscriptions/) or just a [local store update](../../updating-data/local-data-updates/). + +## Invalidating Specific Data In The Store + +We can also be more granular about which data we invalidate and only invalidate *specific records* in the store; compared to global invalidation, only queries that reference the invalidated records will be considered stale after invalidation. 
+ +To invalidate a record, we can call `invalidateRecord()` within an [updater](../../updating-data/graphql-mutations/) function: + +```js +function updater(store) { + const user = store.get('<id>'); + if (user != null) { + user.invalidateRecord(); + } +} +``` + +* Calling `invalidateRecord()` on the `user` record will mark *that* specific user in the store as stale. That means that any query that is cached and references that invalidated user will now be considered stale, and will need to be refetched again the next time it's evaluated. +* Note that an updater function can be specified as part of a [mutation](../../updating-data/graphql-mutations/), [subscription](../../updating-data/graphql-subscriptions/) or just a [local store update](../../updating-data/local-data-updates/). + +## Subscribing to Data Invalidation + +Just marking the store or records as stale will cause queries to be refetched the next time they are evaluated; so for example, the next time you navigate back to a page that renders a stale query, the query will be refetched even if the data is cached, since the query references stale data. + +This is useful for a lot of use cases, but there are some times when we'd like to immediately refetch some data upon invalidation, for example: + +* When invalidating data that is already visible in the current page. Since no navigation is occurring, we won't re-evaluate the queries for the current page, so even if some data is stale, it won't be immediately refetched and we will be showing stale data. +* When invalidating data that is rendered on a previous view that was never unmounted; since the view wasn't unmounted, if we navigate back, the queries for that view won't be re-evaluated, meaning that even if some data is stale, it won't be refetched and we will be showing stale data.
+ + + +To support these use cases, Relay exposes the `useSubscribeToInvalidationState` hook: + +```js +function ProfilePage(props) { + // Example of querying data for the current page for a given user + const data = usePreloadedQuery( + graphql`...`, + props.preloadedQuery, + ) + + // Here we subscribe to changes in invalidation state for the given user ID. + // Whenever the user with that ID is marked as stale, the provided callback will + // be executed + useSubscribeToInvalidationState([props.userID], () => { + // Here we can do things like: + // - re-evaluate the query by passing a new preloadedQuery to usePreloadedQuery. + // - imperatively refetch any data + // - render a loading spinner or gray out the page to indicate that refetch + // is happening. + }) + + return (...); +} +``` + +* `useSubscribeToInvalidationState` takes an array of ids, and a callback. Whenever any of the records for those ids are marked as stale, the provided callback will fire. +* Inside the callback, we can react accordingly and refetch and/or update any current views that are rendering stale data. As an example, we could re-execute the top-level `usePreloadedQuery` by keeping the `preloadedQuery` in state and setting a new one here; since that query is stale at that point, the query will be refetched even if the data is cached in the store. + + +## Query Cache Expiration Time + +In addition, the query cache expiration time affects whether certain operations (i.e. a query and variables) can be fulfilled with data that is already present in the store, i.e. whether the data for a query has become stale. + + A stale query is one which can be fulfilled with records from the store, and + +* the time since it was last fetched is greater than the query cache expiration time, or +* which contains at least one record that was invalidated. + +This staleness check occurs when a new request is made (e.g. in a call to `loadQuery`). 
Components which reference stale data will continue to be able to render that data; however, any additional requests which would be fulfilled using stale data will go to the network. + +In order to configure the query cache expiration time, we can specify the `queryCacheExpirationTime` option to the Relay Store: + +```js +const store = new Store(source, {queryCacheExpirationTime: 5 * 60 * 1000 }); +``` + +If the query cache expiration time is not provided, staleness checks only look at whether the referenced records have been invalidated. + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/client-only-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/client-only-data.md new file mode 100644 index 0000000000000..1c7f1c81195c4 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/client-only-data.md @@ -0,0 +1,115 @@ +--- +id: client-only-data +title: Client-only data +slug: /guided-tour/updating-data/client-only-data/ +description: Relay guide to client-only data +keywords: +- client-only +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbClientOnlyDataDir from './fb/FbClientOnlyDataDir.md'; + +## Client-Only Data (Client Schema Extensions) + +Relay provides the ability to extend the GraphQL schema *on the client* (i.e. in the browser), via client schema extensions, in order to model data that only needs to be created, read and updated on the client. This can be useful to add small pieces of information to data that is fetched from the server, or to entirely model client-specific state to be stored and managed by Relay. + +Client schema extensions allows you to modify existing types on the schema (e.g. by adding new fields to a type), or to create entirely new types that only exist in the client. 
+ + +### Extending Existing Types + + + +In order to extend an existing type, add a `.graphql` file to the appropriate schema extension directory (depending on the repo): + + + + + +In order to extend an existing type, add a `.graphql` file to your appropriate source (`--src`) directory: + + + + +```graphql +extend type Comment { + is_new_comment: Boolean +} +``` + + + + + + + + + +* In this example, we're using the `extend` keyword to extend an existing type, and we're adding a new field, `is_new_comment` to the existing `Comment` type, which we will be able to [read](#reading-client-only-data) in our components, and [update](#updating-client-only-data) when necessary using normal Relay APIs; you might imagine that we might use this field to render a different visual treatment for a comment if it's new, and we might set it when creating a new comment. + + + +### Adding New Types + +You can define types using the same regular GraphQL syntax, by defining it inside a `.graphql` file in `html/js/relay/schema/`: + + +```graphql +# You can define more than one type in a single file +enum FetchStatus { + FETCHED + PENDING + ERRORED +} + + +type FetchState { + # You can reuse client types to define other types + status: FetchStatus + + # You can also reference regular server types + started_by: User! +} + +extend type Item { + # You can extend server types with client-only types + fetch_state: FetchState +} +``` + +* In this contrived example, we're defining 2 new client-only types, and `enum` and a regular `type`. Note that they can reference themselves as normal, and reference regular server defined types. Also note that we can extend server types and add fields that are of our client-only types. +* As mentioned previously, we will be able to [read](#reading-client-only-data) and [update](#updating-client-only-data) this data normally via Relay APIs. 
+ + + +### Reading Client-Only Data + +We can read client-only data by selecting it inside [fragments](../../rendering/fragments/) or [queries](../../rendering/queries/) as normal: + +```js +const data = useFragment( + graphql` + fragment CommentComponent_comment on Comment { + + # We can select client-only fields as we would any other field + is_new_comment + + body { + text + } + } + `, + props.user, +); +``` + + + +### Updating Client-Only Data + +In order to update client-only data, you can do so regularly inside [mutation](../graphql-mutations/) or [subscription](../graphql-subscriptions/) updaters, or by using our primitives for doing [local updates](../local-data-updates/) to the store. + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-mutations.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-mutations.md new file mode 100644 index 0000000000000..b4b3536f608cf --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-mutations.md @@ -0,0 +1,378 @@ +--- +id: graphql-mutations +title: GraphQL mutations +slug: /guided-tour/updating-data/graphql-mutations/ +description: Relay guide to GraphQL mutations +keywords: +- mutation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +In GraphQL, data on the server is updated using [GraphQL mutations](https://graphql.org/learn/queries/#mutations). Mutations are read-write server operations, which both modify the data on the backend and allow you to query the modified data in the same request. + +## Writing Mutations + +A GraphQL mutation looks very similar to a query, except that it uses the `mutation` keyword: + +```graphql +mutation FeedbackLikeMutation($input: FeedbackLikeData!)
{ + feedback_like(data: $input) { + feedback { + id + viewer_does_like + like_count + } + } +} +``` + +* The mutation above modifies the server data to "like" the specified `Feedback` object. +* `feedback_like` is a *mutation root field* (or just *mutation field*) which updates data on the backend. + + + +:::info +You can view mutation root fields in the GraphQL Schema Explorer by opening VSCode @ FB and executing the command "Relay: Open GraphQL Schema Explorer". Then, in the "Schema Explorer Tab", click on "Mutation". + +You can click on the various mutation fields to see their parameters, descriptions and exposed fields. +::: + + + +* A mutation is handled in two separate steps: first, the update is processed on the server, and then the query is executed. This ensures that you only see data that has already been updated as part of your mutation response. + +:::note +Note that queries are processed in the same way. Outer selections are calculated before inner selections. It is simply a matter of convention that top-level mutation fields have side-effects, while other fields tend not to. +::: + +* The mutation field (in this case, `feedback_like`) returns a specific GraphQL type which exposes the data for which we can query in the mutation response. + + + +* [It is a best practice](https://fb.workplace.com/groups/644933736023601/?multi_permalinks=823422684841371) to include the `viewer` object and all updated Ents as part of the mutation response. + + + +* In this case, we're querying for the *updated* feedback object, including the updated `like_count` and the updated value for `viewer_does_like`, indicating whether the current viewer likes the feedback object. + + + +* Check out the [Hack documentation on writing mutations](https://www.internalfb.com/intern/wiki/Graphql-for-hack-developers/mutation-root-fields/) for information on how to add a mutation field to your backend code. 
+ + + +An example of a successful response for the above mutation could look like this: + +```json +{ + "feedback_like": { + "feedback": { + "id": "feedback-id", + "viewer_does_like": true, + "like_count": 1, + } + } +} +``` + +In Relay, we can declare GraphQL mutations using the `graphql` tag too: + +```js +const {graphql} = require('react-relay'); + +const feedbackLikeMutation = graphql` + mutation FeedbackLikeMutation($input: FeedbackLikeData!) { + feedback_like(data: $input) { + feedback { + id + viewer_does_like + like_count + } + } + } +`; +``` + +* Note that mutations can also reference GraphQL [variables](../../rendering/variables/) in the same way queries or fragments do. + +## Using `useMutation` to execute a mutation + +In order to execute a mutation against the server in Relay, we can use the `commitMutation` and [useMutation](../../../api-reference/use-mutation) APIs. Let's take a look at an example using the `useMutation` API: + +```js +import type {FeedbackLikeData, LikeButtonMutation} from 'LikeButtonMutation.graphql'; + +const {useMutation, graphql} = require('react-relay'); + +function LikeButton({ + feedbackId: string, +}) { + const [commitMutation, isMutationInFlight] = useMutation( + graphql` + mutation LikeButtonMutation($input: FeedbackLikeData!) { + feedback_like(data: $input) { + feedback { + viewer_does_like + like_count + } + } + } + ` + ); + + return +} +``` + +Let's distill what's happening here. + +* `useMutation` takes a graphql literal containing a mutation as its only argument. +* It returns a tuple of items: + * a callback (which we call `commitMutation`) which accepts a `UseMutationConfig`, and + * a boolean indicating whether a mutation is in flight. +* In addition, `useMutation` accepts a Flow type parameter. As with queries, the Flow type of the mutation is exported from the file that the Relay compiler generates. + * If this type is provided, the `UseMutationConfig` becomes statically typed as well. 
**It is a best practice to always provide this type.** +* Now, when `commitMutation` is called with the mutation variables, Relay will make a network request that executes the `feedback_like` field on the server. In this example, this would find the feedback specified by the variables, and record on the backend that the user liked that piece of feedback. +* Once that field is executed, the backend will select the updated Feedback object and select the `viewer_does_like` and `like_count` fields off of it. + * Since the `Feedback` type contains an `id` field, the Relay compiler will automatically add a selection for the `id` field. +* When the mutation response is received, Relay will find a feedback object in the store with a matching `id` and update it with the newly received `viewer_does_like` and `like_count` values. +* If these values have changed as a result, any components which selected these fields off of the feedback object will be re-rendered. Or, to put it colloquially, any component which depends on the updated data will re-render. + +:::note +The name of the type of the parameter `FeedbackLikeData` is derived from the name of the top-level mutation field, i.e. from `feedback_like`. This type is also exported from the generated `graphql.js` file. +::: + +## Refreshing components in response to mutations + +In the previous example, we manually selected `viewer_does_like` and `like_count`. Components that select these fields will be re-rendered, should the value of those fields change. + +However, it is generally better to spread fragments that correspond to components that we want to refresh in response to the mutation. This is because the data selected by components can change. + +Requiring developers to know about all mutations that might affect their components' data (and keeping them up-to-date) is an example of the kind of global reasoning that Relay wants to avoid requiring. 
+ +For example, we might rewrite the mutation as follows: + +```graphql +mutation FeedbackLikeMutation($input: FeedbackLikeData!) { + feedback_like(data: $input) { + feedback { + ...FeedbackDisplay_feedback + ...FeedbackDetail_feedback + } + } +} +``` + +If this mutation is executed, then whatever fields were selected by the `FeedbackDisplay` and `FeedbackDetail` components will be refetched, and those components will remain in a consistent state. + +:::note +Spreading fragments is generally preferable to refetching the data after a mutation has completed, since the updated data can be fetched in a single round trip. +::: + +## Executing a callback when the mutation completes or errors + +We may want to update some state in response to the mutation succeeding or failing. For example, we might want to alert the user if the mutation failed. The `UseMutationConfig` object can include the following fields to handle such cases: + +* `onCompleted`, a callback that is executed when the mutation completes. It is passed the mutation response (stopping at fragment spread boundaries). + * The value passed to `onCompleted` is the mutation fragment, as read out from the store, **after** updaters and declarative mutation directives are applied. This means that data from within unmasked fragments will not be read, and records that were deleted (e.g. by `@deleteRecord`) may also be null. +* `onError`, a callback that is executed when the mutation errors. It is passed the error that occurred. + +## Declarative mutation directives + +### Manipulating connections in response to mutations + +Relay makes it easy to respond to mutations by adding items to or removing items from connections (i.e. lists). For example, you might want to append a newly created user to a given connection. For more, see [Using declarative directives](../../list-data/updating-connections/#using-declarative-directives).
+ +### Deleting items in response to mutations + +In addition, you might want to delete an item from the store in response to a mutation. In order to do this, you would add the `@deleteRecord` directive to the deleted ID. For example: + +```graphql +mutation DeletePostMutation($input: DeletePostData!) { + delete_post(data: $input) { + deleted_post { + id @deleteRecord + } + } +} +``` + +## Imperatively modifying local data + +At times, the updates you wish to perform are more complex than just updating the values of fields and cannot be handled by the declarative mutation directives. For such situations, the `UseMutationConfig` accepts an `updater` function which gives you full control over how to update the store. + +This is discussed in more detail in the section on [Imperatively modifying store data](../imperatively-modifying-store-data/). + +## Optimistic updates + +Oftentimes, we don't want to wait for the server to respond before we respond to the user interaction. For example, if a user clicks the "Like" button, we would like to instantly show the affected comment, post, etc. has been liked by the user. + +More generally, in these cases, we want to immediately update the data in our store optimistically, i.e. under the assumption that the mutation will complete successfully. If the mutation ends up not succeeding, we would like to roll back that optimistic update. + +### Optimistic response + +In order to enable this, the `UseMutationConfig` can include an `optimisticResponse` field. + +For this field to be Flow-typed, the call to `useMutation` must be passed a Flow type parameter **and** the mutation must be decorated with a `@raw_response_type` directive. 
+ +In the previous example, we might provide the following optimistic response: + +```js +{ + feedback_like: { + feedback: { + // Even though the id field is not explicitly selected, the + // compiler selected it for us + id: feedbackId, + viewer_does_like: true, + }, + }, +} +``` + +Now, when we call `commitMutation`, this data will be immediately written into the store. The item in the store with the matching id will be updated with a new value of `viewer_does_like`. Any components which have selected this field will be re-rendered. + +When the mutation succeeds or errors, the optimistic response will be rolled back. + +Updating the `like_count` field takes a bit more work. In order to update it, we should also read the **current like count** in the component. + +```js +import type {FeedbackLikeData, LikeButtonMutation} from 'LikeButtonMutation.graphql'; +import type {LikeButton_feedback$fragmentType} from 'LikeButton_feedback.graphql'; + +const {useMutation, graphql} = require('react-relay'); + +function LikeButton({ + feedback: LikeButton_feedback$fragmentType, +}) { + const data = useFragment( + graphql` + fragment LikeButton_feedback on Feedback { + __id + viewer_does_like @required(action: THROW) + like_count @required(action: THROW) + } + `, + feedback + ); + + const [commitMutation, isMutationInFlight] = useMutation( + graphql` + mutation LikeButtonMutation($input: FeedbackLikeData!) + @raw_response_type { + feedback_like(data: $input) { + feedback { + viewer_does_like + like_count + } + } + } + ` + ); + + const changeToLikeCount = data.viewer_does_like ? -1 : 1; + return +} +``` + +:::caution + +You should be careful, and consider using [optimistic updaters](../imperatively-modifying-store-data/#example) if the value of the optimistic response depends on the value of the store and if there can be multiple optimistic responses affecting that store value. 
+ +For example, if **two** optimistic responses each increase the like count by one, and the **first** optimistic updater is rolled back, the second optimistic update will still be applied, and the like count in the store will remain increased by two. + +::: + +:::caution + +Optimistic responses contain **many pitfalls!** + +* An optimistic response can contain the data for the full query response, i.e. including the content of fragment spreads. This means that if a developer selects more fields in components whose fragments are spread in an optimistic response, these components may have inconsistent or partial data during an optimistic update. +* Because the type of the optimistic update includes the contents of all recursively nested fragments, it can be very large. Adding `@raw_response_type` to certain mutations can degrade the performance of the Relay compiler. + +::: + +### Optimistic updaters + +Optimistic responses aren't enough for every case. For example, we may want to optimistically update data that we aren't selecting in the mutation. Or, we may want to add or remove items from a connection (and the declarative mutation directives are insufficient for our use case.) + +For situations like these, the `UseMutationConfig` can contain an `optimisticUpdater` field, which allows developers to imperatively and optimistically update the data in the store. This is discussed in more detail in the section on [Imperatively updating store data](../imperatively-modifying-store-data/). + +## Order of execution of updater functions + +In general, execution of the `updater` and optimistic updates will occur in the following order: + +* If an `optimisticResponse` is provided, that data will be written into the store. +* If an `optimisticUpdater` is provided, Relay will execute it and update the store accordingly. +* If an `optimisticResponse` was provided, the declarative mutation directives present in the mutation will be processed on the optimistic response. 
+* If the mutation request succeeds: + * Any optimistic update that was applied will be rolled back. + * Relay will write the server response to the store. + * If an `updater` was provided, Relay will execute it and update the store accordingly. The server payload will be available to the `updater` as a root field in the store. + * Relay will process any declarative mutation directives using the server response. + * The `onCompleted` callback will be called. +* If the mutation request fails: + * Any optimistic update that was applied will be rolled back. + * The `onError` callback will be called. + +## Invalidating data during a mutation + +The recommended approach when executing a mutation is to request *all* the relevant data that was affected by the mutation back from the server (as part of the mutation body), so that our local Relay store is consistent with the state of the server. + +However, often times it can be unfeasible to know and specify all the possible data that would be affected for mutations that have large rippling effects (e.g. imagine "blocking a user" or "leaving a group"). + +For these types of mutations, it's often more straightforward to explicitly mark some data as stale (or the whole store), so that Relay knows to refetch it the next time it is rendered. In order to do so, you can use the data invalidation APIs documented in our [Staleness of Data section](../../reusing-cached-data/staleness-of-data/). + + + +## Handling errors + +GraphQL errors can largely be differentiated as: + +1. Operation (query/mutation/subscription) level errors, and +2.
Field level errors + +### Surfacing mutation level errors + +If you're surfacing an error in the mutation (eg the server rejects the entire mutation because it's invalid), as long as the error returned is considered a [`CRITICAL`](https://www.internalfb.com/code/www/[b5a08782893a]/flib/graphql/experimental/core/error/GraphQL2ErrorSeverity.php?lines=11) error, you can make use of the `onError` callback from `useMutation` to handle that error in whatever way you see fit for your use case. + +If you control the server resolver, the question you should ask is whether or not throwing a CRITICAL error is the correct behavior for the client. Note though that throwing a CRITICAL error means that Relay will no longer process the interaction, which may not always be what you want if you can still partially update your UI. For example, it's possible that the mutation errored, but still wrote some data to the database, in which case you might still want Relay to process the updated fields. + +In the non-CRITICAL case the mutation may have failed, but some data was successfully returned in the case of partial data and/or the error response if encoded in the schema. Relay will still process this data, update its store, as well as components relying on that data. That is not true for the case where you've returned a CRITICAL error. + +### Surfacing field level errors +Field level errors from the server are generally recommended to be at the [`ERROR`](https://www.internalfb.com/code/www/[9120ab8aa8a5]/flib/graphql/experimental/core/error/GraphQL2ErrorSeverity.php?lines=17) level, because your UI should still be able to process the other fields that were successfully returned. If you want to explicitly handle the field level error, then we still recommend [modeling that](../../rendering/error-states/#accessing-errors-in-graphql-responses) in your schema. 
+ + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-subscriptions.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-subscriptions.md new file mode 100644 index 0000000000000..8e36ffc74c9ff --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/graphql-subscriptions.md @@ -0,0 +1,279 @@ +--- +id: graphql-subscriptions +title: GraphQL subscriptions +slug: /guided-tour/updating-data/graphql-subscriptions/ +description: Relay guide to GraphQL subscriptions +keywords: +- subscription +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + +[GraphQL subscriptions](https://our.internmc.facebook.com/intern/wiki/GraphQL_Subscriptions/) are a mechanism to allow clients to query for data in response to a stream of server-side events. + + + + + +GraphQL subscriptions are a mechanism to allow clients to query for data in response to a stream of server-side events. + + + +A GraphQL subscription looks very similar to a query, except that it uses the `subscription` keyword: + +```graphql +subscription FeedbackLikeSubscription($input: FeedbackLikeSubscribeData!) { + feedback_like_subscribe(data: $input) { + feedback { + like_count + } + } +} +``` + +* Establishing a subscription using this GraphQL snippet will cause the application to be notified whenever an event is emitted from the `feedback_like_subscribe` stream. +* `feedback_like_subscribe` is a *subscription root field* (or just *subscription field*), which sets up the subscription on the backend. + + + +:::info +You can view subscription root fields in the GraphQL Schema Explorer by opening VSCode @ FB and executing the command "Relay: Open GraphQL Schema Explorer". Then, in the "Schema Explorer Tab", click on "Subscription". + +You can click on the various subscription fields to see their parameters, descriptions and exposed fields.
+::: + + + +* Like mutations, a subscription is handled in two separate steps. First, a server-side event occurs. Then, the query is executed. + +:::note +Note that the event stream can be completely arbitrary, and can have no relation to the fields selected. In other words, there is no guarantee that the values selected in a subscription will have changed from notification to notification. +::: + +* `feedback_like_subscribe` returns a specific GraphQL type which exposes the data we can query in response to the server-side event. In this case, we're querying for the Feedback object and its updated `like_count`. This allows us to show the like count in real time. + +An example of a subscription payload received by the client could look like this: + +```json +{ + "feedback_like_subscribe": { + "feedback": { + "id": "feedback-id", + "like_count": 321, + } + } +} +``` + +In Relay, we can declare GraphQL subscriptions using the `graphql` tag too: + +```js +const {graphql} = require('react-relay'); + +const feedbackLikeSubscription = graphql` + subscription FeedbackLikeSubscription($input: FeedbackLikeSubscribeData!) { + feedback_like_subscribe(data: $input) { + feedback { + like_count + } + } + } +`; +``` + +* Note that subscriptions can also reference GraphQL [variables](../../rendering/variables/) in the same way queries or fragments do. + +## Using `useSubscription` to create a subscription + +In order to create a subscription in Relay, we can use the `useSubscription` and `requestSubscription` APIs. 
Let's take a look at an example using the `useSubscription` API: + +```js +import type {Environment} from 'react-relay'; +import type {FeedbackLikeSubscribeData} from 'FeedbackLikeSubscription.graphql'; + +const {graphql, useSubscription} = require('react-relay'); +const {useMemo} = require('React'); + +function useFeedbackSubscription( + input: FeedbackLikeSubscribeData, +) { + const config = useMemo(() => ({ + subscription: graphql` + subscription FeedbackLikeSubscription( + $input: FeedbackLikeSubscribeData! + ) { + feedback_like_subscribe(data: $input) { + feedback { + like_count + } + } + } + `, + variables: {input}, + }), [input]); + + return useSubscription(config); +} +``` + +Let's distill what's happening here. + +* `useSubscription` takes a `GraphQLSubscriptionConfig` object, which includes the following fields: + * `subscription`: the GraphQL literal containing the subscription, and + * `variables`: the variables with which to establish the subscription. +* In addition, `useSubscription` accepts a Flow type parameter. As with queries, the Flow type of the subscription is exported from the file that the Relay compiler generates. + * If this type is provided, the `GraphQLSubscriptionConfig` becomes statically typed as well. **It is a best practice to always provide this type.** +* Now, when the `useFeedbackSubscription` hook commits, Relay will establish a subscription. + * Unlike with APIs like `useLazyLoadQuery`, Relay will **not** attempt to establish this subscription during the render phase. +* Once it is established, whenever an event occurs, the backend will select the updated Feedback object and select the `like_count` fields off of it. + * Since the `Feedback` type contains an `id` field, the Relay compiler will automatically add a selection for the `id` field. +* When the subscription response is received, Relay will find a feedback object in the store with a matching `id` and update it with the newly received `like_count` value.
+* If these values have changed as a result, any components which selected these fields off of the feedback object will be re-rendered. Or, to put it colloquially, any component which depends on the updated data will re-render. + +:::note +The name of the type of the parameter `FeedbackLikeSubscribeData` is derived from the name of the top-level subscription field, i.e. from `feedback_like_subscribe`. This type is also exported from the generated `graphql.js` file. +::: + +:::caution + +The `GraphQLSubscriptionConfig` object passed to `useSubscription` should be memoized! Otherwise, `useSubscription` will dispose the subscription and re-establish it with every render! + +::: + +## Refreshing components in response to subscription events + +In the previous example, we manually selected `like_count`. Components that select this field will be re-rendered, should we receive an updated value. + +However, it is generally better to spread fragments that correspond to the components that we want to refresh in response to the subscription. This is because the data selected by components can change. + +Requiring developers to know about all subscriptions that might fetch their components' data (and keeping them up-to-date) is an example of the kind of global reasoning that Relay wants to avoid requiring. + +For example, we might rewrite the subscription as follows: + +```graphql +subscription FeedbackLikeSubscription($input: FeedbackLikeSubscribeData!) { + feedback_like_subscribe(data: $input) { + feedback { + ...FeedbackDisplay_feedback + ...FeedbackDetail_feedback + } + } +} +``` + +Now, whenever an event in the `feedback_like_subscribe` event stream occurs, the data selected by the `FeedbackDisplay` and `FeedbackDetail` components will be refetched, and those components will remain in a consistent state. + +:::note +Spreading fragments is generally preferable to refetching the data in response to subscription events, since the updated data can be fetched in a single round trip.
+::: + +## Executing a callback when the subscription fires, errors or is closed by the server + +In addition to writing updated data to the Relay store, we may want to execute a callback whenever a subscription payload is received. We may want to execute a callback if an error is received or if the server ends the subscription. The `GraphQLSubscriptionConfig` can include the following fields to handle such cases: + +* `onNext`, a callback that is executed when a subscription payload is received. It is passed the subscription response (stopping at fragment spread boundaries). +* `onError`, a callback that is executed when the subscription errors. It is passed the error that occurred. +* `onCompleted`, a callback that is executed when the server ends the subscription. + +## Declarative mutation directives + +[Declarative mutation directives](../../list-data/updating-connections/#using-declarative-directives) and [`@deleteRecord`](../graphql-mutations/#deleting-items-in-response-to-mutations) work in subscriptions, too. + +### Manipulating connections in response to subscription events + +Relay makes it easy to respond to subscription events by adding items to or removing items from connections (i.e. lists). For example, you might want to append a newly created user to a given connection. For more, see [Using declarative directives](../../list-data/updating-connections/#using-declarative-directives). + +### Deleting items in response to subscription events + +In addition, you might want to delete an item from the store in response to a subscription event. In order to do this, you would add the `@deleteRecord` directive to the deleted ID. For example: + +```graphql +subscription DeletePostSubscription($input: DeletePostSubscribeData!)
{ + delete_post_subscribe(data: $input) { + deleted_post { + id @deleteRecord + } + } +} +``` + +## Imperatively modifying local data + +At times, the updates you wish to perform are more complex than just updating the values of fields and cannot be handled by the declarative mutation directives. For such situations, the `GraphQLSubscriptionConfig` accepts an `updater` function which gives you full control over how to update the store. + +This is discussed in more detail in the section on [Imperatively updating store data](../imperatively-modifying-store-data/). + +## Configuring the Network Layer + + + +You will need to Configure your [Network layer](../../../guides/network-layer) to handle subscriptions. + +Usually GraphQL subscriptions are communicated over [WebSockets](https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API), here's an example using [graphql-ws](https://github.com/enisdenjo/graphql-ws): + +```javascript +import { + ... + Network, + Observable +} from 'relay-runtime'; +import { createClient } from 'graphql-ws'; + +const wsClient = createClient({ + url:'ws://localhost:3000', +}); + +const subscribe = (operation, variables) => { + return Observable.create((sink) => { + return wsClient.subscribe( + { + operationName: operation.name, + query: operation.text, + variables, + }, + sink, + ); + }); +} + +const network = Network.create(fetchQuery, subscribe); +``` + +Alternatively, the legacy [subscriptions-transport-ws](https://github.com/apollographql/subscriptions-transport-ws) library can be used too: + +```javascript +import { + ... 
+ Network, + Observable +} from 'relay-runtime'; +import { SubscriptionClient } from 'subscriptions-transport-ws'; + +const subscriptionClient = new SubscriptionClient('ws://localhost:3000', { + reconnect: true, +}); + +const subscribe = (request, variables) => { + const subscribeObservable = subscriptionClient.request({ + query: request.text, + operationName: request.name, + variables, + }); + // Important: Convert subscriptions-transport-ws observable type to Relay's + return Observable.from(subscribeObservable); +}; + +const network = Network.create(fetchQuery, subscribe); +``` + + + + +At Facebook, the Network Layer has already been configured to handle GraphQL Subscriptions. For more details on writing subscriptions at Facebook, check out this [guide](../../../guides/writing-subscriptions/). For a guide on setting up subscriptions on the server side, check out this [wiki](https://our.internmc.facebook.com/intern/wiki/GraphQL_Subscriptions/creating-a-new-subscription/). + + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-linked-fields.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-linked-fields.md new file mode 100644 index 0000000000000..8ca9e7643d250 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-linked-fields.md @@ -0,0 +1,521 @@ +--- +id: imperatively-modifying-linked-fields +title: Imperatively modifying linked fields +slug: /guided-tour/updating-data/imperatively-modifying-linked-fields/ +description: Using readUpdatableQuery to update linked fields in the store +keywords: +- record source +- store +- updater +- typesafe updaters +- readUpdatableQuery +- readUpdatableFragment +- updatable +- assignable +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + +:::caution + +Because in TypeScript, 
[getters and setters cannot have different types](https://github.com/microsoft/TypeScript/issues/43662), and the generated types of getters and setters are not the same, `readUpdatableQuery` is currently unusable with TypeScript. `readUpdatableFragment` is usable, as long as the updatable fragment contains only scalar fields. + +::: + + + +:::note +See also [using readUpdatableQuery to update scalar fields in the store](../imperatively-modifying-store-data). +::: + + +The examples in the [previous section](../imperatively-modifying-store-data/) showed how to use the `readUpdatableQuery` API to update scalar fields like `is_new_comment` and `is_selected`. + +The examples did **not** cover how to assign to linked fields. Let's start with an example of a component which allows the user of the application to update the Viewer's `best_friend` field. + +## Example: setting the viewer's best friend + +In order to assign a viewer's best friend, that viewer must have such a field. It may be defined by the server schema, or it may be defined locally in a schema extension as follows: + +```graphql +extend type Viewer { + best_friend: User, +} +``` + +Next, let's define a fragment and give it the `@assignable` directive, making it an **assignable fragment**. Assignable fragments can only contain a single field, `__typename`. This fragment will be on the `User` type, which is the type of the `best_friend` field. + +```js +// AssignBestFriendButton.react.js +graphql` + fragment AssignBestFriendButton_assignable_user on User @assignable { + __typename + } +`; +``` + +The fragment must be spread at both the source (i.e. on the viewer's new best friend), and at the destination (within the viewer's `best_friend` field in the updatable query). + +Let's define a component with a fragment where we spread `AssignBestFriendButton_assignable_user`. This user will be the viewer's new best friend.
+ +```js +// AssignBestFriendButton.react.js +import type {AssignBestFriendButton_user$key} from 'AssignBestFriendButton_user.graphql'; + +const {useFragment} = require('react-relay'); + +export default function AssignBestFriendButton({ + someTypeRef: AssignBestFriendButton_user$key, +}) { + const data = useFragment(graphql` + fragment AssignBestFriendButton_someType on SomeType { + user { + name + ...AssignableBestFriendButton_assignable_user + } + } + `, someTypeRef); + + // We will replace this stub with the real thing below. + const onClick = () => {}; + + return (); +} +``` + +That's great! Now, we have a component that renders a button. Let's fill out that button's click handler by using the `commitLocalUpdate` and `readUpdatableQuery` APIs to assign `viewer.best_friend`. + +* In order to make it valid to assign `data.user` to `best_friend`, we must **also** spread `AssignBestFriendButton_assignable_user` under the `best_friend` field in the viewer in the updatable query or fragment. + +```js +import type {RecordSourceSelectorProxy} from 'react-relay'; + +const {commitLocalUpdate, useRelayEnvironment} = require('react-relay'); + +// ... 
+ +const environment = useRelayEnvironment(); +const onClick = () => { + const updatableData = commitLocalUpdate( + environment, + (store: RecordSourceSelectorProxy) => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query AssignBestFriendButtonUpdatableQuery + @updatable { + viewer { + best_friend { + ...AssignableBestFriendButton_assignable_user + } + } + } + `, + {} + ); + + if (data.user != null && updatableData.viewer != null) { + updatableData.viewer.best_friend = data.user; + } + } + ); +}; +``` + +### Putting it all together + +The full example is as follows: + +```graphql +extend type Viewer { + best_friend: User, +} +``` + +```js +// AssignBestFriendButton.react.js +import type {AssignBestFriendButton_user$key} from 'AssignBestFriendButton_user.graphql'; +import type {RecordSourceSelectorProxy} from 'react-relay'; + +const {commitLocalUpdate, useFragment, useRelayEnvironment} = require('react-relay'); + +graphql` + fragment AssignBestFriendButton_assignable_user on User @assignable { + __typename + } +`; + +export default function AssignBestFriendButton({ + someTypeRef: AssignBestFriendButton_someType$key, +}) { + const data = useFragment(graphql` + fragment AssignBestFriendButton_someType on SomeType { + user { + name + ...AssignableBestFriendButton_assignable_user + } + } + `, someTypeRef); + + const environment = useRelayEnvironment(); + const onClick = () => { + const updatableData = commitLocalUpdate( + environment, + (store: RecordSourceSelectorProxy) => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query AssignBestFriendButtonUpdatableQuery + @updatable { + viewer { + best_friend { + ...AssignableBestFriendButton_assignable_user + } + } + } + `, + {} + ); + + if (data.user != null && updatableData.viewer != null) { + updatableData.viewer.best_friend = data.user; + } + } + ); + }; + + return (); +} +``` + +Let's recap what is happening here. 
+ +* We are writing a component in which clicking a button results in a user is being assigned to `viewer.best_friend`. After this button is clicked, all components which were previously reading the `viewer.best_friend` field will be re-rendered, if necessary. +* The source of the assignment is a user where an **assignable fragment** is spread. +* The target of the assignment is accessed using the `commitLocalUpdate` and `readUpdatableQuery` APIs. +* The query passed to `readUpdatableQuery` must include the `@updatable` directive. +* The target field must have that same **assignable fragment** spread. +* We are checking whether `data.user` is not null before assigning. This isn't strictly necessary. However, if we assign `updatableData.viewer.best_friend = null`, we will be nulling out the linked field in the store! This is (probably) not what you want. + +## Pitfalls + +* Note that there are no guarantees about what fields are present on the assigned user. This means that any consumes an updated field has no guarantee that the required fields were fetched and are present on the assigned object. + + + +:::note + +It is technically feasible to add fields to the assignable fragment, which would have the effect of guaranteeing that certain fields are present in the assigned object. + +If this is a need, please reach out to [Relay Support](https://fb.workplace.com/groups/relay.support). + +::: + + + +## Example: Assigning to a list + +Let's modify the previous example to append the user to a list of best friends. In this example, the following principle is relevant: + +> Every assigned linked field (i.e. the right hand side of the assignment) **must originate in a read-only fragment, query, mutation or subscription**. + +This means that `updatableData.foo = updatableData.foo` is invalid. For the same reason, `updatableData.viewer.best_friends = updatableData.viewer.best_friends.concat([newBestFriend])` is invalid. 
To work around this restriction, we must select the existing best friends from a read-only fragment, and perform the assignment as follows: `viewer.best_friends = existing_list.concat([newBestFriend])`. + +Consider the following full example: + +```graphql +extend type Viewer { + # We are now defined a "best_friends" field instead of a "best_friend" field + best_friends: [User!], +} +``` + +```js +// AssignBestFriendButton.react.js +import type {AssignBestFriendButton_user$key} from 'AssignBestFriendButton_user.graphql'; +import type {AssignBestFriendButton_viewer$key} from 'AssignBestFriendButton_viewer'; + +import type {RecordSourceSelectorProxy} from 'react-relay'; + +const {commitLocalUpdate, useFragment, useRelayEnvironment} = require('react-relay'); + +graphql` + fragment AssignBestFriendButton_assignable_user on User @assignable { + __typename + } +`; + +export default function AssignBestFriendButton({ + someTypeRef: AssignBestFriendButton_someType$key, + viewerFragmentRef: AssignBestFriendButton_viewer$key, +}) { + const data = useFragment(graphql` + fragment AssignBestFriendButton_someType on SomeType { + user { + name + ...AssignableBestFriendButton_assignable_user + } + } + `, someTypeRef); + + const viewer = useFragment(graphql` + fragment AssignBestFriendButton_viewer on Viewer { + best_friends { + # since viewer.best_friends appears in the right hand side of the assignment + # (i.e. 
updatableData.viewer.best_friends = viewer.best_friends.concat(...)), + # the best_friends field must contain the correct assignable fragment spread + ...AssignableBestFriendButton_assignable_user + } + } + `, viewerRef); + + const environment = useRelayEnvironment(); + const onClick = () => { + commitLocalUpdate( + environment, + (store: RecordSourceSelectorProxy) => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query AssignBestFriendButtonUpdatableQuery + @updatable { + viewer { + best_friends { + ...AssignableBestFriendButton_assignable_user + } + } + } + `, + {} + ); + + if (data.user != null && updatableData.viewer != null && viewer.best_friends != null) { + updatableData.viewer.best_friends = [ + ...viewer.best_friends, + data.user, + ]; + } + } + ); + }; + + return (); +} +``` + +## Example: assigning from an abstract field to a concrete field + +If you are assigning from an abstract field, e.g. a `Node` to a `User` (which implements `Node`), you must use an inline fragment to refine the `Node` type to `User`. Consider this snippet: + +```js +const data = useFragment(graphql` + fragment AssignBestFriendButton_someType on Query { + node(id: "4") { + ... on User { + __typename + ...AssignableBestFriendButton_assignable_user + } + } + } +`, queryRef); + +const environment = useRelayEnvironment(); +const onClick = () => { + const updatableData = commitLocalUpdate( + environment, + (store: RecordSourceSelectorProxy) => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query AssignBestFriendButtonUpdatableQuery + @updatable { + viewer { + best_friend { + ...AssignableBestFriendButton_assignable_user + } + } + } + `, + {} + ); + + if (data.node != null && data.node.__typename === "User" && updatableData.viewer != null) { + updatableData.viewer.best_friend = data.node; + } + } + ); +}; +``` + +In this snippet, we do two things: + +* We use an inline fragment to refine the `Node` type to the `User` type. 
Inside of this refinement, we spread the assignable fragment. +* We check that `data.node.__typename === "User"`. This indicates to Flow that within that if block, `data.node` is known to be a user, and therefore `updatableData.viewer.best_friend = data.node` can typecheck. + +## Example: assigning to an interface when the source is guaranteed to implement that interface + +You may wish to assign to a destination field that has an interface type (in this example, `Actor`). If the source field is guaranteed to implement that interface, then assignment is straightforward. + +For example, the source might have the same interface type or have a concrete type (`User`, in this example) that implements that interface. + +Consider the following snippet: + +```js +graphql` + fragment Foo_actor on Actor @assignable { + __typename + } +`; + +const data = useFragment(graphql` + fragment Foo_query on Query { + user { + ...Foo_actor + } + viewer { + actor { + ...Foo_actor + } + } + } +`, queryRef); + +const environment = useRelayEnvironment(); +const onClick = () => { + commitLocalUpdate(environment, store => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query FooUpdatableQuery @updatable { + viewer { + actor { + ...Foo_actor + } + } + } + `, + {} + ); + + // Assigning the user works as you would expect + if (updatableData.viewer != null && data.user != null) { + updatableData.viewer = data.user; + } + + // As does assigning the viewer + if (updatableData.viewer != null && data.viewer?.actor != null) { + updatableData.viewer = data.viewer.actor; + } + }); +}; +``` + +## Example: assigning to an interface when the source is **not** guaranteed to implement that interface + +You may wish to assign to a destination field that has an interface type (in this example, `Actor`). If the source type (e.g. `Node`) is **not** known to implement that interface, then an extra step is involved: validation. 
+ + + +:::note + +With additional changes to Relay's type generation, this can be made simpler. Please reach out to [Robert Balicki](https://www.internalfb.com/profile/view/1238951) if this is a pain point for you. + +::: + + + +In order to understand why, some background is necessary. The flow type for the setter for an interface field might look like: + +```js +set actor(value: ?{ + +__id: string, + +__isFoo_actor: string, + +$fragmentSpreads: Foo_actor$fragmentType, + ... +}): void, +``` + +The important thing to note is that the setter expects an object with a non-null `__isFoo_actor` field. + +When an assignable fragment with an abstract type is spread in a regular fragment, it results in an `__isFoo_actor: string` selection that is not optional if the type is known to implement the interface, and optional otherwise. + +Since a `Node` is **not** guaranteed to implement `Actor`, when the Relay compiler encounters the selection `node(id: "4") { ...Foo_actor }`, it will emit an optional field (`__isFoo_actor?: string`). Attempting to assign this to `updatableData.viewer.actor` will not typecheck! + +### Introducing validators + +The generated file for every generated artifact includes a named `validator` export. In our example, the function is as follows: + +```js +function validate(value/*: { + +__id: string, + +__isFoo_actor?: string, + +$fragmentSpreads: Foo_actor$fragmentType, + ... +}*/)/*: false | { + +__id: string, + +__isFoo_actor: string, + +$fragmentSpreads: Foo_actor$fragmentType, + ... +}*/ { + return value.__isFoo_actor != null ? (value/*: any*/) : false; +} +``` + +In other words, this function checks for the presence of the `__isFoo_actor` field. If it is found, it returns the same object, but with a flow type that is valid for assignment. If not, it returns false. 
+ +### Example + +Let's put this all together in an example: + +```js +import {validate as validateActor} from 'Foo_actor.graphql'; + +graphql` + fragment Foo_actor on Actor @assignable { + __typename + } +`; + +const data = useFragment(graphql` + fragment Foo_query on Query { + node(id: "4") { + ...Foo_actor + } + } +`, queryRef); + +const environment = useRelayEnvironment(); +const onClick = () => { + commitLocalUpdate(environment, store => { + const {updatableData} = store.readUpdatableQuery( + graphql` + query FooUpdatableQuery @updatable { + viewer { + actor { + ...Foo_actor + } + } + } + `, + {} + ); + + if (updatableData.viewer != null && data.node != null) { + const validActor = validateActor(data.node); + if (validActor !== false) { + updatableData.viewer.actor = validActor; + } + } + }); +}; +``` + +### Can flow be used to infer the presence of this field? + +Unfortunately, if you check for the presence of `__isFoo_actor`, Flow does not infer that (on the type level), the field is not optional. Hence, we need to use validators. + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data-legacy.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data-legacy.md new file mode 100644 index 0000000000000..4ebb8049ba31c --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data-legacy.md @@ -0,0 +1,142 @@ +--- +id: imperatively-modifying-store-data-unsafe +title: Imperatively modifying store data (unsafe) +slug: /guided-tour/updating-data/imperatively-modifying-store-data-unsafe/ +description: Imperatively modifying store data +keywords: +- record source +- store +- updater +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Data in Relay stores can be imperatively modified within updater functions. 
+
+## When to use updaters
+
+### Complex client updates
+
+You might provide an updater function if the changes to local data are more complex than what can be achieved by simply writing a network response to the store and cannot be handled by the declarative mutation directives.
+
+### Client schema extensions
+
+In addition, since the network response necessarily will not include data for fields defined in client schema extensions, you may wish to use an updater to initialize data defined in client schema extensions.
+
+### Use of other APIs
+
+Lastly, there are things you can only achieve using updaters, such as invalidating nodes, deleting nodes, finding all connections at a given field, etc.
+
+### If multiple optimistic responses modify a given store value
+
+If two optimistic responses affect a given value, and the first optimistic response is rolled back, the second one will remain applied.
+
+For example, if two optimistic responses each increase a story's like count by one, and the first optimistic response is rolled back, the second optimistic response remains applied. Since the second optimistic response is **not recalculated**, the value of the like count will remain increased by two.
+
+An optimistic updater, on the other hand, would be re-run in this circumstance.
+
+## When **not** to use updaters
+
+### To trigger other side effects
+
+You should use the `onCompleted` callback to trigger other side effects.
+
+## The various types of updater functions
+
+The `useMutation` and `commitMutation` APIs accept configuration objects which can include `optimisticUpdater` and `updater` fields. The `requestSubscription` and `useSubscription` APIs accept configuration objects which can include `updater` fields.
+
+In addition, there is another API (`commitLocalUpdate`) which also accepts an updater function. It will be discussed in the [Other APIs for modifying local data](../local-data-updates/) section.
+ +## Optimistic updaters vs updaters + +Mutations can have both optimistic and regular updaters. Optimistic updaters are executed when a mutation is triggered. When that mutation completes or errors, the optimistic update is rolled back. At that point, the mutation response is written to the store and regular updaters are executed. See [order of execution of updater functions](../graphql-mutations/#order-of-execution-of-updater-functions). + +Regular updaters are executed when a mutation completes successfully. + +## Example + +Let's consider an example that provides an updater to `commitMutation`. + +```js +import type {Environment} from 'react-relay'; +import type {CommentCreateData, CreateCommentMutation} from 'CreateCommentMutation.graphql'; + +const {commitMutation, graphql} = require('react-relay'); +const {ConnectionHandler} = require('relay-runtime'); + +function commitCommentCreateMutation( + environment: Environment, + feedbackID: string, + input: CommentCreateData, +) { + return commitMutation(environment, { + mutation: graphql` + mutation CreateCommentMutation($input: CommentCreateData!) { + comment_create(input: $input) { + comment_edge { + cursor + node { + body { + text + } + } + } + } + } + `, + variables: {input}, + updater: (store: RecordSourceSelectorProxy, _response: ?CreateCommentMutation$data) => { + // we are not using _response in this example, but it is + // provided and statically typed. 
+ + const feedbackRecord = store.get(feedbackID); + + // Get connection record + const connectionRecord = ConnectionHandler.getConnection( + feedbackRecord, + 'CommentsComponent_comments_connection', + ); + + // Get the payload returned from the server + const payload = store.getRootField('comment_create'); + + // Get the edge inside the payload + const serverEdge = payload.getLinkedRecord('comment_edge'); + + // Build edge for adding to the connection + const newEdge = ConnectionHandler.buildConnectionEdge( + store, + connectionRecord, + serverEdge, + ); + + // Add edge to the end of the connection + ConnectionHandler.insertEdgeAfter( + connectionRecord, + newEdge, + ); + }, + }); +} + +module.exports = {commit: commitCommentCreateMutation}; +``` + +Let's distill this example: + +* The updater receives a `store` argument, which is an instance of a [`RecordSourceSelectorProxy`](../../../api-reference/store/); this interface allows you to *imperatively* write and read data directly to and from the Relay store. This means that you have full control over how to update the store in response to the mutation response: you can *create entirely new records*, or *update or delete existing ones*. +* The updater receives a second `data` argument, which contains the data selected by the mutation fragment. This can be used to retrieve the payload data without interacting with the *`store`*. The type of this mutation response can be imported from the auto-generated `Mutation.graphql.js` file, and is given the name `MutationName$data`. + * The type of this `data` argument is a nullable version of the `$data` type. + * The `data` arguments contains just the data selected directly by the mutation argument. In other words, if another fragment is spread in the mutation, the data from that fragment will not be available within `data` by default. +* In our specific example, we're adding a new comment to our local store after it has successfully been added on the server. 
Specifically, we're adding a new item to a connection; for more details on the specifics of how that works, check out our section on [adding and removing items from a connection](../../list-data/updating-connections/). + * There is no need for an updater in this example — it would be a great place to use the `@appendEdge` directive instead! +* Note that the mutation response is a *root field* record that can be read from the `store` using the `store.getRootField` API. In our case, we're reading the `comment_create` root field, which is a root field in the mutation response. +* Note that the `root` field of the mutation is different from the `root` of queries, and `store.getRootField` in the mutation updater can only get the record from the mutation response. To get records from the root that's not in the mutation response, use `store.getRoot().getLinkedRecord` instead. +* Once the updater completes, any local data updates caused by the mutation `updater` will automatically cause components subscribed to the data to be notified of the change and re-render. + +## Learn more + +See the full APIs [here](../../../api-reference/store/). 
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data.md new file mode 100644 index 0000000000000..ccf71d1928048 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/imperatively-modifying-store-data.md @@ -0,0 +1,275 @@ +--- +id: imperatively-modifying-store-data +title: Imperatively modifying store data +slug: /guided-tour/updating-data/imperatively-modifying-store-data/ +description: Using readUpdatableQuery to update scalar fields in the store +keywords: +- record source +- store +- updater +- typesafe updaters +- readUpdatableQuery +- readUpdatableFragment +- updatable +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +:::note +See also [this guide on updating linked fields in the store](../imperatively-modifying-linked-fields). +::: + +Data in Relay stores can be imperatively modified within updater functions. + +## When to use updaters + +### Complex client updates + +You might provide an updater function if the changes to local data are more complex than what can be achieved by simply writing a network response to the store and cannot be handled by the declarative mutation directives. + +### Client schema extensions + +In addition, since the network response necessarily will not include data for fields defined in client schema extensions, you may wish to use an updater to initialize data defined in client schema extensions. + +### Use of other APIs + +Lastly, there are things you can only achieve using updaters, such as invalidating nodes, deleting nodes, finding all connections at a given field, etc. 
+ +### If multiple optimistic responses modify a given store value + +If two optimistic responses affect a given value, and the first optimistic response is rolled back, the second one will remain applied. + +For example, if two optimistic responses each increase a story's like count by one, and the first optimistic response is rolled back, the second optimistic response remains applied. However, it is **not recalculated**, and the value of the like count will remain increased by two. + +## When **not** to use updaters + +### To trigger other side effects + +You should use the `onCompleted` callback to trigger other side effects. `onCompleted` callbacks are guaranteed to be called once, but updaters and optimistic updaters can be called repeatedly. + +## The various types of updater functions + +The `useMutation` and `commitMutation` APIs accept configuration objects which can include `optimisticUpdater` and `updater` fields. The `requestSubscription` and `useSubscription` APIs accept configuration objects which can include `updater` fields. + +In addition, there is another API (`commitLocalUpdate`) which also accepts an updater function. It will be discussed in the [Other APIs for modifying local data](../local-data-updates/) section. + +## Optimistic updaters vs updaters + +Mutations can have both optimistic and regular updaters. Optimistic updaters are executed when a mutation is triggered. When that mutation completes or errors, the optimistic update is rolled back. + +Regular updaters are executed when a mutation completes successfully. + +## Example + +Let's construct an example in which an `is_new_comment` field (which is defined in a schema extension) is set to `true` on a newly created Feedback object in a mutation updater. 
+ +```graphql +# Feedback.graphql +extend type Feedback { + is_new_comment: Boolean +} +``` + +```js +// CreateFeedback.js +import type {Environment} from 'react-relay'; +import type { + FeedbackCreateData, + CreateFeedbackMutation, + CreateFeedbackMutation$data, +} from 'CreateFeedbackMutation.graphql'; + +const {commitMutation, graphql} = require('react-relay'); +const {ConnectionHandler} = require('relay-runtime'); + +function commitCreateFeedbackMutation( + environment: Environment, + input: FeedbackCreateData, +) { + return commitMutation(environment, { + mutation: graphql` + mutation CreateFeedbackMutation($input: FeedbackCreateData!) { + feedback_create(input: $input) { + feedback { + id + # Step 1: in the mutation response, spread an updatable fragment (defined below). + # This updatable fragment will select the fields that we want to update on this + # particular feedback object. + ...CreateFeedback_updatable_feedback + } + } + } + `, + variables: {input}, + + // Step 2: define an updater + updater: (store: RecordSourceSelectorProxy, response: ?CreateCommentMutation$data) => { + // Step 3: Access and nullcheck the feedback object. + // Note that this could also have been achieved with the @required directive. + const feedbackRef = response?.feedback_create?.feedback; + if (feedbackRef == null) { + return; + } + + // Step 3: call store.readUpdatableFragment + const {updatableData} = store.readUpdatableFragment( + // Step 4: Pass it a fragment literal, where the fragment contains the @updatable directive. + // This fragment selects the fields that you wish to update on the feedback object. + // In step 1, we spread this fragment in the query response. + graphql` + fragment CreateFeedback_updatable_feedback on Feedback @updatable { + is_new_comment + } + `, + // Step 5: Pass the fragment reference. + feedbackRef + ); + + // Step 6: Mutate the updatableData object! 
+      updatableData.is_new_comment = true;
+    },
+  });
+}
+
+module.exports = {commit: commitCreateFeedbackMutation};
+```
+
+Let's distill what's going on here.
+
+* The `updater` accepts two parameters: a `RecordSourceSelectorProxy` and an optional object that is the result of reading out the mutation response.
+  * The type of this second argument is a nullable version of the `$data` type that is imported from the generated mutation file.
+  * The second argument contains just the data selected directly by the mutation argument. In other words, it will not contain any fields selected solely by spread fragments.
+* This `updater` is executed after the mutation response has been written to the store.
+* In this example updater, we do three things:
+  * First, we spread an updatable fragment in the mutation response.
+  * Second, we read out the fields selected by this fragment by calling `readUpdatableFragment`. This returns an updatable proxy object.
+  * Third, we update fields on this updatable proxy.
+* Once this updater completes, the updates that have been recorded are written to the store, and all affected components are re-rendered.
+
+## Example 2: Updating data in response to user interactions
+
+Let's consider the common case of updating store data in response to a user interaction. In a click handler, let's toggle an `is_selected` field. This field is defined on Users in a client schema extension.
+ +```graphql +# User.graphql +extend type User { + is_selected: Boolean +} +``` + +```js +// UserSelectToggle.react.js +import type {RecordSourceSelectorProxy} from 'react-relay'; +import type {UserSelectToggle_viewer$key} from 'UserSelectToggle_viewer.graphql'; + +const {useRelayEnvironment, commitLocalUpdate} = require('react-relay'); + +function UserSelectToggle({ userId, viewerRef }: { + userId: string, + viewerRef: UserSelectToggle_viewer$key, +}) { + const viewer = useFragment(graphql` + fragment UserSelectToggle_viewer on Viewer { + user(user_id: $user_id) { + id + name + is_selected + ...UserSelectToggle_updatable_user + } + } + `, viewerRef); + + const environment = useRelayEnvironment(); + + return +} +``` + +Let's distill what's going on here. + +* In a click handler, we call `commitLocalUpdate`, which accepts a Relay environment and an updater function. **Unlike in the previous examples, this updater does not accept a second parameter** because there is no associated network payload. +* In this updater function, we access get an updatable proxy object by calling `store.readUpdatableFragment`, and toggle the `is_selected` field. +* Like the previous example in which we called `readUpdatableFragment`, this can be rewritten to use the `readUpdatableQuery` API. + +:::note +This example can be rewritten using the `environment.commitPayload` API, albeit without type safety. +::: + +## Alternative API: `readUpdatableQuery`. + +In the previous examples, we used an updatable fragment to access the record whose fields we want to update. This can also be possible to do with an updatable query. + +If we know the path from the root (i.e. the object whose type is `Query`) to the record we wish to modify, we can use the `readUpdatableQuery` API to achieve this. 
+
+For example, we could set the viewer's `name` field in response to an event as follows:
+
+```js
+// NameUpdater.react.js
+function NameUpdater({ queryRef }: {
+  queryRef: NameUpdater_viewer$key,
+}) {
+  const environment = useRelayEnvironment();
+  const data = useFragment(
+    graphql`
+      fragment NameUpdater_viewer on Viewer {
+        name
+      }
+    `,
+    queryRef
+  );
+  const [newName, setNewName] = useState(data?.viewer?.name);
+  const onSubmit = () => {
+    commitLocalUpdate(environment, store => {
+      const {updatableData} = store.readUpdatableQuery(
+        graphql`
+          query NameUpdaterUpdateQuery @updatable {
+            viewer {
+              name
+            }
+          }
+        `,
+        {}
+      );
+      const viewer = updatableData.viewer;
+      if (viewer != null) {
+        viewer.name = newName;
+      }
+    });
+  };
+
+  // etc
+}
+```
+
+* This particular example can be rewritten using `readUpdatableFragment`. However, you may prefer `readUpdatableQuery` for several reasons:
+  * You do not have ready access to a fragment reference, e.g. if the call to `commitLocalUpdate` is not obviously associated with a component.
+  * You do not have ready access to a fragment where we select the **parent record** of the record we wish to modify (e.g. the `Query` in this example). Due to a known type hole in Relay, **updatable fragments cannot be spread at the top level.**
+  * You wish to use variables in the updatable fragment. Currently, updatable fragments reuse the variables that were passed to the query. This means that you cannot, for example, have an updatable fragment with fragment-local variables and call `readUpdatableFragment` multiple times, each time passing different variables.
+ + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/introduction.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/introduction.md new file mode 100644 index 0000000000000..50e732fc8134f --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/introduction.md @@ -0,0 +1,24 @@ +--- +id: introduction +title: Introduction +slug: /guided-tour/updating-data/ +description: Relay guide to updating data +keywords: +- updating +- mutation +- useMutation +- commitMutation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +In previous sections, the guided tour discussed how to fetch data using GraphQL queries. Though [refetching data](../refetching/) can have the *incidental* effect of modifying data in Relay's local store (if the refetched data has changed), we haven't discussed any ways to *intentionally* modify our locally stored data. + +This section will do just that: it will discuss how to update data on the server and how to update our local data store. + +:::note +The **Relay store** is a cache of GraphQL data, associated with a given Relay environment, that we have encountered during the execution of an application. 
* `commitLocalUpdate` simply takes an environment and an updater function.
This means that you have full control over how to update the store: you can *create entirely new records*, or *update or delete existing ones*. + * Unlike regular and optimistic updaters that are accepted by the mutation and subscription APIs, the updater passed to `commitLocalUpdate` does not accept a second parameter. This is because there is no associated network response. +* Note that any local data updates will automatically cause components subscribed to the data to be notified of the change and re-render. + +## commitPayload + +`commitPayload` takes an `OperationDescriptor` and the payload for the query, and writes it to the Relay Store. The payload will be resolved like a normal server response for a query, and will also resolve Data Driven Dependencies that are passed as `JSResource`, `requireDefer`, etc. + +```js +import type {FooQueryRawResponse} from 'FooQuery.graphql' + +const {createOperationDescriptor} = require('relay-runtime'); + +const operationDescriptor = createOperationDescriptor(FooQuery, { + id: 'an-id', + otherVariable: 'value', +}); + +const payload: FooQueryRawResponse = {...}; + +environment.commitPayload(operation, payload); +``` + +* An `OperationDescriptor` can be created by `createOperationDescriptor`; it takes the query and the query variables. +* The payload can be typed using the Flow type generated by adding the directive `@raw_response_type` to the query. +* Note that any local data updates will automatically cause components subscribed to the data to be notified of the change and re-render. 
+ + + + diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/typesafe-updaters-faq.md b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/typesafe-updaters-faq.md new file mode 100644 index 0000000000000..3199f0e1c018b --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/updating-data/typesafe-updaters-faq.md @@ -0,0 +1,95 @@ +--- +id: typesafe-updaters-faq +title: Typesafe updaters FAQ +slug: /guided-tour/updating-data/typesafe-updaters-faq/ +description: Typesafe updater FAQ +keywords: +- typesafe updaters +- readUpdatableQuery +- readUpdatableFragment +- updater +- updatable +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + +:::caution + +Because in TypeScript, [getters and setters cannot have different types](https://github.com/microsoft/TypeScript/issues/43662), and the generated types of getters and setters is not the same, `readUpdatableQuery` is currently unusable with TypeScript. `readUpdatableFragment` is usable, as long as the updatable fragment contains only scalar fields. + +::: + + + +# Typesafe Updaters FAQ + + + +:::note + +Is something missing from this Q&A? Are you confused? Would you like help adopting these APIs? Please, reach out to [Robert Balicki](https://fb.workplace.com/profile.php?id=100042823931887). I am happy to help! + +::: + + + +# General + +## What is typesafe updaters? + +Typesafe updaters is the name given to a project to provide a typesafe and ergonomic alternative to the existing APIs for imperatively updating data in the Relay store. + +## Why? + +Relay provides typesafe and ergonomic APIs for fetching and managing data that originates on the server. In addition, Relay provides the ability to define local-only fields in **client schema extensions**. 
With typesafe updaters, a developer writes an updatable query or a fragment that specifies the data to imperatively update.
+ +You should select that field in both a regular query/fragment **and** in an updatable query/fragment. + +## What are some consequences of this? + +* When you read out updatable data, it can be missing if it isn't present in the store. +* You cannot spread regular fragments in updatable queries/fragments. +* The generated artifact for updatable queries/fragments does not contain a query ID and does not contain a normalization AST (which is used for writing network data to the store.) +* Directives like `@defer`, etc. do not make sense in this context, and are disallowed. + +# Misc + +## Where do I get a `store`? + +The classes `RelayRecordSourceSelectorProxy` and `RelayRecordSourceProxy` contain the methods `readUpdatableQuery` and `readUpdatableFragment`. One can acquire an instance of these classes: + +* In updaters of mutations and subscriptions +* In optimistic updaters of mutations +* When using `RelayModernEnvironment`'s `commitUpdate`, `applyUpdate`, etc. methods. +* When using the standalone `commitLocalUpdate` method. diff --git a/website/versioned_docs/version-v15.0.0/guided-tour/workflow.md b/website/versioned_docs/version-v15.0.0/guided-tour/workflow.md new file mode 100644 index 0000000000000..ce8aa650a3127 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guided-tour/workflow.md @@ -0,0 +1,39 @@ +--- +id: workflow +title: Workflow +slug: /guided-tour/workflow/ +description: Relay guide to workflow +keywords: +- workflow +- compiler +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbWorkflow from './fb/FbWorkflow.md'; + + + + + + + +Before we can get started writing Relay code, we need to make sure to **[setup the Relay Compiler](../../getting-started/installation-and-setup/#set-up-relay-compiler)**. 
:::note
See also [the local data updates](../../guided-tour/updating-data/local-data-updates/) and [client-only data](../../guided-tour/updating-data/client-only-data/) sections of the guided tour.
:::
+ +## Table of Contents: + +- [Extending the server schema](#extending-the-server-schema) +- [Querying local state](#querying-local-state) +- [Mutating local state](#mutating-local-state) +- [Initial local state](#initial-local-state) + +## Extending the server schema + +To extend the server schema, create a new `.graphql` file inside your `--src` directory. +Let's call it `./src/clientSchema.graphql`. +This file needs to be in a folder referenced in the `"schemaExtensions"` of your relay config. + +This schema describes what local data can be queried on the client. +It can even be used to extend an existing server schema. + +For example, we can create a new type called `Note`: + +```graphql +type Note { + id: ID! + title: String + body: String +} +``` + +And then extend the server schema type `User`, with a list of `Note`, called `notes`. + +```graphql +extend type User { + notes: [Note] +} +``` + +## Querying local state + +Accessing local data is no different from querying your GraphQL server, although you are required to include at least one server field in the query. +The field can be from the server schema, or it can be schema agnostic, like an introspection field (e.g. `__typename`). + +Here, we use [useLazyLoadQuery](../../api-reference/use-lazy-load-query) to get the current `User` via the `viewer` field, along with their id, name and the local list of notes. + +```javascript +// Example.js +import * as React from 'react'; +import { useLazyLoadQuery, graphql } from 'react-relay'; + +const Example = (props) => { + const data = useLazyLoadQuery(graphql` + query ExampleQuery { + viewer { + id + name + notes { + id + title + body + } + } + } + `, {}); + // ... +} +``` + +## Mutating local state + +All local data lives in the [Relay Store](../../api-reference/store/). + +Updating local state can be done with any `updater` function. 
+ +The `commitLocalUpdate` function is especially ideal for this, because writes to local state are usually executed outside of a mutation. + +To build upon the previous example, let's try creating, updating and deleting a `Note` from the list of `notes` on `User`. + +### Create + +```javascript +import {commitLocalUpdate} from 'react-relay'; + +let tempID = 0; + +function createUserNote(environment) { + commitLocalUpdate(environment, store => { + const user = store.getRoot().getLinkedRecord('viewer'); + const userNoteRecords = user.getLinkedRecords('notes') || []; + + // Create a unique ID. + const dataID = `client:Note:${tempID++}`; + + //Create a new note record. + const newNoteRecord = store.create(dataID, 'Note'); + + // Add the record to the user's list of notes. + user.setLinkedRecords([...userNoteRecords, newNoteRecord], 'notes'); + }); +} +``` + +Note that since this record will be rendered by the `ExampleQuery` via `useLazyLoadQuery`, the query data will automatically be retained and won't be garbage collected. 
All new client-side schema fields default to an `undefined` value. Often, however, you will want to set the initial state before querying local data.
+You can use an updater function via `commitLocalUpdate` to prime local state. + +```javascript +import {commitLocalUpdate} from 'react-relay'; + +commitLocalUpdate(environment, store => { + const user = store.getRoot().getLinkedRecord('viewer'); + + // initialize user notes to an empty array. + user.setLinkedRecords([], 'notes'); +}); +``` + + diff --git a/website/versioned_docs/version-v15.0.0/guides/compiler.md b/website/versioned_docs/version-v15.0.0/guides/compiler.md new file mode 100644 index 0000000000000..2f127a23aa20d --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/compiler.md @@ -0,0 +1,172 @@ +--- +id: compiler +title: Relay Compiler +slug: /guides/compiler/ +description: Relay guide to the compiler +keywords: +- compiler +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; +import FbRunningCompiler from './fb/FbRunningCompiler.md'; +import FbGraphQLSchema from './fb/FbGraphQLSchema.md'; +import FbImportingGeneratedDefinitions from './fb/FbImportingGeneratedDefinitions.md'; + +## `graphql` + +The `graphql` template tag provided by Relay serves as the mechanism to write queries, fragments, mutations and subscriptions in the [GraphQL](http://graphql.org/learn/) language. For example: + +```javascript +import {graphql} from 'react-relay'; + +graphql` + query MyQuery { + viewer { + id + } + } +`; +``` + +The result of using the `graphql` template tag is a `GraphQLTaggedNode`; a runtime representation of the GraphQL document. + +Note that `graphql` template tags are **never executed at runtime**. Instead, they are compiled ahead of time by the Relay compiler into generated artifacts that live alongside your source code, and which Relay requires to operate at runtime. + + +## Compiler + +Relay uses the Relay Compiler to convert [`graphql`](#graphql) literals into generated files that live alongside your source files. 
+ +A fragment like the following: + +```javascript +graphql` + fragment MyComponent on Type { + field + } +` +``` + +Will cause a generated file to appear in `./__generated__/MyComponent.graphql.js`, +with both runtime artifacts (which help to read and write from the Relay Store) +and [Flow types](https://flow.org/) to help you write type-safe code. + +The Relay Compiler is responsible for generating code as part of a build step which can then be referenced at runtime. By building the query ahead of time, the Relay's runtime is not responsible for generating a query string, and various optimizations can be performed on the query that could be too expensive at runtime (for example, fields that are duplicated in the query can be merged during the build step, to improve efficiency of processing the GraphQL response). + +### GraphQL Schema + + + + + + + +To use the Relay Compiler, you need a `.graphql` [GraphQL Schema](https://graphql.org/learn/schema/) file, describing your GraphQL server's API. Typically these files are local representations of a server source of truth and are not edited directly. For example, we might have a `schema.graphql` like: + +```graphql +schema { + query: Root +} + +type Root { + dictionary: [Word] +} + +type Word { + id: String! + definition: WordDefinition +} + +type WordDefinition { + text: String + image: String +} +``` + + + +### Running the Compiler + + + + + + + +Additionally, you need a directory containing `.js` files that use the `graphql` tag to describe GraphQL queries and fragments. Let's call this `./src`. + +Then run `yarn run relay` as set up before. + +This will create a series of `__generated__` directories that are co-located with the corresponding files containing `graphql` tags. 
+ +For example, given the two files: + +- `src/Components/DictionaryComponent.js` + +```javascript +const DictionaryWordFragment = graphql` + fragment DictionaryComponent_word on Word { + id + definition { + ...DictionaryComponent_definition + } + } +` + +const DictionaryDefinitionFragment = graphql` + fragment DictionaryComponent_definition on WordDefinition { + text + image + } +` +``` + +- `src/Queries/DictionaryQuery.js` + +```javascript +const DictionaryQuery = graphql` + query DictionaryQuery { + dictionary { + ...DictionaryComponent_word + } + } +` +``` + +This would produce three generated files, and two `__generated__` directories: + +- `src/Components/__generated__/DictionaryComponent_word.graphql.js` +- `src/Components/__generated__/DictionaryComponent_definition.graphql.js` +- `src/Queries/__generated__/DictionaryQuery.graphql.js` + + + + +### Importing generated definitions + + + + + + + + +Typically you will not need to import your generated definitions. The [Relay Babel plugin](../../getting-started/installation-and-setup#setup-babel-plugin-relay) will then convert the `graphql` literals in your code into `require()` calls for the generated files. + +However the Relay Compiler also automatically generates [Flow](https://flow.org) types as [type comments](https://flow.org/en/docs/types/comments/). For example, you can import the generated Flow types like so: + +```javascript +import type {DictionaryComponent_word} from './__generated__/DictionaryComponent_word.graphql'; +``` + +More rarely, you may need to access a query, mutation, fragment or subscription from multiple files. 
This example demonstrates both of these assumptions. This example is not comprehensive, but it is designed to quickly introduce these core assumptions, to provide some context before diving into the more detailed specification of the library.
+ +It is also assumed that the reader is already familiar with [Star Wars](https://en.wikipedia.org/wiki/Star_Wars); if not, the 1977 version of Star Wars is a good place to start, though the 1997 Special Edition will serve for the purposes of this document. + +## Schema + +The schema described below will be used to demonstrate the functionality that a GraphQL server used by Relay should implement. The two core types are a faction and a ship in the Star Wars universe, where a faction has many ships associated with it. + +```graphql +interface Node { + id: ID! +} + +type Faction implements Node { + id: ID! + name: String + ships: ShipConnection +} + +type Ship implements Node { + id: ID! + name: String +} + +type ShipConnection { + edges: [ShipEdge] + pageInfo: PageInfo! +} + +type ShipEdge { + cursor: String! + node: Ship +} + +type PageInfo { + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String + endCursor: String +} + +type Query { + rebels: Faction + empire: Faction + node(id: ID!): Node +} +``` + +## Object Identification + +Both `Faction` and `Ship` have identifiers that we can use to refetch them. We expose this capability to Relay through the `Node` interface and the `node` field on the root query type. + +The `Node` interface contains a single field, `id`, which is an `ID!`. The `node` root field takes a single argument, an `ID!`, and returns a `Node`. These two work in concert to allow refetching; if we pass the `id` returned in that field to the `node` field, we get the object back. + +Let's see this in action, and query for the ID of the rebels: + +```graphql +query RebelsQuery { + rebels { + id + name + } +} +``` + +returns + +```json +{ + "rebels": { + "id": "RmFjdGlvbjox", + "name": "Alliance to Restore the Republic" + } +} +``` + +So now we know the ID of the Rebels in our system. We can now refetch them: + +```graphql +query RebelsRefetchQuery { + node(id: "RmFjdGlvbjox") { + id + ... 
on Faction { + name + } + } +} +``` + +returns + +```json +{ + "node": { + "id": "RmFjdGlvbjox", + "name": "Alliance to Restore the Republic" + } +} +``` + +If we do the same thing with the Empire, we'll find that it returns a different ID, and we can refetch it as well: + +```graphql +query EmpireQuery { + empire { + id + name + } +} +``` + +yields + +```json +{ + "empire": { + "id": "RmFjdGlvbjoy", + "name": "Galactic Empire" + } +} +``` + +and + +```graphql +query EmpireRefetchQuery { + node(id: "RmFjdGlvbjoy") { + id + ... on Faction { + name + } + } +} +``` + +yields + +```json +{ + "node": { + "id": "RmFjdGlvbjoy", + "name": "Galactic Empire" + } +} +``` + +The `Node` interface and `node` field assume globally unique IDs for this refetching. A system without globally unique IDs can usually synthesize them by combining the type with the type-specific ID, which is what was done in this example. + +The IDs we got back were base64 strings. IDs are designed to be opaque (the only thing that should be passed to the `id` argument on `node` is the unaltered result of querying `id` on some object in the system), and base64ing a string is a useful convention in GraphQL to remind viewers that the string is an opaque identifier. + +Complete details on how the server should behave are available in the [GraphQL Object Identification](https://graphql.org/learn/global-object-identification/) best practices guide in the GraphQL site. + +## Connections + +A faction has many ships in the Star Wars universe. Relay contains functionality to make manipulating one-to-many relationships easy, using a standardized way of expressing these one-to-many relationships. This standard connection model offers ways of slicing and paginating through the connection. 
+ +Let's take the rebels, and ask for their first ship: + +```graphql +query RebelsShipsQuery { + rebels { + name + ships(first: 1) { + edges { + node { + name + } + } + } + } +} +``` + +yields + +```json +{ + "rebels": { + "name": "Alliance to Restore the Republic", + "ships": { + "edges": [ + { + "node": { + "name": "X-Wing" + } + } + ] + } + } +} +``` + +That used the `first` argument to `ships` to slice the result set down to the first one. But what if we wanted to paginate through it? On each edge, a cursor will be exposed that we can use to paginate. Let's ask for the first two this time, and get the cursor as well: + +``` +query MoreRebelShipsQuery { + rebels { + name + ships(first: 2) { + edges { + cursor + node { + name + } + } + } + } +} +``` + +and we get back + +```json + +{ + "rebels": { + "name": "Alliance to Restore the Republic", + "ships": { + "edges": [ + { + "cursor": "YXJyYXljb25uZWN0aW9uOjA=", + "node": { + "name": "X-Wing" + } + }, + { + "cursor": "YXJyYXljb25uZWN0aW9uOjE=", + "node": { + "name": "Y-Wing" + } + } + ] + } + } +} +``` + +Notice that the cursor is a base64 string. That's the pattern from earlier: the server is reminding us that this is an opaque string. We can pass this string back to the server as the `after` argument to the `ships` field, which will let us ask for the next three ships after the last one in the previous result: + +``` + +query EndOfRebelShipsQuery { + rebels { + name + ships(first: 3 after: "YXJyYXljb25uZWN0aW9uOjE=") { + edges { + cursor + node { + name + } + } + } + } +} +``` + +gives us + +```json + + +{ + "rebels": { + "name": "Alliance to Restore the Republic", + "ships": { + "edges": [ + { + "cursor": "YXJyYXljb25uZWN0aW9uOjI=", + "node": { + "name": "A-Wing" + } + }, + { + "cursor": "YXJyYXljb25uZWN0aW9uOjM=", + "node": { + "name": "Millenium Falcon" + } + }, + { + "cursor": "YXJyYXljb25uZWN0aW9uOjQ=", + "node": { + "name": "Home One" + } + } + ] + } + } +} +``` + +Sweet! 
Let's keep going and get the next four! + +```graphql +query RebelsQuery { + rebels { + name + ships(first: 4 after: "YXJyYXljb25uZWN0aW9uOjQ=") { + edges { + cursor + node { + name + } + } + } + } +} +``` + +yields + +```json +{ + "rebels": { + "name": "Alliance to Restore the Republic", + "ships": { + "edges": [] + } + } +} +``` + +Hm. There were no more ships; guess there were only five in the system for the rebels. It would have been nice to know that we'd reached the end of the connection, without having to do another round trip in order to verify that. The connection model exposes this capability with a type called `PageInfo`. So let's issue the two queries that got us ships again, but this time ask for `hasNextPage`: + +```graphql +query EndOfRebelShipsQuery { + rebels { + name + originalShips: ships(first: 2) { + edges { + node { + name + } + } + pageInfo { + hasNextPage + } + } + moreShips: ships(first: 3 after: "YXJyYXljb25uZWN0aW9uOjE=") { + edges { + node { + name + } + } + pageInfo { + hasNextPage + } + } + } +} +``` + +and we get back + +```json +{ + "rebels": { + "name": "Alliance to Restore the Republic", + "originalShips": { + "edges": [ + { + "node": { + "name": "X-Wing" + } + }, + { + "node": { + "name": "Y-Wing" + } + } + ], + "pageInfo": { + "hasNextPage": true + } + }, + "moreShips": { + "edges": [ + { + "node": { + "name": "A-Wing" + } + }, + { + "node": { + "name": "Millenium Falcon" + } + }, + { + "node": { + "name": "Home One" + } + } + ], + "pageInfo": { + "hasNextPage": false + } + } + } +} +``` + +So on the first query for ships, GraphQL told us there was a next page, but on the next one, it told us we'd reached the end of the connection. + +Relay uses all of this functionality to build out abstractions around connections, to make these easy to work with efficiently without having to manually manage cursors on the client. 
This concludes the overview of the GraphQL Server Specifications. For the detailed requirements of a Relay-compliant GraphQL server, a more formal description of the [Relay cursor connection](https://relay.dev/graphql/connections.htm) model and the [GraphQL global object identification](https://graphql.org/learn/global-object-identification/) model are both available.
This allows developers to use whatever transport (HTTP, WebSockets, etc) and authentication is most appropriate for their application, decoupling the environment from the particulars of each application's network configuration. + +Currently the easiest way to create a network layer is via a helper from the `relay-runtime` package: + +```javascript +import { + Environment, + Network, + RecordSource, + Store, +} from 'relay-runtime'; + +// Define a function that fetches the results of an operation (query/mutation/etc) +// and returns its results as a Promise: +function fetchQuery( + operation, + variables, + cacheConfig, + uploadables, +) { + return fetch('/graphql', { + method: 'POST', + headers: { + // Add authentication and other headers here + 'content-type': 'application/json' + }, + body: JSON.stringify({ + query: operation.text, // GraphQL text from input + variables, + }), + }).then(response => { + return response.json(); + }); +} + +// Create a network layer from the fetch function +const network = Network.create(fetchQuery); +const store = new Store(new RecordSource()) + +const environment = new Environment({ + network, + store + // ... other options +}); + +export default environment; +``` + +Note that this is a basic example to help you get started. This example could be extended with additional features such as request/response caching (enabled e.g. when `cacheConfig.force` is false) and uploading form data for mutations (the `uploadables` parameter). + +## Caching + +The Relay store will cache data from queries that are currently retained. See the section on [reusing cached data](../../guided-tour/reusing-cached-data/) of the guided tour. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/guides/persisted-queries.md b/website/versioned_docs/version-v15.0.0/guides/persisted-queries.md new file mode 100644 index 0000000000000..6f82d191e71aa --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/persisted-queries.md @@ -0,0 +1,326 @@ +--- +id: persisted-queries +title: Persisted Queries +slug: /guides/persisted-queries/ +description: Relay guide to persisted queries +keywords: +- persisted +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + + + +> Persistence is handled by the `relay` command for you. You likely do not need to worry about the contents of this guide. + + + +The relay compiler supports persisted queries. This is useful because: + +- The client operation text becomes just an md5 hash which is usually shorter than the real + query string. This saves upload bytes from the client to the server. + +- The server can now allowlist queries which improves security by restricting the operations + that can be executed by a client. + + + +## Usage on the client + +### The `persistConfig` option + +In your relay configiration section in `package.json` you'll need specify +"persistConfig". + +``` +"scripts": { + "relay": "relay-compiler", + "relay-persisting": "node relayLocalPersisting.js" +}, +"relay": { + "src": "./src", + "schema": "./schema.graphql", + "persistConfig": { + "url": "http://localhost:2999", + "params": {} + } +} +``` + +Specifiying `persistConfig` in the config will do the following: + +1. It converts all query and mutation operation texts to md5 hashes. + + For example without `persistConfig`, a generated `ConcreteRequest` might look + like below: + + ```javascript + const node/*: ConcreteRequest*/ = (function(){ + //... 
excluded for brevity + return { + "kind": "Request", + "operationKind": "query", + "name": "TodoItemRefetchQuery", + "id": null, // NOTE: id is null + "text": "query TodoItemRefetchQuery(\n $itemID: ID!\n) {\n node(id: $itemID) {\n ...TodoItem_item_2FOrhs\n }\n}\n\nfragment TodoItem_item_2FOrhs on Todo {\n text\n isComplete\n}\n", + //... excluded for brevity + }; + })(); + + ``` + + With `persistConfig` this becomes: + + ```javascript + const node/*: ConcreteRequest*/ = (function(){ + //... excluded for brevity + return { + "kind": "Request", + "operationKind": "query", + "name": "TodoItemRefetchQuery", + "id": "3be4abb81fa595e25eb725b2c6a87508", // NOTE: id is now an md5 hash + // of the query text + "text": null, // NOTE: text is null now + //... excluded for brevity + }; + })(); + + ``` + +2. It will send an HTTP POST request with a `text` parameter to the +specified `url`. +You can also add additional request body parameters via the `params` option. + +``` +"scripts": { + "relay": "relay-compiler" +}, +"relay": { + "src": "./src", + "schema": "./schema.graphql", + "persistConfig": { + "url": "http://localhost:2999", + "params": {} + } +} +``` + +### Local Persisted Queries + +With the following config, you can generate a local JSON file which contains a map of `operation_id => full operation text`. + +``` +"scripts": { + "relay": "relay-compiler" +}, +"relay": { + "src": "./src", + "schema": "./schema.graphql", + "persistConfig": { + "file": "./persisted_queries.json", + "algorithm": "MD5" // this can be one of MD5, SHA256, SHA1 + } +} +``` + +Ideally, you'll take this file and ship it to your server at deploy time so your server knows about all the queries it could possibly receive. If you don't want to do that, you'll have to implement the [Automatic Persisted Queries handshake](https://www.apollographql.com/docs/apollo-server/performance/apq/). 
+ +#### Tradeoffs + +- ✅ If your server's persisted query datastore gets wiped, you can recover automatically through your client's requests. +- ❌ When there's a cache miss, it'll cost you an extra round trip to the server. +- ❌ You'll have to ship your `persisted_queries.json` file to the browser which will increase your bundle size. + +### Example implemetation of `relayLocalPersisting.js` + +Here's an example of a simple persist server that will save query text to the `queryMap.json` file. + + +```javascript +const http = require('http'); +const crypto = require('crypto'); +const fs = require('fs'); + +function md5(input) { + return crypto.createHash('md5').update(input).digest('hex'); +} + +class QueryMap { + constructor(fileMapName) { + this._fileMapName = fileMapName; + this._queryMap = new Map(JSON.parse(fs.readFileSync(this._fileMapName))); + } + + _flush() { + const data = JSON.stringify(Array.from(this._queryMap.entries())); + fs.writeFileSync(this._fileMapName, data); + } + + saveQuery(text) { + const id = md5(text); + this._queryMap.set(id, text); + this._flush(); + return id; + } +} + +const queryMap = new QueryMap('./queryMap.json'); + +async function requestListener(req, res) { + if (req.method === 'POST') { + const buffers = []; + for await (const chunk of req) { + buffers.push(chunk); + } + const data = Buffer.concat(buffers).toString(); + res.writeHead(200, { + 'Content-Type': 'application/json' + }); + try { + if (req.headers['content-type'] !== 'application/x-www-form-urlencoded') { + throw new Error( + 'Only "application/x-www-form-urlencoded" requests are supported.' 
+ ); + } + const text = new URLSearchParams(data).get('text'); + if (text == null) { + throw new Error('Expected to have `text` parameter in the POST.'); + } + const id = queryMap.saveQuery(text); + res.end(JSON.stringify({"id": id})); + } catch (e) { + console.error(e); + res.writeHead(400); + res.end(`Unable to save query: ${e}.`); + } + } else { + res.writeHead(400); + res.end("Request is not supported.") + } +} + +const PORT = 2999; +const server = http.createServer(requestListener); +server.listen(PORT); + +console.log(`Relay persisting server listening on ${PORT} port.`); +``` + +The example above writes the complete query map file to `./queryMap.json`. +To use this, you'll need to update `package.json`: + + +``` +"scripts": { + "persist-server": "node ./relayLocalPersisting.js", + "relay": "relay-compiler" +} +``` + + + +### Network layer changes + +You'll need to modify your network layer fetch implementation to pass an ID parameter in the POST body (e.g., `doc_id`) instead of a query parameter: + +```javascript +function fetchQuery(operation, variables) { + return fetch('/graphql', { + method: 'POST', + headers: { + 'content-type': 'application/json' + }, + body: JSON.stringify({ + doc_id: operation.id, // NOTE: pass md5 hash to the server + // query: operation.text, // this is now obsolete because text is null + variables, + }), + }).then(response => { + return response.json(); + }); +} +``` + + +## Executing Persisted Queries on the Server + + + +Your server should then look up the query referenced by `doc_id` when responding to this request. + + + + + +To execute client requests that send persisted queries instead of query text, your server will need to be able +to lookup the query text corresponding to each ID. Typically this will involve saving the output of the `queryMap.json` JSON file to a database or some other storage mechanism, and retrieving the corresponding text for the ID specified by a client. 
+ +Additionally, your implementation of `relayLocalPersisting.js` could directly save queries to the database or other storage. + +For universal applications where the client and server code are in one project, this is not an issue since you can place +the query map file in a common location accessible to both the client and the server. + +### Compile time push + +For applications where the client and server projects are separate, one option is to have an additional npm run script +to push the query map at compile time to a location accessible by your server: + +```javascript +"scripts": { + "push-queries": "node ./pushQueries.js", + "persist-server": "node ./relayLocalPersisting.js", + "relay": "relay-compiler && npm run push-queries" +} +``` + +Some possibilities of what you can do in `./pushQueries.js`: + +- `git push` to your server repo. + +- Save the query maps to a database. + +### Run time push + +A second more complex option is to push your query maps to the server at runtime, without the server knowing the query IDs at the start. +The client optimistically sends a query ID to the server, which does not have the query map. The server then in turn requests +for the full query text from the client so it can cache the query map for subsequent requests. This is a more complex approach +requiring the client and server to interact to exchange the query maps. + +### Simple server example + +Once your server has access to the query map, you can perform the mapping. The solution varies depending on the server and +database technologies you use, so we'll just cover the most common and basic example here. + +If you use `express-graphql` and have access to the query map file, you can import it directly and +perform the matching using the `persistedQueries` middleware from [express-graphql-persisted-queries](https://github.com/kyarik/express-graphql-persisted-queries). 
+ +```javascript +import express from 'express'; +import {graphqlHTTP} from 'express-graphql'; +import {persistedQueries} from 'express-graphql-persisted-queries'; +import queryMap from './path/to/queryMap.json'; + +const app = express(); + +app.use( + '/graphql', + persistedQueries({ + queryMap, + queryIdKey: 'doc_id', + }), + graphqlHTTP({schema}), +); +``` + +## Using `persistConfig` and `--watch` + +It is possible to continuously generate the query map files by using the `persistConfig` and `--watch` options simultaneously. +This only makes sense for universal applications i.e. if your client and server code are in a single project +and you run them both together on localhost during development. Furthermore, in order for the server to pick up changes +to the `queryMap.json`, you'll need to have server side hot-reloading set up. The details on how to set this up +are out of the scope of this document. + + + + diff --git a/website/versioned_docs/version-v15.0.0/guides/relay-resolvers.md b/website/versioned_docs/version-v15.0.0/guides/relay-resolvers.md new file mode 100644 index 0000000000000..8e44a3f197893 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/relay-resolvers.md @@ -0,0 +1,266 @@ +--- +id: relay-resolvers +title: "Relay Resolvers" +slug: /guides/relay-resolvers/ +description: Relay guide to Relay Resolvers +keywords: +- resolvers +- derived +- selectors +- reactive +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Relay Resolvers is an experimental Relay feature which enables modeling derived state as client-only fields in Relay’s GraphQL graph. Similar to server [resolvers](https://graphql.org/learn/execution/), a Relay Resolver is a function which defines how to compute the value of a GraphQL field. However, unlike server resolvers, Relay Resolvers are evaluated reactively on the client. 
A Relay Resolver reads fields off of its parent object and returns a derived result. If any of those fields change, Relay will automatically reevaluate the resolver. + +Relay Resolvers are particularly valuable in apps which store client state in Relay via [client schema extensions](https://relay.dev/docs/guides/client-schema-extensions/), since they allow you to compose together client data, server data — and even other Relay Resolver fields — into fields which update reactively as the underlying data changes. + +Relay Resolvers were originally conceived of as an alternative to Flux-style [selectors](https://redux.js.org/usage/deriving-data-selectors) and can be thought of as providing similar capabilities. + +Concretely, Relay Resolvers are defined as functions annotated with a special docblock syntax. The Relay compiler will automatically recognize these docblocks in any JavaScript file and use them to extend the schema that is available within your project. + +## Defining a Resolver + +For Relay Resolvers we are using a special syntax to define a new field: + +The string after @RelayResolver is a GraphQL `TypeName` sperated by a dot the string with the field +defintion: https://spec.graphql.org/June2018/#FieldDefinition (Description and directives of the field are not supported). + +```js +/** +* @RelayResolver TypeName.fieldName(arg1: ArgTypeName): FieldTypeName +*/ +``` + +## Examples + +Note: In provided examples we're using Flow-type annotations, but Relay resolvers can also work with plain JavaScript, and TypeScript. + +Let’s look at an example Relay Resolver: + +```jsx +import type {UserGreetingResolver$key} from 'UserGreetingResolver.graphql'; +import {graphql} from 'relay-runtime'; +import {readFragment} from 'relay-runtime/store/ResolverFragments'; + +/** + * @RelayResolver User.greeting: String + * @rootFragment UserGreetingResolver + * + * A greeting for the user which includes their name and title. 
+ */ +export function greeting(userKey: UserGreetingResolver$key): string { + const user = readFragment(graphql` + fragment UserGreetingResolver on User { + honorific + last_name + }`, userKey); + + return `Hello ${user.honorific} ${user.last_name}!`; +} +``` + +This resolver adds a new field `greeting` to the `User` object type. It reads the `honorific` and `last_name` fields off of the parent `User` and derives a greeting string. The new `greeting` field may now be used by any Relay component throughout your project which has access to a `User`. + +Consuming this new field looks identical to consuming a field defined in the server schema: + +```jsx +function MyGreeting({userKey}) { + const user = useFragment(` + fragment MyGreeting on User { + greeting + }`, userKey); + return

{user.greeting}

; +} +``` + +## Docblock Fields + +The Relay compiler looks for the following fields in any docblocks that includes `@RelayResolver`: + +- `@RelayResolver` (required) +- `@rootFragment` (optional) The name of the fragment read by `readFragment` +- `@deprecated` (optional) Indicates that the field is [deprecated](https://spec.graphql.org/June2018/#sec--deprecated). May be optionally followed text giving the reason that the field is deprecated. + +The docblock may also contain free text. This free text will be used as the field’s human-readable description, which will be surfaced in Relay’s editor support on hover and in autocomplete results. + +## Relay Resolver Convetions + +In order for Relay to be able to call a Relay Resolver, it must conform to a set of conventions: + +1. The resolver function must use named export. +2. The resolver must read its fragment using the special `readFragment` function. +3. The resolver function must be pure. +4. The resolver’s return value must be immutable. + +Unlike server resolvers, Relay Resolvers may return any JavaScript value. This includes classes, functions and arrays. However, we generally encourage having Relay Resolvers return scalar values and only returning more complex JavaScript values (like functions) as an escape hatch. + + +## Lint Rule + +In many cases, the contents of the docblock can be derived from the javascript implementation. In those cases, the [`relay-resolvers`](https://www.internalfb.com/eslint/relay-resolvers) ESLint rule rule will offer auto-fixes to derive the docblock from the implementation and ensure that the two remain in sync. The lint rule also enforces a naming convention for resolver function and modules names. + + +## How They Work + +When parsing your project, the Relay compiler looks for `@RelayResolver` docblocks and uses them to add special fields to the GraphQL schema. 
If a query or fragment references one of these fields, Relay’s generated artifact for that query or fragment will automatically include an `import` of the resolver function. *Note that this can happen recursively if the Relay Resolver field you are reading itself reads one or more Relay Resolver fields.* + +When the field is first read by a component, Relay will evaluate the Relay Resolver function and cache the result. Other components that read the same field will read the same cached value. If at any point any of the fields that the resolver reads (via its root fragment) change, Relay will reevaluate the resolver. If the return value changes (determined by `===` equality) Relay will propagate that change to all components (and other Relay Resolvers) that are currently reading the field. + +## Error Handling + +In order to make product code as robust as possible, Relay Resolvers follow the GraphQL spec’s documented [best practice](https://graphql.org/learn/best-practices/#nullability) of returning null when a field resolver errors. Instead of throwing, errors thrown by Relay Resolvers will be logged to your environment's configured `relayFieldLogger` with an event of kind `"relay_resolver.error"`. If you make use of Relay Resolves you should be sure to configure your environment with a `relayFieldLogger` which reports those events to whatever system you use for tracking runtime errors. + +If your component requires a non-null value in order to render, and can’t provide a reasonable fallback experience, you can annotate the field access with `@required`. + +## Passing arguments to resolver fields + +For resolvers, we support two ways of defining field arguments: + +1. GraphQL: Arguments that are defined via @argumentDefinitions on the resolver's fragment. +2. JS Runtime: Arguments that can be passed directly to the resolver function. +3. 
You can also combine these, and define arguments on the fragment and on the resolver's field itself, Relay will validate the naming (these arguments have to have different names), and pass GraphQL arguments to fragment, and JS arguments to the resolver's function. + + +Let’s look at the example 1: + +## Defining Resolver field with Fragment Arguments + +```js +/** +* @RelayResolver MyType.my_resolver_field: String +* @rootFragment myResolverFragment +*/ +export function my_resolver_field(fragmentKey: myResolverFragment$key): ?string { + const data = readFragment(graphql` + fragment myResolverFragment on MyType + @argumentDefinitions(my_arg: {type: "Float!"}) { + field_with_arg(arg: $my_arg) { + __typename + } + } + `, fragmentKey); + + return data.field_with_arg.__typename; +} +``` + +### Using Resolver field with arguments for Fragment + +This resolver will extend the **MyType** with the new field **my_resolver_field(my_arg: Float!)** and the fragment arguments for **myResolverFragment** can be passed directly to this field. + +```js +const data = useLazyLoadQuery(graphql` + query myQuery($id: ID, $my_arg: Float!) { + node(id: $id) { + ... on MyType { + my_resolver_field(my_arg: $my_arg) + } + } + } +`, { id: "some id", my_arg: 2.5 }); +``` + +For these fragment arguments relay will pass then all queries/fragments where the resolver field is used to the resolver’s fragment. + + +### Defining Resolver field with Runtime (JS) Arguments + +Relay resolvers also support runtime arguments that are not visible/passed to fragments, but are passed to the resolver function itself. 
+ +You can define these fragments using GraphQL’s [Schema Definition Language](https://graphql.org/learn/schema/) in the **@fieldName** + +```js +/** +* @RelayResolver MyType.my_resolver_field(my_arg: String, my_other_arg: Int): String +* @rootFragment myResolverFragment +*/ +export function my_resolver_field( + fragmentKey: myResolverFragment$key, + args: { + my_arg: ?string, + my_other_arg: ?number + }, +): ?string { + if (args.my_other_arg === 0) { + return "The other arg is 0"; + } + + const data = readFragment(graphql` + fragment myResolverFragment on MyType + some_field + } + `, fragmentKey); + + return data.some_field.concat(args.my_arg); +} +``` + +### Using Resolver field with runtime arguments + +This resolver will extend **MyType** with the new field **my_resolver_field(my_arg: String, my_other_arg: Int).** + +```js +const data = useLazyLoadQuery(graphql` + query myQuery($id: ID, $my_arg: String!) { + node(id: $id) { + ... on MyType { + my_resolver_field(my_arg: $my_arg, my_other_arg: 1) + } + } + } +`, { id: "some id", my_arg: "hello world!"}); +``` + +### Defining Resolver field with Combined Arguments + +We can also combine both of these approaches and define field arguments both on the resolver’s fragment and on the field itself: + +```js +/** +* @RelayResolver MyType.my_resolver_field(my_js_arg: String!): String +* @rootFragment myResolverFragment +*/ +export function my_resolver_field( + fragmentKey: myResolverFragment$key, + args: { + my_js_arg: string + }, +): ?string { + const data = readFragment(graphql` + fragment myResolverFragment on MyType + @argumentDefinitions(my_gql_arg: {type: "Float!"}) { + field_with_arg(arg: $my_arg) { + __typename + } + } + `, fragmentKey); + + return `Hello ${args.my_js_arg}, ${data.field_with_arg.__typename}`; +} +``` + +### Using Resolver field with combined arguments + +Relay will extend the **MyType** with the new resolver field that has two arguments: **my_resolver_field(my_js_arg: String, my_gql_arg: Float!) 
+ +** +Example query: + +```js +const data = useLazyLoadQuery(graphql` + query myQuery($id: ID, $my_arg: String!) { + node(id: $id) { + ... on MyType { + my_resolver_field(my_js_arg: "World", my_qql_arg: 2.5) + } + } + } +`, { id: "some id" }); +``` + +## Current Limitations + +- Relay Resolvers are still considered experimental. To use them you must ensure that the `ENABLE_RELAY_RESOLVERS` runtime feature flag is enabled, and that the `enable_relay_resolver_transform` feature flag is enabled in your project’s Relay config file. diff --git a/website/versioned_docs/version-v15.0.0/guides/required-directive.md b/website/versioned_docs/version-v15.0.0/guides/required-directive.md new file mode 100644 index 0000000000000..f6e373f343759 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/required-directive.md @@ -0,0 +1,234 @@ +--- +id: required-directive +title: "@required Directive" +slug: /guides/required-directive/ +description: Relay guide to @required +keywords: +- required +- directive +- optional +- nullthrows +--- + +import DocsRating from '@site/src/core/DocsRating'; + +The `@required` directive can be added to fields in your Relay queries to declare how null values should be handled at runtime. You can think of it as saying "if this field is ever null, its parent field is invalid and should be null". + +When you have a GraphQL schema where many fields are nullable, a considerable amount of product code is needed to handle each field's potential "nullness" before the underlying data can be used. With `@required`, Relay can handle some types of null checks before it returns data to your component, which means that **any field you annotate with** **`@required`** **will become non-nullable in the generated types for your response**. + +If a `@required` field is null at runtime, Relay will "bubble" that nullness up to the field's parent. 
For example, given this query: + +```graphql +query MyQuery { + viewer { + name @required(action: LOG) + age + } +} +``` + +If `name` is null, relay would return `{ viewer: null }`. You can think of `@required` in this instance as saying "`viewer` is useless without a `name`". + +## Action + +The `@required` directive has a required `action` argument which has three possible values: + +### `NONE` (expected) + +This field is expected to be null sometimes. + +### `LOG` (recoverable) + +This value is not expected to ever be null, but the component **can still render** if it is. If a field with `action: LOG` is null, the Relay environment logger will receive an event that looks like this: + +```javascript +{ + name: 'read.missing_required_field', + owner: string, // MyFragmentOrQueryName + fieldPath: string, // path.to.my.field +}; +``` + +### `THROW` (unrecoverable) + +This value should not be null, and the component **cannot render without it**. If a field with `action: THROW` is null at runtime, the component which reads that field **will throw during render**. The error message includes both the owner and field path. Only use this option if your component is contained within an [error boundary](https://react.dev/reference/react/Component#catching-rendering-errors-with-an-error-boundary). + +## Locality + +A field's `@required` status is **local to the fragment where it is specified**. This allows you to add add/remove the directive without having to think about anything outside the scope of your component. + +This choice reflects the fact that some components may be able to recover better from missing data than others. For example, a `` component could probably render something sensible even if the restaurant's address is missing, but a `` component might not. + +However, all usages of the `@required` directive on the same field in a single fragment must be consistent with their usage. This situation mostly occurs when selecting fields in inline fragments. 
For example, the following fragment would fail to compile: + +```graphql +fragment UserInfo on User { + job { + ... on Actor { + certifications + } + ... on Lawyer { + certifications @required(action: LOG) + } + } +} +``` + +The Relay compiler will give you an error like `All references to a field must have matching @required declarations.`. To fix this, either set the `@required` directive on each of the fields selected in the inline fragment or remove the directive entirely. + +## Chaining + +`@required` directives can be chained to make a deeply nested field accessible after just one null check: + +```javascript +const user = useFragment(graphql` + fragment MyUser on User { + name @required(action: LOG) + profile_picture @required(action: LOG) { + url @required(action: LOG) + } + }`, key); + if(user == null) { + return null; + } + return {user.name} +``` + +**Note**: If you use `@required` on a top level field of a fragment, the object returned from `useFragment` itself may become nullable. The generated types will reflect this. + +When chaining `@required` directives, the Relay compiler will help you from unintentionally creating a chain with a more severe action than intended. Consider the following fragment + +```graphql +fragment MyUser on User { + profile_picture @required(action: THROW) { + url @required(action: LOG) + } +} +``` + +In this example we want the component to THROW if the `profile_picture` field is null but we only want to LOG an error if the `url` field is null. But recall, Relay will "bubble" nullness up to the parent field, if the `url` field is null it will then cause the `profile_picture` field to become null as well. And once that happens, the component will THROW. If you implement a pattern like this, the Relay compiler will give you an error + +``` +A @required field may not have an `action` less severe than that of its @required parent. 
This @required directive should probably have `action: LOG` so that it can match its parent +``` + +To fix this, either change the `profile_picture` to use `action: LOG` or change the `url` field to use `action: THROW`. + +## Caveats with Connections + +There are currently some limitations in using the `@required` and `@connection` directives together. When you use the `@connection` directive, Relay automatically inserts some additional fields into the connection, and those fields won't be generated with the `@required` directive. This can result in inconsistencies if you use the `@required` directive on fields in a Connection type. Consider the following example: + +```graphql +fragment FriendsList on User @refetchable(queryName: "FriendsListQuery") { + friends(after: $cursor, first: $count) @connection(key: "FriendsList_friends") { + edges { + node @required(action: LOG) { + job @required(action: LOG) { + title @required(action: LOG) + } + } + } + } +} +``` + +Any usages of `@required` on the `node` field or any of its direct child fields will cause the Relay compiler to give you an error saying `All references to a field must have matching @required declarations.`. In order to bypass this you'll need to remove the `@required` directives on those fields. + +In the above example, we'd need to remove the `@required` directives on both the `node` and `job` fields, but the usage on the `title` field would not create an error. + +```graphql +fragment FriendsList on User @refetchable(queryName: "FriendsListQuery") { + friends(after: $cursor, first: $count) @connection(key: "FriendsList_friends") { + edges { + node { + job { + title @required(action: LOG) + } + } + } + } +} +``` + +## FAQ + +### Why did @required make a non-nullable field/root nullable? + +When using the `LOG` or `NONE` actions, Relay will "bubble" a missing field up to its parent field or fragment root. 
This means that adding `@required(action: LOG)` (for example) to a child of a non-nullable fragment root will cause the type of the fragment root to become nullable. + +### What happens if you use `@required` in a plural field + +If a `@required(action: LOG)` field is missing in a plural field, the _item_ in the list will be returned as null. It will _not_ cause the entire array to become null.. If you have any question about how it will behave, you can inspect the generated Flow types. + +### Why are @required fields in an inline fragment still nullable? + +Imagine a fragment like this: + +```graphql +fragment MyFrag on Actor { + ... on User { + name @required(action: THROW) + } +} +``` + +It's possible that your `Actor` will not be a `User` and therefore not include a `name`. To represent that in types, we generate a Flow type that looks like this: `{name?: string}`. + +If you encounter this issue, you can add a `__typename` like this: + +```graphql +fragment MyFrag on Actor { + __typename + ... on User { + name @required(action: THROW) + } +} +``` + +In this situation Relay will generate a union type like: `{__typename: 'User', name: string} | {__typename: '%ignore this%}`. Now you can check the `__typename` field to narrow your object's type down to one that has a non-nullable `name`. + + +Example diff showing the adoption of this strategy: D24370183 + + +### Why not implement this at the schema/server level? + +The "requiredness" of a field is actually a product decision and not a schema question. Therefore we need to implement the handling of it at the product level. Individual components need to be able to decide for themselves how to handle a missing value. + +For example, if a notification is trying to show the price for a Marketplace listing, it could probably just omit the price and still render. If payment flow for that same listing is missing the price, it should probably blow up. 
+ +Another issue is that changes to the server schema are much more difficult to ship since they affect all existing clients across all platforms. + +Basically every value returned by Relay is nullable. This is intentional since we want to be able to handle field-level errors whenever possible. If we lean into KillsParentOnException we would end up wanting to make basically every field use it and our apps would be becomes more brittle since errors which used to be small, become large. + + + +_Extracted from [this comment thread](https://fb.workplace.com/groups/cometeng/permalink/937671436726844/?comment_id=937681186725869)._ +_Further discussion in [this comment thread](https://fb.workplace.com/groups/cometeng/permalink/937671436726844/?comment_id=938335873327067)._ + + +### Can `(action: NONE)` be the default? + +On one hand action: NONE makes the most sense as a default (omitted action == no action). However, we are aware that whichever value we choose as the default will be considered the default action for engineers to choose since it's the path of least resistance. + +We actually believe that in most cases LOG is the most ideal choice. It gives the component a chance to gracefully recover while also giving us signal that a part of our app is rendering in a sub-optimal way. + +We debated making LOG the default action for that reason, but I think that's confusing as well. + +So, for now we are planning to not offer a default argument. After all, it's still much less to write out than the equivalent manual null checks. Once we see how people use it we will consider what value (if any) should be the default. + + + +### Does @required change anything about the logger project field? 
+ +When using recoverableViolation or unrecoverableViolation, the second argument is the FBLogger project name ([defined on Comet here](https://fburl.com/diffusion/rn99dl4s)): + +```javascript +recoverableViolation('My error string', 'my_logger_project'); +``` + +When you switch to using `@required`, any `THROW` or `LOG` actions will log to the `relay-required` logger project instead ([see here in logview](https://fburl.com/logview/l40t7cjv)). + +For most teams, this shouldn't be an issue; care has been taken to ensure tasks still get routed to the correct owner of the file that is using `@required`. However, if your team has any queries that utilize the logger project field, you may want to consider the implications. + + diff --git a/website/versioned_docs/version-v15.0.0/guides/testing-relay-components.md b/website/versioned_docs/version-v15.0.0/guides/testing-relay-components.md new file mode 100644 index 0000000000000..2cd077fc60371 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/testing-relay-components.md @@ -0,0 +1,584 @@ +--- +id: testing-relay-components +title: Testing Relay Components +slug: /guides/testing-relay-components/ +description: Relay guide to testing Relay components +keywords: +- testing +- createMockEnvironment +- RelayMockEnvironment +- MockPayloadGenerator +- relay_test_operation +- queuePendingOperation +- resolver +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +## Abstract + +The purpose of this document is to cover the Relay APIs for testing Relay components. + +The content is focused mostly on jest unit-tests (testing individual components) and integration tests (testing a combination of components). But these testing tools may be applied in different cases: screenshot-tests, production smoke-tests, "Redbox" tests, fuzz-tests, e2e test, etc. 
+ +What are the benefits of writing jest tests: + +* In general, it improves the stability of the system. Flow helps with catching a various set of Javascript errors, but it is still possible to introduce regressions to the components. Unit-tests help find, reproduce, and fix regressions, and prevent them in the future. +* It simplifies the refactoring process: when properly written (testing public interface, not implementation) - tests help with changing the internal implementation of the components. +* It may speed up and improve the development workflow. Some people may call it Test Driven Development (TM). But essentially it's just writing tests for public interfaces of your components, and then writing the components that implement those interfaces. Jest —watch mode really shines in this case. +* It will simplify the on-boarding process for new developers. Having tests helps new developers ramp up on the new code base, allowing them to fix bugs and deliver features. + +One thing to notice: while jest unit- and integration tests will help improve the stability of the system, they should be considered one part of a bigger stability infrastructure with multiple layers of automated testing: flow, e2e, screenshot, "Redbox", performance tests. + +## Testing with Relay + +Testing applications that use Relay may be challenging, because of the additional data fetching layer that wraps the actual product code. + +And it's not always easy to understand the mechanics of all processes that are happening behind Relay, and how to properly handle interactions with the framework. + +Fortunately, there are tools that aim to simplify the process of writing tests for Relay components, by providing imperative APIs for controlling the request/response flow and additional API for mock data generation. 
+ +There are two main modules that you may use in your tests: + +* `createMockEnvironment(options): RelayMockEnvironment` +* `MockPayloadGenerator` and the `@relay_test_operation` directive + + +With `createMockEnvironment,` you will be able to create an instance of `RelayMockEnvironment`, a Relay environment specifically for your tests. The instance created by `createMockEnvironment` implements the Relay Environment Interface and it also has an additional Mock layer, with methods that allow you to resolve/reject and control the flow of operations (queries/mutations/subscriptions). + +The main purpose of `MockPayloadGenerator` is to improve the process of creating and maintaining the mock data for tested components. + +One of the patterns you may see in the tests for Relay components: 95% of the test code is the test preparation—the gigantic mock object with dummy data, manually created, or just a copy of a sample server response that needs to be passed as the network response. And the remaining 5% is actual test code. As a result, people don't test much. It's hard to create and manage all these dummy payloads for different cases. Hence, writing tests is time-consuming and tests are sometimes painful to maintain. + +With the `MockPayloadGenerator` and `@relay_test_operation`, we want to get rid of this pattern and switch the developer's focus from the preparation of the test to the actual testing. + + +## RelayMockEnvironment API Overview + +RelayMockEnvironment is a special version of Relay Environment with additional API methods for controlling the operation flow: resolving and rejection operations, providing incremental payloads for subscriptions, working with the cache. 
+ +* Methods for finding operations executed on the environment + * `getAllOperations()` - get all operation executed during the test by the current time + * `findOperation(findFn => boolean) `- find particular operation in the list of all executed operations, this method will throw, if operation is not available. Maybe useful to find a particular operation when multiple operations executed at the same time + * `getMostRecentOperation() -` return the most recent operation, this method will throw if no operations were executed prior this call. +* Methods for resolving or rejecting operations + * `nextValue(request | operation, data)` - provide payload for operation(request), but not complete request. Practically useful when testing incremental updates and subscriptions + * `complete(request | operation)` - complete the operation, no more payloads are expected for this operation, when it's completed. + * `resolve(request | operation, data)` - resolve the request with provided GraphQL response. Essentially, it's nextValue(...) and complete(...) + * `reject(request | operation, error)` - reject the request with particular error + * `resolveMostRecentOperation(operation => data)` - resolve and getMostRecentOperation work together + * `rejectMostRecentOperation(operation => error)` - reject and getMostRecentOperation work together + * `queueOperationResolver(operation => data | error)` - adds an OperationResolver function to the queue. The passed resolver will be used to resolve/reject operations as they appear + * `queuePendingOperation(query, variables)` - in order for the `usePreloadedQuery` hook to not suspend, one must call these functions: + * `queueOperationResolver(resolver)` + * `queuePendingOperation(query, variables)` + * `preloadQuery(mockEnvironment, query, variables)` with the same `query` and `variables` that were passed to `queuePendingOperation`. `preloadQuery` must be called after `queuePendingOperation`. 
+* Additional utility methods + * `isLoading(request | operation)` - will return `true` if operations has not been completed, yet. + * `cachePayload(request | operation, variables, payload)` - will add payload to QueryResponse cache + * `clearCache() `- will clear QueryResponse cache + +## Mock Payload Generator and the `@relay_test_operation` Directive + +`MockPayloadGenerator` may drastically simplify the process of creating and maintaining mock data for your tests. `MockPayloadGenerator` can generate dummy data for the selection that you have in your operation. There is an API to modify the generated data - Mock Resolvers. With Mock Resolvers, you may adjust the data for your needs. Mock Resolvers are defined as an object where **keys are names of GraphQL types (`ID`, `String`, `User`, `Comment`, etc),** and values are functions that return the default data for the type. + +Example of a simple Mock Resolver: + +```js +{ + ID() { + // Return mock value for a scalar filed with type ID + return 'my-id'; + }, + String() { + // Every scalar field with type String will have this default value + return "Lorem Ipsum" + } +} +``` + + +It is possible to define more resolvers for Object types + +```js +{ + // This will be the default values for User object in the query response + User() { + return { + id: 4, + name: "Mark", + profile_picture: { + uri: "http://my-image...", + }, + }; + }, +} +``` + + + +### Mock Resolver Context + +The first argument of the MockResolver is the object that contains Mock Resolver Context. It is possible to return dynamic values from mock resolvers based on the context - for instance, name or alias of the field, a path in the selection, arguments, or parent type. 
+ + +```js +{ + String(context) { + if (context.name === 'zip') { + return '94025'; + } + if (context.path != null && context.path.join('.') === 'node.actor.name') { + return 'Current Actor Name'; + } + if (context.parentType === 'Image' && context.name === 'uri') { + return 'http://my-image.url'; + } + } +} +``` + +### ID Generation + +The second argument of the Mock Resolver is a function that will generate a sequence of integers, useful to generate unique ids in the tests + +```js +{ + // will generate strings "my-id-1", "my-id-2", etc. + ID(_, generateId) { + return `my-id-${generateId()}`; + }, +} +``` + +### Float, Integer, Boolean, etc... + +Please note, that for production queries we don't have full type information for Scalar fields - like Boolean, Integer, Float. And in the MockResolvers, they map to String. You can use `context` to adjust return values, based on the field name, alias, etc. + +### @relay_test_operation + +Most of GraphQL type information for a specific field in the selection is not available during Relay runtime. By default, Relay, cannot get type information for a scalar field in the selection, or an interface type of the object. + +Operation with the @relay_test_operation directive will have additional metadata that will contain GraphQL type info for fields in the operation's selection. And it will improve the quality of the generated data. You also will be able to define Mock resolvers for Scalar (not only ID and String) and Abstract types: + +```javascript +{ + Float() { + return 123.456; + }, + Boolean(context) { + if (context.name === 'can_edit') { + return true; + } + return false; + }, + Node() { + return { + __typename: 'User', + id: 'my-user-id', + }; + } +} +``` + +## Examples + +### Relay Component Test + +Using `createMockEnvironment` and `MockPayloadGenerator` allows writing concise tests for components that use Relay hooks. 
Both those modules can be imported from `relay-test-utils` + + +```javascript +// Say you have a component with the useLazyLoadQuery or a QueryRenderer +const MyAwesomeViewRoot = require('MyAwesomeViewRoot'); +const { + createMockEnvironment, + MockPayloadGenerator, +} = require('relay-test-utils'); + +// Relay may trigger 3 different states +// for this component: Loading, Error, Data Loaded +// Here is examples of tests for those states. +test('Loading State', () => { + const environment = createMockEnvironment(); + const renderer = ReactTestRenderer.create( + , + ); + + // Here we just verify that the spinner is rendered + expect( + renderer.root.find(node => node.props['data-testid'] === 'spinner'), + ).toBeDefined(); +}); + +test('Data Render', () => { + const environment = createMockEnvironment(); + const renderer = ReactTestRenderer.create( + , + ); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation), + ); + }); + + // At this point operation will be resolved + // and the data for a query will be available in the store + expect( + renderer.root.find(node => node.props['data-testid'] === 'myButton'), + ).toBeDefined(); +}); + +test('Error State', () => { + const environment = createMockEnvironment(); + const renderer = ReactTestRenderer.create( + , + ); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. 
+ ReactTestRenderer.act(() => { + // Error can be simulated with `rejectMostRecentOperation` + environment.mock.rejectMostRecentOperation(new Error('Uh-oh')); + }); + + expect( + renderer.root.find(item => (item.props.testID = 'errorMessage')), + ).toBeDefined(); +}); +``` + + + +### Fragment Component Tests + +Essentially, in the example above, `resolveMostRecentOperation` will generate data for all child fragment containers (pagination, refetch). But, usually the root component may have many child fragment components and you may want to exercise a specific component that uses `useFragment`. The solution for that would be to wrap your fragment container with the `useLazyLoadQuery` component that renders a Query that spreads fragments from your fragment component: + +```javascript +test('Fragment', () => { + const environment = createMockEnvironment(); + const TestRenderer = () => { + const data = useLazyLoadQuery( + graphql` + query TestQuery @relay_test_operation { + myData: node(id: "test-id") { + # Spread the fragment you want to test here + ...MyFragment + } + } + `, + {}, + ); + return + }; + + const renderer = ReactTestRenderer.create( + + + + + + ); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation), + ); + }); + + expect(renderer).toMatchSnapshot(); +}); +``` + +### Pagination Component Test + +Essentially, tests for pagination components (e.g. using `usePaginationFragment`) are not different from fragment component tests. But we can do more here, we can actually see how the pagination works - we can assert the behavior of our components when performing pagination (load more, refetch). 
+ +```js +// Pagination Example +test('`Pagination` Container', () => { + const environment = createMockEnvironment(); + const TestRenderer = () => { + const data = useLazyLoadQuery( + graphql` + query TestQuery @relay_test_operation { + myConnection: node(id: "test-id") { + connection { + # Spread the pagination fragment you want to test here + ...MyConnectionFragment + } + } + } + `, + {}, + ); + return + }; + + const renderer = ReactTestRenderer.create( + + + + + + ); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation, { + ID(_, generateId) { + // Why we're doing this? + // To make sure that we will generate a different set of ID + // for elements on first page and the second page. + return `first-page-id-${generateId()}`; + }, + PageInfo() { + return { + has_next_page: true, + }; + }, + }), + ); + }); + + // Let's find a `loadMore` button and click on it to initiate pagination request, for example + const loadMore = renderer.root.find(node => node.props['data-testid'] === 'loadMore') + expect(loadMore.props.disabled).toBe(false); + loadMore.props.onClick(); + + // Wrapping in ReactTestRenderer.act will ensure that components + // are fully updated to their final state. + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation, { + ID(_, generateId) { + // See, the second page IDs will be different + return `second-page-id-${generateId()}`; + }, + PageInfo() { + return { + // And the button should be disabled, now. Probably. + has_next_page: false, + }; + }, + }), + ); + }); + + expect(loadMore.props.disabled).toBe(true); +}); +``` + +### Refetch Component + +We can use similar approach here with wrapping the component with a query. 
And for the sake of completeness, we will add an example here: + +```js +test('Refetch Container', () => { + const environment = createMockEnvironment(); + const TestRenderer = () => { + const data = useLazyLoadQuery( + graphql` + query TestQuery @relay_test_operation { + myData: node(id: "test-id") { + # Spread the pagination fragment you want to test here + ...MyRefetchableFragment + } + } + `, + {}, + ); + return + }; + + const renderer = ReactTestRenderer.create( + + + + + + ); + + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation), + ); + }); + + // Assuming we have refetch button in the Container + const refetchButton = renderer.root.find(node => node.props['data-testid'] === 'refetch'); + + // This should trigger the `refetch` + refetchButton.props.onClick(); + + ReactTestRenderer.act(() => { + environment.mock.resolveMostRecentOperation(operation => + MockPayloadGenerator.generate(operation, { + // We can customize mock resolvers, to change the output of the refetch query + }), + ); + }); + + expect(renderer).toMatchSnapshot(); +}); +``` + + + +### Mutations + +Mutations themselves are operations, so we can test them independently (unit-test) for a specific mutation, or in combination with the view from which this mutation is called. + +:::note +the `useMutation` API is an improvement over calling `commitMutation` directly. 
+::: + +```js +// Say, you have a mutation function +function sendMutation(environment, onCompleted, onError, variables) + commitMutation(environment, { + mutation: graphql`...`, + onCompleted, + onError, + variables, + }); +} + +// Example test may be written like so +test('it should send mutation', () => { + const environment = createMockEnvironment(); + const onCompleted = jest.fn(); + sendMutation(environment, onCompleted, jest.fn(), {}); + const operation = environment.mock.getMostRecentOperation(); + + ReactTestRenderer.act(() => { + environment.mock.resolve( + operation, + MockPayloadGenerator.generate(operation) + ); + }); + + expect(onCompleted).toBeCalled(); +}); +``` + +### Subscription + +> The `useSubscription` API is an improvement over calling `requestSubscription` directly. + +We can test subscriptions similarly to how we test mutations. + +```js +// Example subscribe function +function subscribe(environment, onNext, onError, variables) + requestSubscription(environment, { + subscription: graphql`...`, + onNext, + onError, + variables, + }); +} + +// Example test may be written like so +test('it should subscribe', () => { + const environment = createMockEnvironment(); + const onNext = jest.fn(); + subscribe(environment, onNext, jest.fn(), {}); + const operation = environment.mock.getMostRecentOperation(); + + ReactTestRenderer.act(() => { + environment.mock.nextValue( + operation, + MockPayloadGenerator.generate(operation) + ); + }); + + expect(onNext).toBeCalled(); +}); +``` + + + +### Example with `queueOperationResolver` + + +With `queueOperationResolver` it is possible to define responses for operations that will be executed on the environment + +```javascript +// Say you have a component with the QueryRenderer +const MyAwesomeViewRoot = require('MyAwesomeViewRoot'); +const { + createMockEnvironment, + MockPayloadGenerator, +} = require('relay-test-utils'); + +test('Data Render', () => { + const environment = createMockEnvironment(); + 
environment.mock.queueOperationResolver(operation => + MockPayloadGenerator.generate(operation), + ); + + const renderer = ReactTestRenderer.create( + , + ); + + // At this point operation will be resolved + // and the data for a query will be available in the store + expect( + renderer.root.find(node => node.props['data-testid'] === 'myButton'), + ).toBeDefined(); +}); + +test('Error State', () => { + const environment = createMockEnvironment(); + environment.mock.queueOperationResolver(() => + new Error('Uh-oh'), + ); + const renderer = ReactTestRenderer.create( + , + ); + + expect( + renderer.root.find(item => (item.props.testID = 'errorMessage')), + ).toBeDefined(); +}); +``` + +### With Relay Hooks + +The examples in this guide should work for testing components both with Relay Hooks, Containers or Renderers. When writing tests that involve the `usePreloadedQuery` hook, please also see the `queuePendingOperation` note above. + +### toMatchSnaphot(...) + +Even though in all of the examples here you can see assertions with `toMatchSnapshot()`, we keep it that way just to make examples concise. But it's not the recommended way to test your components. + +**[React Testing Library](https://testing-library.com/react)** is a set of helpers that let you test React components without relying on their implementation details. This approach makes refactoring a breeze and also nudges you towards best practices for accessibility. Although it doesn't provide a way to "shallowly" render a component without its children, a test runner like Jest lets you do this by [mocking](https://reactjs.org/docs/testing-recipes.html#mocking-modules). 
+ + + +### More Examples + + + +As a reference implementation I've put working examples here: +https://phabricator.internmc.facebook.com/diffusion/FBS/browse/master/xplat/js/RKJSModules/Libraries/Relay/oss/relay-test-utils/__tests__/RelayMockEnvironmentWithComponents-test.js + + + + + +The best source of example tests is in [the relay-experimental package](https://github.com/facebook/relay/tree/main/packages/relay-experimental/__tests__). + + + +Testing is good. You should definitely do it. + + diff --git a/website/versioned_docs/version-v15.0.0/guides/testing-relay-with-preloaded-queries.md b/website/versioned_docs/version-v15.0.0/guides/testing-relay-with-preloaded-queries.md new file mode 100644 index 0000000000000..3dba45a6bcb97 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/testing-relay-with-preloaded-queries.md @@ -0,0 +1,163 @@ +--- +id: testing-relay-with-preloaded-queries +title: Testing Relay with Preloaded Queries +slug: /guides/testing-relay-with-preloaded-queries/ +description: Relay guide to testing with preloaded queries +keywords: +- testing +- preloaded +- usePreloadedQuery +- queueOperationResolver +- queuePendingOperation +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Components that use preloaded queries (`useQueryLoader` and `usePreloadedQuery` hooks) require slightly different and more convoluted test setup. + +In short, there are two steps that need to be performed **before rendering the component** + +1. Configure the query resolver to generate the response via `environment.mock.queueOperationResolver` +2. Record a pending queue invocation via `environment.mock.queuePendingOperation` + +## Symptoms that something is wrong + +1. The test doesn't do what is expected from it. +2. The query seems to be blocking instead of executing + 1. E.g. the `Suspend` doesn't switch from "waiting" to "data loaded" state +3. 
If you add the `console.log` before and after `usePreloadedQuery`, only the "before" call is hit + +## TL;DR + +```javascript +const {RelayEnvironmentProvider} = require('react-relay'); +const { MockPayloadGenerator, createMockEnvironment } = require('relay-test-utils'); +const {render} = require('testing-library-react'); +// at the time of writing, act is not re-exported by our internal testing-library-react +// but is re-exported by the "external" version +const {act} = require('ReactTestUtils'); +test("...", () => { + // arrange + const environment = createMockEnvironment(); + environment.mock.queueOperationResolver(operation => { + return MockPayloadGenerator.generate(operation, { + CurrencyAmount() { + return { + formatted_amount: "1234$", + }; + }, + }); + }); + const query = YourComponentGraphQLQueryGoesHere; // can be the same, or just identical + const variables = { + // ACTUAL variables for the invocation goes here + }; + environment.mock.queuePendingOperation(YourComponentGraphQLQuery, variables); + + // act + const {getByTestId, ..otherStuffYouMightNeed} = render( + + + + ); + // trigger the loading - click a button, emit an event, etc. or ... + act(() => jest.runAllImmediates()); // ... if loadQuery is in the useEffect() + // assert + // your assertions go here +}); +``` + +### Configure the query resolver to generate the response + +This is done via `environment.mock.queueOperationResolver(operation)` call, but getting it right might be tricky. + +The crux of this call is to return a mocked graphql result in a very particular format (as `MockResolvers` type, to be precise). This is done via a second parameter to `generate` - it is an object, whose keys are GraphQL types that we want to mock. (See [`mock-payload-generator`](../testing-relay-components/#mock-payload-generator-and-the-relay_test_operation-directive)). 
+ +Continuing on the above example: + +```js +return MockPayloadGenerator.generate(operation, { + CurrencyAmount() { // <-- the GraphQL type + return { + formatted_amount: "response_value" <-- CurrencyAmount fields, selected in the query + }; + } +}); +``` +The tricky thing here is to obtain the name of the GraphQL type and fields to return. This can be done in two ways: + +* Call `console.log(JSON.stringify(operation, null, 2))` and look for the `concreteType` that corresponds to what we want to mock. Then look at the sibling `selections` array, which describes the fields that are selected from that object. + + + +* This is somewhat intense - P139017123 is the output for [this query](https://fburl.com/diffusion/irqurgj9). Rule of thumb - one nested call in the query produces one nested object in the output. +* Look up the type in the graphiql (bunnylol graphiql), then specify the fields listed on the query. + +:::note +The type you need seems to be the type returned by the *innermost function call* (or calls, if you have multiple functions called in one query - see D23078476). This needs to be confirmed - in both example diffs the target types was also leafs. +::: + + + + +It is **possible** to return different data for different query variables via [Mock Resolver Context](../testing-relay-components/#mock-resolver-context). The query variables will be available on the `context.args`, but only to the *innermost function call* (for the query above, only `offer_ids` are available) + +```javascript +CurrencyAmount(context) { + console.log(JSON.stringify(context, null, 2)); // <-- + return { formatted_amount: mockResponse } +} +// <-- logs { ...snip..., "name": "subtotal_price_for_offers", args: { offer_ids: [...] } } +``` +### Record a pending queue invocation + +This is more straightforward - it is done via a call to `environment.mock.queuePendingOperation(query, variables)` + +* `Query` needs to match the query issues by the component. 
Simplest (and most robust against query changes) is to export the query from the component module and use it in the test, but having an *identical* (but not the same) query works as well. +* `variables` has to match the variables that will be used in this test invocation. + * Beware of nested objects and arrays - they are compared via `areEqual` ([invocation code](https://github.com/facebook/relay/blob/046f758c6b411608371d4cc2f0a594ced331864e/packages/relay-test-utils/RelayModernMockEnvironment.js#L233)) + * Arrays are compared by values (not by reference), but the order of elements matter + * Nested objects - performs deep compare, order of keys is not relevant (this is not confirmed - please update this doc if you used a graphql query with "deep" structure*)* + + + +### Example diffs + +* [D23078476](https://internalfb.com/intern/diff/D23078476) +* [D23101739](https://www.internalfb.com/diff/D23101739) + + + +## Troubleshooting + +* `console.log`, `console.log` everywhere! Recommended places: + * component: before and after `useQueryLoader, usePreloadedQuery, loadQuery` + * test: in `queueOperationResolver` callback + * library: in `RelayModernMockEnvironment.execute`, after the `const currentOperation = ...` call ([here](https://github.com/facebook/relay/blob/046f758c6b411608371d4cc2f0a594ced331864e/packages/relay-test-utils/RelayModernMockEnvironment.js#L230)) +* If `loadQuery` is not called - make sure to issue the triggering event. Depending on your component implementation it could be a user-action (like button click or key press), javascript event (via event emitter mechanisms) or a simple "delayed execution" with `useEffect`. + * The `useEffect` case is probably easiest to miss - make sure to call `act(() => jest.runAllImmediates())` **after** rendering the component +* If "before" `usePreloadedQuery` is hit, but "after" is not - the query suspends. This entire guide is written to resolve it - you might want to re-read it. 
But most likely it is either: + * Used a different query - the query resolver would not be called, `currentOperation` will be `null` + * Query variables don't match - the query resolver would not be called, `currentOperation` will be `null` (make sure to inspect the `variables`). + * Also, make sure arrays are in the same order, if any (or better yet, use sets, if at all possible). +* If data returned rom the query is not what you expect, make sure you're generating the right graphql type. + * You can tell you're mocking the wrong one if the return values look something like `` + + +:::note +Make sure the component and the test use the same environment (i.e. there's no `` somewhere nested in your test React tree. +::: + + +## Epilogue + +Examples here use `testing-library-react`, but it works with the `react-test-renderer` as well. + + + +See [D23078476](https://www.internalfb.com/diff/D23078476). + + + + diff --git a/website/versioned_docs/version-v15.0.0/guides/type-emission.md b/website/versioned_docs/version-v15.0.0/guides/type-emission.md new file mode 100644 index 0000000000000..3171df6369ca3 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/guides/type-emission.md @@ -0,0 +1,414 @@ +--- +id: type-emission +title: Type Emission +slug: /guides/type-emission/ +description: Relay guide to type emission +keywords: +- type emission +--- + +import DocsRating from '@site/src/core/DocsRating'; +import {FbInternalOnly, OssOnly, fbContent} from 'docusaurus-plugin-internaldocs-fb/internal'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +As part of its normal work, the [**Relay Compiler**](../compiler) will emit type information for your language of choice that helps you write type-safe application code. These types are included in the artifacts that `relay-compiler` generates to describe your operations and fragments. + +## Operation variables + +The shape of the variables object used for query, mutation, or subscription operations. 
+ +In this example the emitted type-information would require the variables object to contain an `artistID` key with a non-null string. + + + + +```javascript +/** + * export type ExampleQuery$variables = { + * +artistID: string, + * } + * export type ExampleQuery$data = { + * +artist: { + * +name: ?string, + * } + * } + * export type ExampleQuery = { + * +variables: ExampleQuery$variables, + * +response: ExampleQuery$data, + * } + */ + +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) { + artist(id: $artistID) { + name + } + } + `, + // variables are expected to be of type ExampleQuery$variables + {artistID: 'banksy'}, +); +``` + + + + + +```javascript +/** + * export type ExampleQuery$variables = { + * readonly artistID: string + * } + * export type ExampleQuery$data = { + * readonly artist?: { + * readonly name?: string + * } + * } + * export type ExampleQuery = { + * readonly variables: ExampleQuery$variables + * readonly response: ExampleQuery$data + * } + */ +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) { + artist(id: $artistID) { + name + } + } + `, + // variables are expected to be of type ExampleQuery$variables + {artistID: 'banksy'}, +); +``` + + + + +## Operation and fragment data + +The shape of the data selected in a operation or fragment, following the [data-masking] rules. That is, excluding any data selected by fragment spreads. + +In this example the emitted type-information describes the response data which is returned by `useLazyLoadQuery` (or `usePreloadedQuery`). + + + + +```javascript +/** + * export type ExampleQuery$variables = { + * +artistID: string, + * } + * export type ExampleQuery$data = { + * +artist: { + * +name: ?string, + * } + * } + * export type ExampleQuery = { + * +variables: ExampleQuery$variables, + * +response: ExampleQuery$data, + * } + */ + +// data is of type ExampleQuery$data +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) 
{ + artist(id: $artistID) { + name + } + } + `, + {artistID: 'banksy'}, +); + +return props.artist &&
{props.artist.name} is great!
+``` + +
+ + + +```javascript +/** + * export type ExampleQuery$variables = { + * readonly artistID: string + * } + * export type ExampleQuery$data = { + * readonly artist?: { + * readonly name?: string + * } + * } + * export type ExampleQuery = { + * readonly variables: ExampleQuery$variables + * readonly response: ExampleQuery$data + * } + */ + +// data is of type ExampleQuery$data +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) { + artist(id: $artistID) { + name + } + } + `, + {artistID: 'banksy'}, +); + +return props.artist &&
{props.artist.name} is great!
+``` + +
+
+ + +Similarly, in this example the emitted type-information describes the type of the prop to match the type of the fragment reference `useFragment` expects to receive. + + + + +```javascript +/** + * export type ExampleFragmentComponent_artist$data = { + * +name: string + * } + * + * export type ExampleFragmentComponent_artist$key = { ... } + */ + +import type { ExampleFragmentComponent_artist$key } from "__generated__/ExampleFragmentComponent_artist.graphql" + +type Props = { + artist: ExampleFragmentComponent_artist$key, +}; + +export default ExampleFragmentComponent(props) { + // data is of type ExampleFragmentComponent_artist$data + const data = useFragment( + graphql` + fragment ExampleFragmentComponent_artist on Artist { + biography + } + `, + props.artist, + ); + + return
About the artist: {props.artist.biography}
; +} +``` + +
+ + + +```javascript +/** + * export type ExampleFragmentComponent_artist$data = { + * readonly name: string + * } + * + * export type ExampleFragmentComponent_artist$key = { ... } + */ + +import { ExampleFragmentComponent_artist$key } from "__generated__/ExampleFragmentComponent_artist.graphql" + +interface Props { + artist: ExampleFragmentComponent_artist$key, +}; + +export default ExampleFragmentComponent(props: Props) { + // data is of type ExampleFragmentComponent_artist$data + const data = useFragment( + graphql` + fragment ExampleFragmentComponent_artist on Artist { + biography + } + `, + props.artist, + ); + + return
About the artist: {props.artist.biography}
; +} +``` + +
+
+ +## Fragment references + +The opaque identifier described in [data-masking] that a child container expects to receive from its parent, which represents the child container’s fragment spread inside the parent’s fragment. + + + +:::important +Please read [this important caveat](#single-artifact-directory) about actually enabling type-safe fragment reference checking. +::: + + + +Consider a component that [composes](../../guided-tour/rendering/fragments/#composing-fragments) the above fragment component example. In this example, the emitted type-information of the child component receives a unique opaque identifier type, called a fragment reference, which the type-information emitted for the parent’s fragment references in the location where the child’s fragment is spread. Thus ensuring that the child’s fragment is spread into the parent’s fragment _and_ the correct fragment reference is passed to the child component at runtime. + + + + +```javascript +import { ExampleFragmentComponent } from "./ExampleFragmentComponent" + +/** + * import type { ExampleFragmentComponent_artist$fragmentType } from "ExampleFragmentComponent_artist.graphql"; + * + * export type ExampleQuery$data = { + * +artist: ?{ + * +name: ?string, + * +$fragmentSpreads: ExampleFragmentComponent_artist$fragmentType, + * } + * }; + * export type ExampleQuery$variables = { + * +artistID: string, + * } + * export type ExampleQuery = { + * +variables: ExampleQuery$variables, + * +response: ExampleQuery$data, + * } + */ + +// data is of type ExampleQuery$data +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) { + artist(id: $artistID) { + name + ...ExampleFragmentComponent_artist + } + } + `, + {artistID: 'banksy'}, +); + +// Here only `data.artist.name` is directly visible, +// the marker prop $fragmentSpreads indicates that `data.artist` +// can be used for the component expecting this fragment spread. 
+return ; +``` + + + + + +```javascript +import { ExampleFragmentComponent } from "./ExampleFragmentComponent" + +/** + * import { ExampleFragmentComponent_artist$fragmentType } from "ExampleFragmentComponent_artist.graphql"; + * + * export type ExampleQuery$data = { + * readonly artist?: { + * readonly name: ?string, + * readonly " $fragmentSpreads": ExampleFragmentComponent_artist$fragmentType + * } + * } + * export type ExampleQuery$variables = { + * readonly artistID: string + * } + * export type ExampleQuery = { + * readonly variables: ExampleQuery$variables + * readonly response: ExampleQuery$data + * } + */ + +// data is of type ExampleQuery$data +const data = useLazyLoadQuery( + graphql` + query ExampleQuery($artistID: ID!) { + artist(id: $artistID) { + name + ...ExampleFragmentComponent_artist + } + } + `, + {artistID: 'banksy'}, +); + +// Here only `data.artist.name` is directly visible, +// the marker prop $fragmentSpreads indicates that `data.artist` +// can be used for the component expecting this fragment spread. +return ; +``` + + + + + + +## Single artifact directory + +An important caveat to note is that by default strict fragment reference type-information will _not_ be emitted, instead they will be typed as `any` and would allow you to pass in any data to the child component. + +To enable this feature, you will have to tell the compiler to store all the artifacts in a single directory, by specifing the `artifactDirectory` in the +compiler configuration: + +``` +{ + // package.json + "relay": { + "artifactDirectory": "./src/__generated__", + ... + }, + ... +} +``` + +…and additionally inform the babel plugin in your `.babelrc` config where to look for the artifacts: + +```json +{ + "plugins": [ + ["relay", { "artifactDirectory": "./src/__generated__" }] + ] +} +``` + +It is recommended to alias this directory in your module resolution configuration such that you don’t need to specify relative paths in your source files. 
This is what is also done in the above examples, where artifacts are imported from a `__generated__` alias, rather than relative paths like `../../../../__generated__`. + +### Background information + +The reason is that `relay-compiler` and its artifact emission is stateless. Meaning that it does not keep track of locations of original source files and where the compiler previously saved the accompanying artifact on disk. Thus, if the compiler were to emit artifacts that try to import fragment reference types from _other_ artifacts, the compiler would: + +- first need to know where on disk that other artifact exists; +- and update imports when the other artifact changes location on disk. + +Facebook uses a module system called [Haste], in which all source files are considered in a flat namespace. This means that an import declaration does not need to specify the path to another module and thus there is no need for the compiler to ever consider the above issues. I.e. an import only needs to specify the basename of the module filename and Haste takes care of actually finding the right module at import time. Outside of Facebook, however, usage of the Haste module system is non-existent nor encouraged, thus the decision to not import fragment reference types but instead type them as `any`. + +At its simplest, we can consider Haste as a single directory that contains all module files, thus all module imports always being safe to import using relative sibling paths. This is what is achieved by the single artifact directory feature. Rather than co-locating artifacts with their source files, all artifacts are stored in a single directory, allowing the compiler to emit imports of fragment reference types. 
+ + + +[data-masking]: ../../principles-and-architecture/thinking-in-relay#data-masking + +[Haste]: https://twitter.com/dan_abramov/status/758655309212704768 + + diff --git a/website/versioned_docs/version-v15.0.0/home.md b/website/versioned_docs/version-v15.0.0/home.md new file mode 100644 index 0000000000000..77e006c7a0b04 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/home.md @@ -0,0 +1,65 @@ +--- +id: home +title: Home +slug: / +description: Relay documentation landing page +keywords: +- relay +- graphql +- data +- introduction +- home +--- + +# Relay Docs + +import DocsRating from '@site/src/core/DocsRating'; +import {OssOnly, FbInternalOnly} from 'docusaurus-plugin-internaldocs-fb/internal'; + +Relay is a data management library for React that lets you fetch and update data with GraphQL. It embodies years of learning to give you **outstanding performance by default** while keeping your code **stable and maintainable**. + +Relay brings the composability of React components to data fetching. Each component declares its own data needs, and Relay combines them into efficient pre-loadable queries. Every aspect of its design is to make the natural way of writing components also the most performant. + +## Features + +* Declarative data: Just declare what data each component needs and Relay will handle the loading states. +* Co-location and composability: Each component declares its own data needs; Relay combines them into efficient queries. When you re-use a component on a different screen, your queries are automatically updated. +* Pre-fetching: Relay analyses your code so you can start fetching queries before your code even downloads or runs. +* UI patterns: Relay implements loading states, pagination, refetching, optimistic updates, rollbacks, and other common UI behaviors that are tricky to get right. 
+* Consistent updates: Relay maintains a normalized data store, so components that observe the same data stay in sync even if they reach it by different queries. +* Streaming and deferred data: Declaratively defer parts of your query and Relay will progressively re-render your UI as the data streams in. +* Great developer experience: Relay provides autocompletion and go-to-definition for your GraphQL schema. +* Type safety: Relay generates type definitions so that mistakes are caught statically, not at runtime. +* Manage local data: Use the same API for server data and local client state. +* Hyper-optimized runtime: Relay is relentlessly optimized. Its JIT-friendly runtime processes incoming data faster by statically determining what payloads to expect. + +## Stack + +Relay works on the Web and on React Native — it is used extensively at Meta in both environments. It is framework-agnostic and works with Next, React Router, Create React App, etc. It works with both TypeScript and Flow. + +Relay is completely tied to GraphQL, so if you cannot use GraphQL then it's not the right choice for you. + +Relay has a UI-agnostic layer that fetches and manages data, and a React-specific layer that handles loading states, pagination, and other UI paradigms. It is mainly supported when used with React, although you can access your Relay data outside of React if you need to. The React-specific parts of Relay are based on Suspense, so there are some limitations if you're stuck on an older version of React. + +## Where to Go from Here + + + +
+Start with the tutorial — it will take you step-by-step through building a Relay app. +
+ + +- An overview of the **[prerequisites](./getting-started/prerequisites/)** for using Relay, and an **[installation and setup guide](./getting-started/installation-and-setup/)**. +- The **[API reference](./api-reference/relay-environment-provider/)**, for a reference of our APIs including a detailed overview of their inputs and outputs. + +
+ + + +- Start with the **[tutorial](./tutorial/intro/)** — it will take you step-by-step through building a Relay app. +- The **[API reference](./api-reference/relay-environment-provider/)**, for a reference of our APIs including a detailed overview of their inputs and outputs. + + + + diff --git a/website/versioned_docs/version-v15.0.0/migration-and-compatibility/relay-hooks-and-legacy-container-apis.md b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/relay-hooks-and-legacy-container-apis.md new file mode 100644 index 0000000000000..fc9d1dc248aea --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/relay-hooks-and-legacy-container-apis.md @@ -0,0 +1,564 @@ +--- +id: relay-hooks-and-legacy-container-apis +title: Relay Hooks and Legacy Container APIs +slug: /migration-and-compatibility/relay-hooks-and-legacy-container-apis/ +description: Relay guide to compatibility between hooks and containers +keywords: +- migration +- compatibility +- container +- QueryRenderer +- FragmentContainer +- RefetchContainer +- PaginationContainer +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## Compatibility between Relay Hooks and Containers + +Relay Hooks are fully compatible with Relay's [container-based APIs](../../api-reference/legacy-apis/), meaning that containers can render components that use Hooks, and vice-versa. + +This means that you can adopt Relay Hooks incrementally, either by using them exclusively for new code, or by migrating specific parts of your app, without affecting the rest of your existing application. + + +## Migrating existing container-based code + +As we've mentioned, migrating existing code to Relay Hooks is ***not*** required, and **container-based code will continue to work**. + +However, in this section we will go over common migration patterns you can follow if you do choose to migrate container-based code to Relay Hooks. 
+ + +### `QueryRenderer` → `useLazyLoadQuery` + +Converting from a `QueryRenderer` to the [`useLazyLoadQuery`](../../api-reference/use-lazy-load-query/) Hook is the most straightforward conversion, and will have a similar behavior of fetching the specified query *during render.* + +To convert a `QueryRenderer` to `useLazyLoadQuery`, you need to take the following steps: + +1. Render a [`RelayEnvironmentProvider`](../../api-reference/relay-environment-provider/) where the QueryRenderer was, or above it. Usually, we recommend rendering the `RelayEnvironmentProvider` at the very root of your app: + +```js + + + +``` + + +2. Convert the `QueryRenderer` into `useLazyLoadQuery`: + +**Before:** + +```js +import * as React from 'React'; +import {graphql, QueryRenderer} from 'react-relay'; + +export default function Home() { + return ( + { + if (error) { + return ; + } + if (!props) { + return ; + } + return

{props.user?.name}

+ }} + /> + ); +} +``` + + +**After:** +Fetch and render the query: + +```js +import * as React from 'React'; +import {graphql, useLazyLoadQuery} from 'react-relay'; + +export default function Home() { + const data = useLazyLoadQuery( + graphql` + query HomeQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + {id: 4}, + ); + + return

{data.user?.name}

; +} +``` + +[Loading states](../../guided-tour/rendering/loading-states/) and [error states](../../guided-tour/rendering/error-states/) are handled by Suspense and Error Boundaries: + +```js + + }> + + + +``` + + + +### `QueryRenderer` → `useQueryLoader` + `usePreloadedQuery` + +Unlike `useLazyLoadQuery`, using [`useQueryLoader`](../../api-reference/use-query-loader/) in combination with [`usePreloadedQuery`](../../api-reference/use-preloaded-query/) will start fetching the data *ahead* of render, following the "render-as-you-fetch" pattern. This means that the data fetch will start sooner, and potentially speed up the time it takes to show content to users. + +To make best use of this pattern, query loading is usually integrated at the router level, or other parts of your UI infra. To see a full example, see our [`issue-tracker`](https://github.com/relayjs/relay-examples/blob/main/issue-tracker/src/routes.js) example app. + + +To convert a `QueryRenderer` to `useQueryLoader`, you need to take the following steps: + +1. Render a [`RelayEnvironmentProvider`](../../api-reference/relay-environment-provider/) where the QueryRenderer was, or above it. Usually, we recommend rendering the `RelayEnvironmentProvider` at the very root of your app: + +```js + + + +``` + +2. Convert the `QueryRenderer` into `useQueryLoader` + `usePreloadedQuery`: + +**Before:** + +```js +import * as React from 'React'; +import {graphql, QueryRenderer} from 'react-relay'; + +export default function UserPopover() { + return ( + { + if (error) { + return ; + } + if (!props) { + return ; + } + return

{props.user?.name}

+ }} + /> + ); +} +``` + + +**After:** +Render the preloaded query: + +```js +import * as React from 'React'; +import {graphql, usePreloadedQuery} from 'react-relay'; + +export default function UserPopover(props) { + const data = usePreloadedQuery( + graphql` + query UserPopoverQuery($id: ID!) { + user(id: $id) { + name + } + } + `, + props.queryRef, + ); + + return

{data.user?.name}

; +``` + + +Load the query with `loadQuery` from `useQueryLoader`. This part of the code would usually be integrated in your routing, or other parts of your UI infra: + +```js +import * as React from 'React'; +import {useQueryLoader} from 'react-relay'; + +// Import the query defined in the UserPopover component +import UserPopoverQuery from '__generated__/UserPopoverQuery.graphql'; + +// This is *NOT* a real-world example, only used +// to illustrate usage. + +export default function UserPopoverButton(props) { + const [queryRef, loadQuery] = useQueryLoader(UserPopoverQuery) + + const handleClick = useCallback(() => { + // Load the query in the event handler, onClick + loadQuery({id: props.userID}) + }, [loadQuery, props.userID]); + + return ( + <> + + + ); +} + +export default createRefetchContainer( + CommentBody, + { + user: graphql` + fragment CommentBody_comment on Comment { + body(lang: $lang) { + text + } + } + `, + }, + + // This option is no longer required, the refetch query + // will automatically be generated by Relay using the @refetchable + // directive. + graphql` + query AppQuery($id: ID!, $lang: Lang) { + node(id: $id) { + ...CommentBody_comment + } + } + `, +); +``` + +**After:** + +```js +import * as React from 'React'; +import {graphql, useRefetchableFragment} from 'react-relay'; + +export default function CommentBody(props: Props) { + const [data, refetch] = useRefetchableFragment( + graphql` + fragment CommentBody_comment on Comment + @refetchable(queryName: "CommentBodyRefetchQuery") { + body(lang: $lang) { + text + } + } + `, + props.comment, + ); + + const handleClick = useCallback(() => { + refetch({lang: 'SPANISH'}); + }, [refetch]); + + return ( + <> +

{data.body?.text}

+ + + ); +} +``` + + + +### Pagination Container → `usePaginationFragment` + +The pagination API for [`usePaginationFragment`](../../api-reference/use-pagination-fragment/) has been greatly simplified and reduced compared to the former PaginationContainer. Migration will require mapping inputs into the new API. + +**Before:** + +```js +import * as React from 'React'; +import {graphql, createPaginationContainer} from 'react-relay'; + +class UserContainerComponent extends React.Component { + render(): React.Node { + const isLoading = this.props.relay.isLoading() || this.state.loading; + const hasMore = this.props.relay.hasMore(); + + return ( + <> + + + + ); + } + + loadMore() { + if ( + !this.props.relay.hasMore() || + this.props.relay.isLoading() || + this.state.loading + ) { + return; + } + + this.setState({loading: true}); + + this.props.relay.loadMore(5, () => this.setState({loading: false})); + } +} + +export default createPaginationContainer( + UserContainerComponent, + { + user: graphql` + fragment UserContainerComponent_user on User + @argumentDefinitions(count: {type: "Int!"}, cursor: {type: "ID"}) + @refetchable(queryName: "UserComponentRefetchQuery") { + friends(first: $count, after: $cursor) + @connection(key: "UserComponent_user_friends") { + edges { + node { + name + } + } + } + } + `, + }, + { + // This option is no longer necessary, usePaginationFragment supports + // bi-directional pagination out of the box. 
+ direction: 'forward', + + // This option is no longer required, and will be automatically + // determined by usePaginationFragment + getConnectionFromProps(props: Props) { + return props.user?.friends; + }, + + // This option is no longer required, and will be automatically + // determined by usePaginationFragment + getFragmentVariables(vars, count) { + return {...vars, count}; + }, + + // This option is no longer required, and will be automatically + // determined by usePaginationFragment + getVariables(props: Props, {count, cursor}) { + return { + cursor, + count, + }; + }, + + // This option is no longer required, the pagination query + // will automatically be generated by Relay using the @refetchable + // directive. + query: graphql` + query UserContainerComponentQuery { + viewer { + actor { + ... on User { + ...UserContainerComponent_user @arguments(count: 10) + } + } + } + } + `, + }, +); +``` + + +**After:** + +```js +import * as React from 'React'; +import {graphql, usePaginationFragment} from 'react-relay'; + +export default function UserComponent(props: Props) { + const {data, loadNext, hasNext, isLoadingNext} = usePaginationFragment( + graphql` + fragment UserComponent_user on User + @refetchable(queryName: "UserComponentRefetchQuery") { + friends(first: $count, after: $after) + @connection(key: "UserComponent_user_friends") { + edges { + node { + name + } + } + } + } + `, + props.user, + ); + + const handleClick = useCallback(() => { + loadNext(5) + }, [loadNext]) + + return ( + <> + + + + ); +} +``` + + + + +* * * + +### QueryRenderer → useEntryPointLoader + EntryPointContainer + +TODO + + + +### commitMutation → useMutation + +TODO + + +### requestSubscription → useSubscription + +TODO + + diff --git a/website/versioned_docs/version-v15.0.0/migration-and-compatibility/suspense-compatibility.md b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/suspense-compatibility.md new file mode 100644 index 0000000000000..75930e4f02b52 --- 
/dev/null +++ b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/suspense-compatibility.md @@ -0,0 +1,36 @@ +--- +id: suspense-compatibility +title: Suspense Compatibility +slug: /migration-and-compatibility/suspense-compatibility/ +description: Relay guide to suspense compatibility +keywords: +- suspense +- container +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## What about Suspense? + +Relay Hooks uses React Suspense for [specifying loading states](../../guided-tour/rendering/loading-states/), so you might be wondering: Why is that the case if Suspense for Data Fetching is still not supported? Does this mean that Suspense for Data Fetching is officially supported now in React 17? + +## Is Suspense for Data Fetching ready yet? + +The short answer is: **NO**. + +**Support, general guidance, and requirements for usage of Suspense for Data Fetching are still not ready**, and the React team is still defining what this guidance will be for upcoming React releases. + +With that said, even though there are still things to figure out before Suspense for Data Fetching can be broadly implemented and adopted, we released Relay Hooks on React 17 for a few reasons: + +* Relay was a very early adopter of Suspense, and collaborated with React on the research of Suspense for Data Fetching. It was one of the first testing grounds for using Suspense in production, and helped inform some of its design decisions. As such, there are still parts of our Suspense *implementation* that reflect those early learnings (which aren't yet fully documented) and which aren't quite where we want them to be. Although we know there are still likely changes to be made in the implementation, and that there will be some limitations when Suspense is used in React 17, we know Relay Hooks are on the right trajectory for upcoming releases of React, and those changes can be streamlined and allow us to release Relay Hooks a bit earlier. 
+* The Relay Hooks APIs represent the APIs we want to deliver long-term for Relay and which we believe are an improvement over our previous APIs. Even though their underlying implementation is still changing and will likely change more as the Suspense for Data Fetching guidance is documented and finalized by the React team, the Relay Hooks APIs themselves are stable. They have been widely adopted internally at Facebook, and have been in use in production for over a year, so we are confident that they work. We want to allow the community to start adopting them, and be able to get external feedback from the community as well. + + +## What does it mean for me if I start using Relay Hooks in React 17? + +What this means for users adopting Relay Hooks is: + +* There will be some limitations when using Suspense in React 17, which we've documented in [our docs](../../guided-tour/refetching/refetching-queries-with-different-data/#if-you-need-to-avoid-suspense). Specifically, the current release includes a subset of features that work with both synchronous rendering and concurrent rendering. In order to fully support Suspense for Data Fetching, we also need features such as concurrently rendering suspended trees, and transitioning to new trees when data is refetched. The APIs we've currently released will allow us to support concurrent rendering with the same APIs in future versions of React. +* When a future version of React is released that fully supports concurrent rendering and Suspense for Data Fetching, Relay will also make a new major release alongside the React release. That release will likely include breaking changes that we will document for the upgrade. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/migration-and-compatibility/upgrading-to-relay-hooks.md b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/upgrading-to-relay-hooks.md new file mode 100644 index 0000000000000..f9619a27907bb --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/migration-and-compatibility/upgrading-to-relay-hooks.md @@ -0,0 +1,38 @@ +--- +id: upgrading-to-relay-hooks +title: Upgrading to Relay Hooks +slug: /migration-and-compatibility/ +description: Relay guide to upgrading to Relay hooks +keywords: +- upgrade +- hooks +--- + +[Relay Hooks](/blog/2021/03/09/introducing-relay-hooks) is a set of new Hooks-based APIs for using Relay with React that improves upon the existing container-based APIs. + +In this guide we will cover how to start using Relay Hooks, what you need to know about compatibility, and how to migrate existing container-based code to Hooks if you choose to do so. However, note that migrating existing code to Relay Hooks is ***not*** required, and **container-based code will continue to work**. + +## Accessing Relay Hooks + +Make sure the latest versions of React and Relay are installed, and that you’ve followed additional setup in our [Installation & Setup](../getting-started/installation-and-setup/) guide: + +``` +yarn add react react-dom react-relay +``` + +Then, you can import Relay Hooks from the **`react-relay`** module, or if you only want to include Relay Hooks in your bundle, you can import them from **`react-relay/hooks`**: + +```js +import {graphql, useFragment} from 'react-relay'; // or 'react-relay/hooks' + +// ...
+``` + +## Next Steps + +Check out the following guides in this section: +* [Suspense Compatibility](./suspense-compatibility/) +* [Relay Hooks and Legacy Container APIs](./relay-hooks-and-legacy-container-apis/) + + +For more documentation on the APIs themselves, check out our [API Reference](../api-reference/relay-environment-provider) or our [Guided Tour](../guided-tour/). diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/architecture-overview.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/architecture-overview.md new file mode 100644 index 0000000000000..fdf24c7641935 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/architecture-overview.md @@ -0,0 +1,24 @@ +--- +id: architecture-overview +title: Architecture Overview +slug: /principles-and-architecture/architecture-overview/ +description: Relay architecture overview guide +keywords: +- architecture +--- + +import DocsRating from '@site/src/core/DocsRating'; + +This document, together with [Runtime Architecture](../runtime-architecture/) and [Compiler Architecture](../compiler-architecture/), describes the high-level architecture of Relay. The intended audience includes developers interested in contributing to Relay, developers hoping to utilize the building blocks of Relay to create higher-level APIs, and anyone interested in understanding more about Relay internals. For developers wanting to learn more about _using_ Relay to build products, the [Guided Tour](../../guided-tour/) is the best resource. + +## Core Modules + +Relay is composed of three core parts: + +- **Relay Compiler:** A GraphQL to GraphQL optimizing _compiler_, providing general utilities for transforming and optimizing queries as well as generating build artifacts. 
A novel feature of the compiler is that it facilitates experimentation with new GraphQL features - in the form of custom directives - by making it easy to translate code using these directives into standard, spec-compliant GraphQL. +- **Relay Runtime:** A full-featured, high-performance GraphQL _runtime_ that can be used to build higher-level client APIs. The runtime features a normalized object cache, optimized "write" and "read" operations, a generic abstraction for incrementally fetching field data (such as for pagination), garbage collection for removing unreferenced cache entries, optimistic mutations with arbitrary logic, support for building subscriptions and live queries, and more. +- **React/Relay:** A high-level _product API_ that integrates the Relay Runtime with React. This is the primary public interface to Relay for most product developers, featuring APIs to fetch the data for a query or define data dependencies for reusable components (e.g. `useFragment`). + +Note that these modules are _loosely coupled_. For example, the compiler emits representations of queries in a well-defined format that the runtime consumes, such that the compiler implementation can be swapped out if desired. React/Relay relies only on the well-documented public interface of the runtime, such that the actual implementation can be swapped out. We hope that this loose coupling will allow the community to explore new use-cases such as the development of specialized product APIs using the Relay runtime or integrations of the runtime with view libraries other than React. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/compiler-architecture.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/compiler-architecture.md new file mode 100644 index 0000000000000..f872264b8082c --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/compiler-architecture.md @@ -0,0 +1,106 @@ +--- +id: compiler-architecture +title: Compiler Architecture +slug: /principles-and-architecture/compiler-architecture/ +description: Relay compiler architecture guide +keywords: +- compiler +- architecture +- transform +--- + +import DocsRating from '@site/src/core/DocsRating'; + +The compiler is a set of modules designed to extract GraphQL documents from across a codebase, transform/optimize them, and generate build artifacts. Examples of common types of artifacts include optimized GraphQL to persist to your server, runtime representations of the queries for use with GraphQL clients such as the Relay runtime, or generated source code for use with GraphQL frameworks for compiled languages (Java/Swift/etc). + +## Data Flow + +The high-level flow of data through the compiler is represented in the following diagram: + +``` + + ┌─────────────┐┌─────────────┐ + │ GraphQL ││ Schema │ + └─────────────┘└─────────────┘ + │ │ parse + └───────┬──────┘ + ▼ + ┌────────────────────────────┐ + │ CompilerContext │ + │ │ + │ ┌─────┐ ┌─────┐ ┌─────┐ │──┐ + │ │ IR │ │ IR │ │ ... │ │ │ + │ └─────┘ └─────┘ └─────┘ │ │ + └────────────────────────────┘ │ transform/ + │ │ ▲ │ optimize + │ │ └────────────┘ + │ │ + │ └──────────┐ + │ print │ codegen + ▼ ▼ + ┌─────────────┐ ┌─────────────┐ + │ GraphQL │ │ Artifacts │ + └─────────────┘ └─────────────┘ +``` + +1. GraphQL text is extracted from source files and "parsed" into an intermediate representation (IR) using information from the schema. +2. The set of IR documents forms a CompilerContext, which is then transformed and optimized. +3. 
Finally, GraphQL is printed (e.g. to files, saved to a database, etc) and any artifacts are generated.
+
+## Data Types & Modules
+
+The compiler module is composed of a set of core building blocks as well as a helper that packages them together in an easy-to-use API. Some of the main data types and modules in the compiler are as follows:
+
+- `IR` (Intermediate Representation): an (effectively immutable) representation of a GraphQL document (query, fragment, field, etc) as a tree structure, including type information from a schema. Compared to the standard GraphQL AST (produced by e.g. `graphql-js`) the main difference is that it encodes more of the semantics of GraphQL. For example, conditional branches (`@include` and `@skip`) are represented directly, making it easier to target optimizations for these directives (One such optimization is to merge sibling fields with the same condition, potentially reducing the number of conditionals that must be evaluated at runtime).
+- `CompilerContext`: an immutable representation of a corpus of GraphQL documents. It contains the schema and a mapping of document names to document representations (as IR, see above).
+- `Transform`: a "map"-like function that accepts a `CompilerContext` as input and returns a new, modified context as output. Examples below.
+- `Parser`: Converts a GraphQL schema and raw GraphQL text into typed IR objects.
+- `Printer`: a function that accepts IR and converts it to a GraphQL string.
+
+The `RelayCompiler` module is a helper class that demonstrates one way of combining these primitives. It takes IR transforms and, given IR definitions, constructs a CompilerContext from them, transforms them, and generates output artifacts intended for use with the Relay runtime.
+
+## Transforms
+
+One of the main goals of the compiler is to provide a consistent platform for writing tools that transform or optimize GraphQL.
This includes the ability to experiment with new directives by transforming them away at compile time. Transform functions should typically perform a single type of modification - it's expected that an app will have multiple transforms configured in the compiler instance. + +Here are a few examples of some of the included transforms: + +- `FlattenTransform`: Reduces extraneous levels of indirection in a query, inlining fields from anonymous fragments wherever they match the parent type. This can be beneficial when generating code to read the results of a query or process query results, as it reduces duplicate field processing. For example: + +``` +# before: `id` is processed twice +foo { # type FooType + id + ... on FooType { # matches the parent type, so this is extraneous + id + } + } + + # after: `id` is processed once + foo { + id + } +``` + +- `SkipRedundantNodeTransform`: A more advanced version of flattening, this eliminates more complex cases of field duplication such as when a field is fetched both unconditionally and conditionally, or is fetched by two different sub-fragments. For example: + +``` +# before: `id` processed up to 2x +foo { + bar { + id + } + ... 
on FooType @include(if: $cond) { # can't be flattened due to conditional + id # but this field is guaranteed to be fetched regardless + } +} + +# after: `id` processed at most once +foo { + bar { + id + } +} +``` + + diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/runtime-architecture.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/runtime-architecture.md new file mode 100644 index 0000000000000..9e1e84abcd706 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/runtime-architecture.md @@ -0,0 +1,249 @@ +--- +id: runtime-architecture +title: Runtime Architecture +slug: /principles-and-architecture/runtime-architecture/ +description: Relay runtime architecture guide +keywords: +- runtime +- architecture +- store +- DataID +- Record +- RecordSource +--- + +import DocsRating from '@site/src/core/DocsRating'; + +The Relay runtime is a full-featured GraphQL client that is designed for high performance even on low-end mobile devices and is capable of scaling to large, complex apps. The runtime API is not intended to be used directly in product code, but rather to provide a foundation for building higher-level product APIs such as React/Relay. This foundation includes: + +- A normalized, in-memory object graph/cache. +- An optimized "write" operation for updating the cache with the results of queries/mutations/subscriptions. +- A mechanism for reading data from the cache and subscribing for updates when these results change due to a mutation, subscription update, etc. +- Garbage collection to evict entries from the cache when they can no longer be referenced by any view. +- A generic mechanism for intercepting data prior to publishing it to the cache and either synthesizing new data or merging new and existing data together (which among other things enables the creation of a variety of pagination schemes). 
+- Mutations with optimistic updates and the ability to update the cache with arbitrary logic.
+- Support for live queries where supported by the network/server.
+- Core primitives to enable subscriptions.
+- Core primitives for building offline/persisted caching.
+
+## Data Types
+
+- `DataID` (type): A globally unique or client-generated identifier for a record, stored as a string.
+- `Record` (type): A representation of a distinct data entity with an identity, type, and fields. Note that the actual runtime representation is opaque to the system: all accesses to `Record` objects (including record creation) are mediated through the `RelayModernRecord` module. This allows the representation itself to be changed in a single place (e.g. to use `Map`s or a custom class). It is important that other code does not assume that `Record`s will always be plain objects.
+- `RecordSource` (type): A collection of records keyed by their data ID, used both to represent the cache and updates to it. For example the store's record cache is a `RecordSource` and the results of queries/mutations/subscriptions are normalized into `RecordSource`s that are published to a store. Sources also define methods for asynchronously loading records in order to (eventually) support offline use-cases. Currently the only implementation of this interface is `RelayInMemoryRecordSource`; future implementations may add support for loading records from disk.
+- `Store` (type): The source of truth for an instance of `RelayRuntime`, holding the canonical set of records in the form of a `RecordSource` (though this is not required). Currently the only implementation is `RelayModernStore`.
+- `Network` (type): Provides methods for fetching query data from and executing mutations against an external data source.
+- `Environment` (type): Represents an encapsulated environment combining a `Store` and `Network`, providing a high-level API for interacting with both. This is the main public API of `RelayRuntime`.
+ +Types for working with queries and their results include: + +- `Selector` (type): A selector defines the starting point for a traversal into the graph for the purposes of targeting a subgraph, combining a GraphQL fragment, variables, and the Data ID for the root object from which traversal should progress. Intuitively, this "selects" a portion of the object graph. +- `Snapshot` (type): The (immutable) results of executing a `Selector` at a given point in time. This includes the selector itself, the results of executing it, and a list of the Data IDs from which data was retrieved (useful in determining when these results might change). + +## Data Model + +Relay Runtime is designed for use with GraphQL schemas that describe **object graphs** in which objects have a type, an identity, and a set of fields with values. Objects may reference each other, which is represented by fields whose values are one or more other objects in the graph [1]. To distinguish from JavaScript `Object`s, these units of data are referred to as `Record`s. Relay represents both its internal cache as well as query/mutation/etc results as a mapping of **data ID**s to **records**. The data ID is the unique (with respect to the cache) identifier for a record - it may be the value of an actual `id` field or based on the path to the record from the nearest object with an `id` (such path-based ids are called **client ids**). Each `Record` stores its data ID, type, and any fields that have been fetched. Multiple records are stored together as a `RecordSource`: a mapping of data IDs to `Record` instances. 
+ +For example, a user and their address might be represented as follows: + +``` + +// GraphQL Fragment +fragment on User { + id + name + address { + city + } +} + +// Response +{ + id: '842472', + name: 'Joe', + address: { + city: 'Seattle', + } +} + +// Normalized Representation +RecordSource { + '842472': Record { + __id: '842472', + __typename: 'User', // the type is known statically from the fragment + id: '842472', + name: 'Joe', + address: {__ref: 'client:842472:address'}, // link to another record + }, + 'client:842472:address': Record { + // A client ID, derived from the path from parent & parent's ID + __id: 'client:842472:address', + __typename: 'Address', + city: 'Seattle', + } +} +``` + +[1] Note that GraphQL itself does not impose this constraint, and Relay Runtime may also be used for schemas that do not conform to it. For example, both systems can be used to query a single denormalized table. However, many of the features that Relay Runtime provides, such as caching and normalization, work best when the data is represented as a normalized graph with stable identities for discrete pieces of information. + +### Store Operations + +The `Store` is the source of truth for application data and provides the following core operations. + +- `lookup(selector: Selector): Snapshot`: Reads the results of a selector from the store, returning the value given the data currently in the store. + +- `subscribe(snapshot: Snapshot, callback: (snapshot: Snapshot) => void): Disposable`: Subscribe to changes to the results of a selector. The callback is called when data has been published to the store that would cause the results of the snapshot's selector to change. + +- `publish(source: RecordSource): void`: Update the store with new information. All updates to the store are expressed in this form, including the results of queries/mutation/subscriptions as well as optimistic mutation updates. 
All of those operations internally create a new `RecordSource` instance and ultimately publish it to the store. Note that `publish()` does _not_ immediately update any `subscribe()`-ers. Internally, the store compares the new `RecordSource` with its internal source, updating it as necessary: + - Records that exist only in the published source are added to the store. + - Records that exist in both are merged into a new record (inputs unchanged), with the result added to the store. + - Records that are null in the published source are deleted (set to null) in the store. + - Records with a special sentinel value are removed from the store. This supports un-publishing optimistically created records. + +- `notify(): void`: Calls any `subscribe()`-ers whose results have changed due to intervening `publish()`-es. Separating `publish()` and `notify()` allows for multiple payloads to be published before performing any downstream update logic (such as rendering). + +- `retain(selector: Selector): Disposable`: Ensure that all the records necessary to fulfill the given selector are retained in-memory. The records will not be eligible for garbage collection until the returned reference is disposed. + +### Example Data Flow: Fetching Query Data + +``` + + ┌───────────────────────┐ + │ Query │ + └───────────────────────┘ + │ + ▼ + ┌ ─ ─ ─ ┐ + fetch ◀────────────▶ Server + └ ─ ─ ─ ┘ + │ + ┌─────┴───────┐ + ▼ ▼ + ┌──────────┐ ┌──────────┐ + │ Query │ │ Response │ + └──────────┘ └──────────┘ + │ │ + └─────┬───────┘ + │ + ▼ + normalize + │ + ▼ + ┌───────────────────────┐ + │ RecordSource │ + │ │ + │┌──────┐┌──────┐┌─────┐│ + ││Record││Record││ ... ││ + │└──────┘└──────┘└─────┘│ + └───────────────────────┘ + +``` + +1. The query is fetched from the network. +2. The query and response are traversed together, extracting the results into `Record` objects which are added to a fresh `RecordSource`. 
+ +This fresh `RecordSource` would then be published to the store: + +``` + + publish + │ + ▼ + ┌───────────────────────────┐ + │ Store │ + │ ┌───────────────────────┐ │ + │ │ RecordSource │ │ + │ │ │ │ + │ │┌──────┐┌──────┐┌─────┐│ │ + │ ││Record││Record││ ... ││ │ <--- records are updated + │ │└──────┘└──────┘└─────┘│ │ + │ └───────────────────────┘ │ + │ ┌───────────────────────┐ │ + │ │ Subscriptions │ │ + │ │ │ │ + │ │┌──────┐┌──────┐┌─────┐│ │ + │ ││ Sub. ││ Sub. ││ ... ││ │ <--- subscriptions do not fire yet + │ │└──────┘└──────┘└─────┘│ │ + │ └───────────────────────┘ │ + └───────────────────────────┘ + +``` + +Publishing the results updates the store but does _not_ immediately notify any subscribers. This is accomplished by calling `notify()`... + +``` + + notify + │ + ▼ + ┌───────────────────────────┐ + │ Store │ + │ ┌───────────────────────┐ │ + │ │ RecordSource │ │ + │ │ │ │ + │ │┌──────┐┌──────┐┌─────┐│ │ + │ ││Record││Record││ ... ││ │ + │ │└──────┘└──────┘└─────┘│ │ + │ └───────────────────────┘ │ + │ ┌───────────────────────┐ │ + │ │ Subscriptions │ │ + │ │ │ │ + │ │┌──────┐┌──────┐┌─────┐│ │ + │ ││ Sub.││ Sub.││ ...││ │ <--- affected subscriptions fire + │ │└──────┘└──────┘└─────┘│ │ + │ └───┼───────┼───────┼───┘ │ + └─────┼───────┼───────┼─────┘ + │ │ │ + ▼ │ │ + callback │ │ + ▼ │ + callback │ + ▼ + callback + +``` + +...which calls the callbacks for any `subscribe()`-ers whose results have changed. Each subscription is checked as follows: + +1. First, the list of data IDs that have changed since the last `notify()` is compared against data IDs listed in the subscription's latest `Snapshot`. If there is no overlap, the subscription's results cannot possibly have changed (if you imagine the graph visually, there is no overlap between the part of the graph that changed and the part that is selected). In this case the subscription is ignored, otherwise processing continues. +2. 
Second, any subscriptions that do have overlapping data IDs are re-read, and the new/previous results are compared. If the result has not changed, the subscription is ignored (this can occur if a field of a record changed that is not relevant to the subscription's selector), otherwise processing continues. +3. Finally, subscriptions whose data actually changed are notified via their callback. + +### Example Data Flow: Reading and Observing the Store + +Products access the store primarily via `lookup()` and `subscribe()`. Lookup reads the initial results of a fragment, and subscribe observes that result for any changes. Note that the output of `lookup()` - a `Snapshot` - is the input to `subscribe()`. This is important because the snapshot contains important information that can be used to optimize the subscription - if `subscribe()` accepted only a `Selector`, it would have to re-read the results in order to know what to subscribe to, which is less efficient. + +Therefore a typical data flow is as follows - note that this flow is managed automatically by higher-level APIs such as React/Relay. First a component will lookup the results of a selector against a record source (e.g. the store's canonical source): + +``` + + ┌───────────────────────┐ ┌──────────────┐ + │ RecordSource │ │ │ + │ │ │ │ + │┌──────┐┌──────┐┌─────┐│ │ Selector │ + ││Record││Record││ ... ││ │ │ + │└──────┘└──────┘└─────┘│ │ │ + └───────────────────────┘ └──────────────┘ + │ │ + │ │ + └──────────────┬────────────┘ + │ + │ lookup + │ (read) + │ + ▼ + ┌─────────────┐ + │ │ + │ Snapshot │ + │ │ + └─────────────┘ + │ + │ render, etc + │ + ▼ + +``` + +Next, it will `subscribe()` using this snapshot in order to be notified of any changes - see the above diagram for `publish()` and `notify()`. 
+ + diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-graphql.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-graphql.md new file mode 100644 index 0000000000000..fc787bd18b318 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-graphql.md @@ -0,0 +1,309 @@ +--- +id: thinking-in-graphql +title: Thinking in GraphQL +slug: /principles-and-architecture/thinking-in-graphql/ +description: Relay guide to thinking in GraphQL +keywords: +- GraphQL +--- + +import DocsRating from '@site/src/core/DocsRating'; + +GraphQL presents new ways for clients to fetch data by focusing on the needs of product developers and client applications. It provides a way for developers to specify the precise data needed for a view and enables a client to fetch that data in a single network request. Compared to traditional approaches such as REST, GraphQL helps applications to fetch data more efficiently (compared to resource-oriented REST approaches) and avoid duplication of server logic (which can occur with custom endpoints). Furthermore, GraphQL helps developers to decouple product code and server logic. For example, a product can fetch more or less information without requiring a change to every relevant server endpoint. It's a great way to fetch data. + +In this article we'll explore what it means to build a GraphQL client framework and how this compares to clients for more traditional REST systems. Along the way we'll look at the design decisions behind Relay and see that it's not just a GraphQL client but also a framework for _declarative data-fetching_. Let's start at the beginning and fetch some data! + +## Fetching Data + +Imagine we have a simple application that fetches a list of stories, and some details about each one. 
Here's how that might look in resource-oriented REST: + +```javascript +// Fetch the list of story IDs but not their details: +rest.get('/stories').then(stories => + // This resolves to a list of items with linked resources: + // `[ { href: "http://.../story/1" }, ... ]` + Promise.all(stories.map(story => + rest.get(story.href) // Follow the links + )) +).then(stories => { + // This resolves to a list of story items: + // `[ { id: "...", text: "..." } ]` + console.log(stories); +}); +``` + +Note that this approach requires _n+1_ requests to the server: 1 to fetch the list, and _n_ to fetch each item. With GraphQL we can fetch the same data in a single network request to the server (without creating a custom endpoint that we'd then have to maintain): + +```javascript +graphql.get(`query { stories { id, text } }`).then( + stories => { + // A list of story items: + // `[ { id: "...", text: "..." } ]` + console.log(stories); + } +); +``` + +So far we're just using GraphQL as a more efficient version of typical REST approaches. Note two important benefits in the GraphQL version: + +- All data is fetched in a single round trip. +- The client and server are decoupled: the client specifies the data needed instead of _relying on_ the server endpoint to return the correct data. + +For a simple application that's already a nice improvement. + +## Client Caching + +Repeatedly refetching information from the server can get quite slow. For example, navigating from the list of stories, to a list item, and back to the list of stories means we have to refetch the whole list. We'll solve this with the standard solution: _caching_. + +In a resource-oriented REST system, we can maintain a **response cache** based on URIs: + +```javascript +var _cache = new Map(); +rest.get = uri => { + if (!_cache.has(uri)) { + _cache.set(uri, fetch(uri)); + } + return _cache.get(uri); +}; +``` + +Response-caching can also be applied to GraphQL. 
A basic approach would work similarly to the REST version. The text of the query itself can be used as a cache key: + +```javascript +var _cache = new Map(); +graphql.get = queryText => { + if (!_cache.has(queryText)) { + _cache.set(queryText, fetchGraphQL(queryText)); + } + return _cache.get(queryText); +}; +``` + +Now, requests for previously cached data can be answered immediately without making a network request. This is a practical approach to improving the perceived performance of an application. However, this method of caching can cause problems with data consistency. + +## Cache Consistency + +With GraphQL it is very common for the results of multiple queries to overlap. However, our response cache from the previous section doesn't account for this overlap — it caches based on distinct queries. For example, if we issue a query to fetch stories: + +```graphql +query { stories { id, text, likeCount } } +``` + +and then later refetch one of the stories whose `likeCount` has since been incremented: + +```graphql +query { story(id: "123") { id, text, likeCount } } +``` + +We'll now see different `likeCount`s depending on how the story is accessed. A view that uses the first query will see an outdated count, while a view using the second query will see the updated count. + +### Caching A Graph + +The solution to caching GraphQL is to normalize the hierarchical response into a flat collection of **records**. Relay implements this cache as a map from IDs to records. Each record is a map from field names to field values. Records may also link to other records (allowing it to describe a cyclic graph), and these links are stored as a special value type that references back into the top-level map. With this approach each server record is stored _once_ regardless of how it is fetched. 
+
+Here's an example query that fetches a story's text and its author's name:
+
+```graphql
+query {
+  story(id: "1") {
+    text,
+    author {
+      name
+    }
+  }
+}
+```
+
+And here's a possible response:
+
+```json
+{
+  "query": {
+    "story": {
+      "text": "Relay is open-source!",
+      "author": {
+        "name": "Jan"
+      }
+    }
+  }
+}
+```
+
+Although the response is hierarchical, we'll cache it by flattening all the records. Here is an example of how Relay would cache this query response:
+
+```javascript
+Map {
+  // `story(id: "1")`
+  1: Map {
+    text: 'Relay is open-source!',
+    author: Link(2),
+  },
+  // `story.author`
+  2: Map {
+    name: 'Jan',
+  },
+};
+```
+
+This is only a simple example: in reality the cache must handle one-to-many associations and pagination (among other things).
+
+### Using The Cache
+
+So how do we use this cache? Let's look at two operations: writing to the cache when a response is received, and reading from the cache to determine if a query can be fulfilled locally (the equivalent to `_cache.has(key)` above, but for a graph).
+
+### Populating The Cache
+
+Populating the cache involves walking a hierarchical GraphQL response and creating or updating normalized cache records. At first it may seem that the response alone is sufficient to process it, but in fact this is only true for very simple queries. Consider `user(id: "456") { photo(size: 32) { uri } }` — how should we store `photo`? Using `photo` as the field name in the cache won't work because a different query might fetch the same field but with different argument values (e.g. `photo(size: 64) {...}`). A similar issue occurs with pagination. If we fetch the 11th to 20th stories with `stories(first: 10, offset: 10)`, these new results should be _appended_ to the existing list.
+
+Therefore, a normalized response cache for GraphQL requires processing payloads and queries in parallel.
For example, the `photo` field from above might be cached with a generated field name such as `photo_size(32)` in order to uniquely identify the field and its argument values. + +### Reading From Cache + +To read from the cache we can walk a query and resolve each field. But wait: that sounds _exactly_ like what a GraphQL server does when it processes a query. And it is! Reading from the cache is a special case of an executor where a) there's no need for user-defined field functions because all results come from a fixed data structure and b) results are always synchronous — we either have the data cached or we don't. + +Relay implements several variations of **query traversal**: operations that walk a query alongside some other data such as the cache or a response payload. For example, when a query is fetched Relay performs a "diff" traversal to determine what fields are missing (much like React diffs virtual DOM trees). This can reduce the amount of data fetched in many common cases and even allow Relay to avoid network requests at all when queries are fully cached. + +### Cache Updates + +Note that this normalized cache structure allows overlapping results to be cached without duplication. Each record is stored once regardless of how it is fetched. Let's return to the earlier example of inconsistent data and see how this cache helps in that scenario. + +The first query was for a list of stories: + +```graphql +query { stories { id, text, likeCount } } +``` + +With a normalized response cache, a record would be created for each story in the list. The `stories` field would store links to each of these records. + +The second query refetched the information for one of those stories: + +```graphql +query { story(id: "123") { id, text, likeCount } } +``` + +When this response is normalized, Relay can detect that this result overlaps with existing data based on its `id`. Rather than create a new record, Relay will update the existing `123` record. 
The new `likeCount` is therefore available to _both_ queries, as well as any other query that might reference this story. + +## Data/View Consistency + +A normalized cache ensures that the _cache_ is consistent. But what about our views? Ideally, our React views would always reflect the current information from the cache. + +Consider rendering the text and comments of a story along with the corresponding author names and photos. Here's the GraphQL query: + +```graphql +query { + story(id: "1") { + text, + author { name, photo }, + comments { + text, + author { name, photo } + } + } +} +``` + +After initially fetching this story our cache might be as follows. Note that the story and comment both link to the same record as `author`: + +``` +// Note: This is pseudo-code for `Map` initialization to make the structure +// more obvious. +Map { + // `story(id: "1")` + 1: Map { + text: 'got GraphQL?', + author: Link(2), + comments: [Link(3)], + }, + // `story.author` + 2: Map { + name: 'Yuzhi', + photo: 'http://.../photo1.jpg', + }, + // `story.comments[0]` + 3: Map { + text: 'Here\'s how to get one!', + author: Link(2), + }, +} +``` + +The author of this story also commented on it — quite common. Now imagine that some other view fetches new information about the author, and her profile photo has changed to a new URI. Here's the _only_ part of our cached data that changes: + +``` +Map { + ... + 2: Map { + ... + photo: 'http://.../photo2.jpg', + }, +} +``` + +The value of the `photo` field has changed; and therefore the record `2` has also changed. And that's it. Nothing else in the _cache_ is affected. But clearly our _view_ needs to reflect the update: both instances of the author in the UI (as story author and comment author) need to show the new photo. + +A standard response is to "just use immutable data structures" — but let's see what would happen if we did: + +``` +ImmutableMap { + 1: ImmutableMap // same as before + 2: ImmutableMap { + ... 
// other fields unchanged + photo: 'http://.../photo2.jpg', + }, + 3: ImmutableMap // same as before +} +``` + +If we replace `2` with a new immutable record, we'll also get a new immutable instance of the cache object. However, records `1` and `3` are untouched. Because the data is normalized, we can't tell that `story`'s contents have changed just by looking at the `story` record alone. + +### Achieving View Consistency + +There are a variety of solutions for keeping views up to date with a flattened cache. The approach that Relay takes is to maintain a mapping from each UI view to the set of IDs it references. In this case, the story view would subscribe to updates on the story (`1`), the author (`2`), and the comments (`3` and any others). When writing data into the cache, Relay tracks which IDs are affected and notifies _only_ the views that are subscribed to those IDs. The affected views re-render, and unaffected views opt-out of re-rendering for better performance (Relay provides a safe but effective default `shouldComponentUpdate`). Without this strategy, every view would re-render for even the tiniest change. + +Note that this solution will also work for _writes_: any update to the cache will notify the affected views, and writes are just another thing that updates the cache. + +## Mutations + +So far we've looked at the process of querying data and keeping views up to date, but we haven't looked at writes. In GraphQL, writes are called **mutations**. We can think of them as queries with side effects. Here's an example of calling a mutation that might mark a given story as being liked by the current user: + +```graphql +// Give a human-readable name and define the types of the inputs, +// in this case the id of the story to mark as liked. 
+mutation StoryLike($storyID: String) { + // Call the mutation field and trigger its side effects + storyLike(storyID: $storyID) { + // Define fields to re-fetch after the mutation completes + likeCount + } +} +``` + +Notice that we're querying for data that _may_ have changed as a result of the mutation. An obvious question is: why can't the server just tell us what changed? The answer is: it's complicated. GraphQL abstracts over _any_ data storage layer (or an aggregation of multiple sources), and works with any programming language. Furthermore, the goal of GraphQL is to provide data in a form that is useful to product developers building a view. + +We've found that it's common for the GraphQL schema to differ slightly or even substantially from the form in which data is stored on disk. Put simply: there isn't always a 1:1 correspondence between data changes in your underlying _data storage_ (disk) and data changes in your _product-visible schema_ (GraphQL). The perfect example of this is privacy: returning a user-facing field such as `age` might require accessing numerous records in our data-storage layer to determine if the active user is even allowed to _see_ that `age` (Are we friends? Is my age shared? Did I block you? etc.). + +Given these real-world constraints, the approach in GraphQL is for clients to query for things that may change after a mutation. But what exactly do we put in that query? During the development of Relay we explored several ideas — let's look at them briefly in order to understand why Relay uses the approach that it does: + +- Option 1: Re-fetch everything that the app has ever queried. Even though only a small subset of this data will actually change, we'll still have to wait for the server to execute the _entire_ query, wait to download the results, and wait to process them again. This is very inefficient. + +- Option 2: Re-fetch only the queries required by actively rendered views. This is a slight improvement over option 1. 
However, cached data that _isn't_ currently being viewed won't be updated. Unless this data is somehow marked as stale or evicted from the cache, subsequent queries will read outdated information. + +- Option 3: Re-fetch a fixed list of fields that _may_ change after the mutation. We'll call this list a **fat query**. We found this to also be inefficient because typical applications only render a subset of the fat query, but this approach would require fetching all of those fields. + +- Option 4 (Relay): Re-fetch the intersection of what may change (the fat query) and the data in the cache. In addition to the cache of data Relay also remembers the queries used to fetch each item. These are called **tracked queries**. By intersecting the tracked and fat queries, Relay can query exactly the set of information the application needs to update and nothing more. + +## Data-Fetching APIs + +So far we looked at the lower-level aspects of data-fetching and saw how various familiar concepts translate to GraphQL. Next, let's step back and look at some higher-level concerns that product developers often face around data-fetching: + +- Fetching all the data for a view hierarchy. +- Managing asynchronous state transitions and coordinating concurrent requests. +- Managing errors. +- Retrying failed requests. +- Updating the local cache after receiving query/mutation responses. +- Queuing mutations to avoid race conditions. +- Optimistically updating the UI while waiting for the server to respond to mutations. + +We've found that typical approaches to data-fetching — with imperative APIs — force developers to deal with too much of this non-essential complexity. For example, consider _optimistic UI updates_. This is a way of giving the user feedback while waiting for a server response. The logic of _what_ to do can be quite clear: when the user clicks "like", mark the story as being liked and send the request to the server. But the implementation is often much more complex.
Imperative approaches require us to implement all of those steps: reach into the UI and toggle the button, initiate a network request, retry it if necessary, show an error if it fails (and untoggle the button), etc. The same goes for data-fetching: specifying _what_ data we need often dictates _how_ and _when_ it is fetched. Next, we'll explore our approach to solving these concerns with **Relay**. + + diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-relay.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-relay.md new file mode 100644 index 0000000000000..f07d9c7959488 --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/thinking-in-relay.md @@ -0,0 +1,104 @@ +--- +id: thinking-in-relay +title: Thinking in Relay +slug: /principles-and-architecture/thinking-in-relay/ +description: Relay guide to thinking in Relay +--- + +import DocsRating from '@site/src/core/DocsRating'; + +Relay's approach to data-fetching is heavily inspired by our experience with React. In particular, React breaks complex interfaces into reusable **components**, allowing developers to reason about discrete units of an application in isolation, and reducing the coupling between disparate parts of an application. Even more important is that these components are **declarative**: they allow developers to specify _what_ the UI should look like for a given state, and not have to worry about _how_ to show that UI. Unlike previous approaches that used imperative commands to manipulate native views (e.g. the DOM), React uses a UI description to automatically determine the necessary commands. + +Let's look at some product use-cases to understand how we incorporated these ideas into Relay. We'll assume a basic familiarity with React. 
+ +## Fetching Data For a View + +In our experience, the overwhelming majority of products want one specific behavior: fetch all the data for a view hierarchy while displaying a loading indicator, and then render the entire view once the data is available. + +One solution is to have a root component declare and fetch the data required by it and all of its children. However, this would introduce coupling: any change to a child component would require changing any root component that might render it! This coupling could mean a greater chance for bugs and slow the pace of development. + +Another logical approach is to have each component declare and fetch the data it requires. This sounds great. However, the problem is that a component may render different children based on the data it received. So, nested components will be unable to render and begin fetching their data until parent components' queries have completed. In other words, *this forces data fetching to proceed in stages:* first render the root and fetch the data it needs, then render its children and fetch their data, and so on until you reach leaf components. Rendering would require multiple slow, serial roundtrips. + +Relay combines the advantages of both of these approaches by allowing components to specify what data they require, but to coalesce those requirements into a single query that fetches the data for an entire subtree of components. In other words, it determines *statically* (i.e. before your application runs; at the time you write your code) the requirements for an entire view! + +This is achieved with the help of GraphQL. Functional components use one or more GraphQL fragments to describe their data requirements. These fragments are then nested within other fragments, and ultimately within queries. And when such a query is fetched, Relay will make a single network request for it and all of its nested fragments. 
In other words, the Relay runtime is then able to make a *single network request* for all of the data required by a view! + +Let's dive deeper to understand how Relay achieves this feat. + +## Specifying the data requirements of a component + +With Relay, the data requirements for a component are specified with fragments. Fragments are named snippets of GraphQL that specify which fields to select from an object of a particular type. Fragments are written within GraphQL literals. For example, the following declares a GraphQL literal containing a fragment which selects an author's name and photo url: + +```javascript +// AuthorDetails.react.js +const authorDetailsFragment = graphql` + fragment AuthorDetails_author on Author { + name + photo { + url + } + } +`; +``` + +This data is then read out from the store by calling the `useFragment(...)` hook in a functional React component. The actual author from which to read this data is determined by the second parameter passed to `useFragment`. For example: + +```javascript +// AuthorDetails.react.js +export default function AuthorDetails(props) { + const data = useFragment(authorDetailsFragment, props.author); + // ... +} +``` + +This second parameter (`props.author`) is a fragment reference. Fragment references are obtained by **spreading** a fragment into another fragment or query. Fragments cannot be fetched directly. Instead, all fragments must ultimately be spread (either directly or transitively) into a query for the data to be fetched. + +Let's take a look at one such query. + +## Queries + +In order to fetch that data, we might declare a query which spreads `AuthorDetails_author` as follows: + +```javascript +// Story.react.js +const storyQuery = graphql` + query StoryQuery($storyID: ID!) { + story(id: $storyID) { + title + author { + ...AuthorDetails_author + } + } + } +`; +``` + +Now, we can fetch the query by calling `const data = useLazyLoadQuery(storyQuery, {storyID})`. 
At this point, `data.story.author` (if it is present; all fields are nullable by default) will be a fragment reference that we can pass to `AuthorDetails`. For example: + +```javascript +// Story.react.js +function Story(props) { + const data = useLazyLoadQuery(storyQuery, props.storyId); + + return (<> + {data?.story.title} + {data?.story?.author && <AuthorDetails author={data.story.author} />} + </>); +} +``` + +Note what has happened here. We made a single network request which contained the data required by *both* the `Story` component *and* the `AuthorDetails` component! When that data was available, the entire view could render in a single pass. + +## Data Masking + +With typical approaches to data-fetching we found that it was common for two components to have _implicit dependencies_. For example `<StoryHeader />` might use some data without directly ensuring that the data was fetched. This data would often be fetched by some other part of the system, such as `<Story />`. Then when we changed `<Story />` and removed that data-fetching logic, `<StoryHeader />` would suddenly and inexplicably break. These types of bugs are not always immediately apparent, especially in larger applications developed by larger teams. Manual and automated testing can only help so much: this is exactly the type of systematic problem that is better solved by a framework. + +We've seen that Relay ensures that the data for a view is fetched all at once. But Relay also provides another benefit that isn't immediately obvious: **data masking**. Relay only allows components to access data they specifically ask for in GraphQL fragments, and nothing more. So if one component queries for a Story's `title`, and another for its `text`, each can see _only_ the field that they asked for. In fact, components can't even see the data requested by their _children_: that would also break encapsulation. + +Relay also goes further: it uses opaque identifiers on `props` to validate that we've explicitly fetched the data for a component before rendering it.
If `<Story />` renders `<AuthorDetails />` but forgets to spread its fragment, Relay will warn that the data for `<AuthorDetails />` is missing. In fact, Relay will warn _even if_ some other component happened to fetch the same data required by `<AuthorDetails />`. This warning tells us that although things _might_ work now, they're highly likely to break later. + +## Conclusion + +GraphQL provides a powerful tool for building efficient, decoupled client applications. Relay builds on this functionality to provide a framework for **declarative data-fetching**. By separating _what_ data to fetch from _how_ it is fetched, Relay helps developers build applications that are robust, transparent, and performant by default. It's a great complement to the component-centric way of thinking championed by React. While each of these technologies — React, Relay, and GraphQL — are powerful on their own, the combination is a **UI platform** that allows us to _move fast_ and _ship high-quality apps at scale_. + + diff --git a/website/versioned_docs/version-v15.0.0/principles-and-architecture/videos.md b/website/versioned_docs/version-v15.0.0/principles-and-architecture/videos.md new file mode 100644 index 0000000000000..27826e2c3197c --- /dev/null +++ b/website/versioned_docs/version-v15.0.0/principles-and-architecture/videos.md @@ -0,0 +1,50 @@ +--- +id: videos +title: Videos +slug: /principles-and-architecture/videos/ +description: Relay videos +--- + +import DocsRating from '@site/src/core/DocsRating'; + +## React Conf 2021 + +### Re-introducing Relay | Robert Balicki + + + +## Facebook F8 2017 + +### [The Evolution of React and GraphQL at Facebook and Beyond](https://developers.facebook.com/videos/f8-2017/the-evolution-of-react-and-graphql-at-facebook-and-beyond/) + + + +## Facebook F8 2017 + +### [The Evolution of React and GraphQL at Facebook and Beyond](https://developers.facebook.com/videos/f8-2017/the-evolution-of-react-and-graphql-at-facebook-and-beyond/) + + + +## Facebook F8 2017 + +### [The Evolution of React and GraphQL at
Facebook and Beyond](https://developers.facebook.com/videos/f8-2017/the-evolution-of-react-and-graphql-at-facebook-and-beyond/) + +